commit (stringlengths 40-40) | old_file (stringlengths 4-234) | new_file (stringlengths 4-234) | old_contents (stringlengths 10-3.01k) | new_contents (stringlengths 19-3.38k) | subject (stringlengths 16-736) | message (stringlengths 17-2.63k) | lang (stringclasses 4 values) | license (stringclasses 13 values) | repos (stringlengths 5-82.6k) | config (stringclasses 4 values) | content (stringlengths 134-4.41k) | fuzzy_diff (stringlengths 29-3.44k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
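Each record's `content` column is assembled from the other columns using the template visible in every row below: the `old_contents` under a "## Code Before:" heading, the commit `subject` under "## Instruction:", and the `new_contents` under "## Code After:". The following minimal Python sketch only illustrates that layout; the helper name `build_content` and the truncated example strings are illustrative and are not part of the dataset.

```python
def build_content(old_contents: str, instruction: str, new_contents: str) -> str:
    """Assemble a content-style string from its source columns, mirroring the
    "## Code Before / ## Instruction / ## Code After" layout seen in the records below."""
    return (
        "## Code Before:\n" + old_contents + "\n"
        "## Instruction:\n" + instruction + "\n"
        "## Code After:\n" + new_contents + "\n"
    )

# Illustrative, truncated values taken from the first record below.
print(build_content(
    old_contents="package com.vimeo.networking2.enums\n...",
    instruction="Change expired to pending and updated comments",
    new_contents="package com.vimeo.networking2.enums\n...",
))
```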
e69a4df879a60584eb3dfaabbb852698594e9362
|
models/src/main/java/com/vimeo/networking2/enums/BillingStatusType.kt
|
models/src/main/java/com/vimeo/networking2/enums/BillingStatusType.kt
|
package com.vimeo.networking2.enums
/**
* The status of this user's billing information.
*/
enum class BillingStatusType(override val value: String?) : StringValue {
/**
* The user's billing information is active.
*/
ACTIVE("active"),
/**
* The user's billing information has been canceled.
*/
CANCELED("canceled"),
/**
* The user's billing information has expired.
*/
EXPIRED("expired"),
/**
* The user's billing information is in a grace period before it's put on hold.
*/
GRACE_PERIOD("grace_period"),
/**
* The user's billing information is currently on hold.
*/
ON_HOLD("on_hold"),
/**
* Unknown billing status.
*/
UNKNOWN(null)
}
|
package com.vimeo.networking2.enums
/**
* The status of this user's billing information.
*/
enum class BillingStatusType(override val value: String?) : StringValue {
/**
* The user's billing information is active and will auto-renew.
*/
ACTIVE("active"),
/**
* The user's billing information has been canceled, but the subscription could still be active depending
* on if the expiration date has passed.
*/
CANCELED("canceled"),
/**
* The user's billing information is in a pending state and is likely to change.
*/
PENDING("pending"),
/**
* The user's billing information is in a grace period before it's put on hold while the auto-renew
* charge is retried.
*/
GRACE_PERIOD("grace_period"),
/**
* The user's billing information is currently on hold due to a payment failure and the grace period expiring.
* (Google Play only)
*/
ON_HOLD("on_hold"),
/**
* Unknown billing status.
*/
UNKNOWN(null)
}
|
Change expired to pending and updated comments
|
Change expired to pending and updated comments
|
Kotlin
|
mit
|
vimeo/vimeo-networking-java,vimeo/vimeo-networking-java,vimeo/vimeo-networking-java
|
kotlin
|
## Code Before:
package com.vimeo.networking2.enums
/**
* The status of this user's billing information.
*/
enum class BillingStatusType(override val value: String?) : StringValue {
/**
* The user's billing information is active.
*/
ACTIVE("active"),
/**
* The user's billing information has been canceled.
*/
CANCELED("canceled"),
/**
* The user's billing information has expired.
*/
EXPIRED("expired"),
/**
* The user's billing information is in a grace period before it's put on hold.
*/
GRACE_PERIOD("grace_period"),
/**
* The user's billing information is currently on hold.
*/
ON_HOLD("on_hold"),
/**
* Unknown billing status.
*/
UNKNOWN(null)
}
## Instruction:
Change expired to pending and updated comments
## Code After:
package com.vimeo.networking2.enums
/**
* The status of this user's billing information.
*/
enum class BillingStatusType(override val value: String?) : StringValue {
/**
* The user's billing information is active and will auto-renew.
*/
ACTIVE("active"),
/**
* The user's billing information has been canceled, but the subscription could still be active depending
* on if the expiration date has passed.
*/
CANCELED("canceled"),
/**
* The user's billing information is in a pending state and is likely to change.
*/
PENDING("pending"),
/**
* The user's billing information is in a grace period before it's put on hold while the auto-renew
* charge is retried.
*/
GRACE_PERIOD("grace_period"),
/**
* The user's billing information is currently on hold due to a payment failure and the grace period expiring.
* (Google Play only)
*/
ON_HOLD("on_hold"),
/**
* Unknown billing status.
*/
UNKNOWN(null)
}
|
# ... existing code ...
enum class BillingStatusType(override val value: String?) : StringValue {
/**
* The user's billing information is active and will auto-renew.
*/
ACTIVE("active"),
/**
* The user's billing information has been canceled, but the subscription could still be active depending
* on if the expiration date has passed.
*/
CANCELED("canceled"),
/**
* The user's billing information is in a pending state and is likely to change.
*/
PENDING("pending"),
/**
* The user's billing information is in a grace period before it's put on hold while the auto-renew
* charge is retried.
*/
GRACE_PERIOD("grace_period"),
/**
* The user's billing information is currently on hold due to a payment failure and the grace period expiring.
* (Google Play only)
*/
ON_HOLD("on_hold"),
# ... rest of the code ...
|
3ef98829f869fc94404894ef9ef315d673088608
|
test/number.c
|
test/number.c
|
// Copyright 2012 Rui Ueyama <[email protected]>
// This program is free software licensed under the MIT license.
#include "test.h"
void testmain(void) {
print("numeric constants");
expect(1, 0x1);
expect(17, 0x11);
expect(511, 0777);
expect(11, 0b1011); // GNU extension
expect(3, 3L);
expect(3, 3LL);
expect(3, 3UL);
expect(3, 3LU);
expect(3, 3ULL);
expect(3, 3LU);
expect(3, 3LLU);
expectd(55.3, 55.3);
expectd(200, 2e2);
expectd(0x0.DE488631p8, 0xDE.488631);
expect(4, sizeof(5));
expect(8, sizeof(5L));
expect(4, sizeof(3.0f));
expect(8, sizeof(3.0));
}
|
// Copyright 2012 Rui Ueyama <[email protected]>
// This program is free software licensed under the MIT license.
#include "test.h"
void testmain(void) {
print("numeric constants");
expect(1, 0x1);
expect(17, 0x11);
expect(511, 0777);
expect(11, 0b1011); // GNU extension
expect(11, 0B1011); // GNU extension
expect(3, 3L);
expect(3, 3LL);
expect(3, 3UL);
expect(3, 3LU);
expect(3, 3ULL);
expect(3, 3LU);
expect(3, 3LLU);
expectd(55.3, 55.3);
expectd(200, 2e2);
expectd(0x0.DE488631p8, 0xDE.488631);
expect(4, sizeof(5));
expect(8, sizeof(5L));
expect(4, sizeof(3.0f));
expect(8, sizeof(3.0));
}
|
Add a test for '0B' prefix.
|
Add a test for '0B' prefix.
|
C
|
mit
|
8l/8cc,rui314/8cc,gergo-/8cc,nobody1986/8cc,vastin/8cc,rui314/8cc,nobody1986/8cc,andrewchambers/8cc,vastin/8cc,8l/8cc,andrewchambers/8cc,vastin/8cc,8l/8cc,cpjreynolds/8cc,jtramm/8cc,abc00/8cc,8l/8cc,gergo-/8cc,gergo-/8cc,nobody1986/8cc,jtramm/8cc,andrewchambers/8cc,vastin/8cc,rui314/8cc,cpjreynolds/8cc,cpjreynolds/8cc,abc00/8cc,nobody1986/8cc,jtramm/8cc,jtramm/8cc,cpjreynolds/8cc,abc00/8cc,andrewchambers/8cc,abc00/8cc,rui314/8cc
|
c
|
## Code Before:
// Copyright 2012 Rui Ueyama <[email protected]>
// This program is free software licensed under the MIT license.
#include "test.h"
void testmain(void) {
print("numeric constants");
expect(1, 0x1);
expect(17, 0x11);
expect(511, 0777);
expect(11, 0b1011); // GNU extension
expect(3, 3L);
expect(3, 3LL);
expect(3, 3UL);
expect(3, 3LU);
expect(3, 3ULL);
expect(3, 3LU);
expect(3, 3LLU);
expectd(55.3, 55.3);
expectd(200, 2e2);
expectd(0x0.DE488631p8, 0xDE.488631);
expect(4, sizeof(5));
expect(8, sizeof(5L));
expect(4, sizeof(3.0f));
expect(8, sizeof(3.0));
}
## Instruction:
Add a test for '0B' prefix.
## Code After:
// Copyright 2012 Rui Ueyama <[email protected]>
// This program is free software licensed under the MIT license.
#include "test.h"
void testmain(void) {
print("numeric constants");
expect(1, 0x1);
expect(17, 0x11);
expect(511, 0777);
expect(11, 0b1011); // GNU extension
expect(11, 0B1011); // GNU extension
expect(3, 3L);
expect(3, 3LL);
expect(3, 3UL);
expect(3, 3LU);
expect(3, 3ULL);
expect(3, 3LU);
expect(3, 3LLU);
expectd(55.3, 55.3);
expectd(200, 2e2);
expectd(0x0.DE488631p8, 0xDE.488631);
expect(4, sizeof(5));
expect(8, sizeof(5L));
expect(4, sizeof(3.0f));
expect(8, sizeof(3.0));
}
|
# ... existing code ...
expect(17, 0x11);
expect(511, 0777);
expect(11, 0b1011); // GNU extension
expect(11, 0B1011); // GNU extension
expect(3, 3L);
expect(3, 3LL);
# ... rest of the code ...
|
840dce03718947498e72e561e7ddca22c4174915
|
django_olcc/olcc/context_processors.py
|
django_olcc/olcc/context_processors.py
|
from olcc.models import ImportRecord
"""
Inject the last import date into the request context.
"""
def last_updated(request):
record = ImportRecord.objects.latest()
if record:
return {
'last_updated': record.created_at
}
|
from olcc.models import ImportRecord
"""
Inject the last import date into the request context.
"""
def last_updated(request):
try:
return {
'last_updated': ImportRecord.objects.latest().created_at
}
except ImportRecord.DoesNotExist:
pass
|
Fix a DoesNotExist bug in the olcc context processor.
|
Fix a DoesNotExist bug in the olcc context processor.
|
Python
|
mit
|
twaddington/django-olcc,twaddington/django-olcc,twaddington/django-olcc
|
python
|
## Code Before:
from olcc.models import ImportRecord
"""
Inject the last import date into the request context.
"""
def last_updated(request):
record = ImportRecord.objects.latest()
if record:
return {
'last_updated': record.created_at
}
## Instruction:
Fix a DoesNotExist bug in the olcc context processor.
## Code After:
from olcc.models import ImportRecord
"""
Inject the last import date into the request context.
"""
def last_updated(request):
try:
return {
'last_updated': ImportRecord.objects.latest().created_at
}
except ImportRecord.DoesNotExist:
pass
|
# ... existing code ...
Inject the last import date into the request context.
"""
def last_updated(request):
try:
return {
'last_updated': ImportRecord.objects.latest().created_at
}
except ImportRecord.DoesNotExist:
pass
# ... rest of the code ...
|
e1cf80ea96f8f668211fb99780b20db7c1c4097a
|
phoneProfilesPlus/src/main/java/sk/henrichg/phoneprofilesplus/ActivatorTargetHelpsActivity.java
|
phoneProfilesPlus/src/main/java/sk/henrichg/phoneprofilesplus/ActivatorTargetHelpsActivity.java
|
package sk.henrichg.phoneprofilesplus;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
public class ActivatorTargetHelpsActivity extends AppCompatActivity {
public static ActivatorTargetHelpsActivity activity;
//public static ActivateProfileActivity activatorActivity;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
overridePendingTransition(0, 0);
activity = this;
}
@Override
protected void onResume()
{
super.onResume();
if (ActivateProfileActivity.getInstance() == null) {
finish();
return;
}
ActivateProfileActivity.getInstance().showTargetHelps();
}
@Override
public void finish()
{
super.finish();
overridePendingTransition(0, 0);
}
}
|
package sk.henrichg.phoneprofilesplus;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
public class ActivatorTargetHelpsActivity extends AppCompatActivity {
public static ActivatorTargetHelpsActivity activity;
//public static ActivateProfileActivity activatorActivity;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
overridePendingTransition(0, 0);
activity = this;
}
@Override
protected void onResume()
{
super.onResume();
if (ActivateProfileActivity.getInstance() == null) {
finish();
return;
}
GlobalGUIRoutines.setTheme(this, true, false, false);
GlobalGUIRoutines.setLanguage(getBaseContext());
ActivateProfileActivity.getInstance().showTargetHelps();
}
@Override
public void finish()
{
super.finish();
overridePendingTransition(0, 0);
}
}
|
Set theme for activator target help.
|
Set theme for activator target help.
|
Java
|
apache-2.0
|
henrichg/PhoneProfilesPlus
|
java
|
## Code Before:
package sk.henrichg.phoneprofilesplus;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
public class ActivatorTargetHelpsActivity extends AppCompatActivity {
public static ActivatorTargetHelpsActivity activity;
//public static ActivateProfileActivity activatorActivity;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
overridePendingTransition(0, 0);
activity = this;
}
@Override
protected void onResume()
{
super.onResume();
if (ActivateProfileActivity.getInstance() == null) {
finish();
return;
}
ActivateProfileActivity.getInstance().showTargetHelps();
}
@Override
public void finish()
{
super.finish();
overridePendingTransition(0, 0);
}
}
## Instruction:
Set theme for activator target help.
## Code After:
package sk.henrichg.phoneprofilesplus;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
public class ActivatorTargetHelpsActivity extends AppCompatActivity {
public static ActivatorTargetHelpsActivity activity;
//public static ActivateProfileActivity activatorActivity;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
overridePendingTransition(0, 0);
activity = this;
}
@Override
protected void onResume()
{
super.onResume();
if (ActivateProfileActivity.getInstance() == null) {
finish();
return;
}
GlobalGUIRoutines.setTheme(this, true, false, false);
GlobalGUIRoutines.setLanguage(getBaseContext());
ActivateProfileActivity.getInstance().showTargetHelps();
}
@Override
public void finish()
{
super.finish();
overridePendingTransition(0, 0);
}
}
|
# ... existing code ...
return;
}
GlobalGUIRoutines.setTheme(this, true, false, false);
GlobalGUIRoutines.setLanguage(getBaseContext());
ActivateProfileActivity.getInstance().showTargetHelps();
}
# ... rest of the code ...
|
678594fb68845d3aec80c935fc0cd0fe89ce26b5
|
shakedown/dcos/service.py
|
shakedown/dcos/service.py
|
from dcos import (marathon, mesos, package, util)
from dcos.errors import DCOSException
def get_service(service_name, inactive=False, completed=False):
services = mesos.get_master().frameworks(inactive=inactive, completed=completed)
for service in services:
if service['name'] == service_name:
return service
return False
def get_service_framework_id(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service and service['id']:
return service['id']
return False
def get_service_tasks(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service and service['tasks']:
return service['tasks']
return False
|
from dcos import mesos
def get_service(service_name, inactive=False, completed=False):
services = mesos.get_master().frameworks(inactive=inactive, completed=completed)
for service in services:
if service['name'] == service_name:
return service
return None
def get_service_framework_id(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service is not None and service['id']:
return service['id']
return None
def get_service_tasks(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service is not None and service['tasks']:
return service['tasks']
return []
|
Return None or Empty List
|
Return None or Empty List
Return None when an object cannot be found or an empty list when a list
type is expected.
|
Python
|
apache-2.0
|
dcos/shakedown
|
python
|
## Code Before:
from dcos import (marathon, mesos, package, util)
from dcos.errors import DCOSException
def get_service(service_name, inactive=False, completed=False):
services = mesos.get_master().frameworks(inactive=inactive, completed=completed)
for service in services:
if service['name'] == service_name:
return service
return False
def get_service_framework_id(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service and service['id']:
return service['id']
return False
def get_service_tasks(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service and service['tasks']:
return service['tasks']
return False
## Instruction:
Return None or Empty List
Return None when an object cannot be found or an empty list when a list
type is expected.
## Code After:
from dcos import mesos
def get_service(service_name, inactive=False, completed=False):
services = mesos.get_master().frameworks(inactive=inactive, completed=completed)
for service in services:
if service['name'] == service_name:
return service
return None
def get_service_framework_id(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service is not None and service['id']:
return service['id']
return None
def get_service_tasks(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service is not None and service['tasks']:
return service['tasks']
return []
|
...
from dcos import mesos
def get_service(service_name, inactive=False, completed=False):
...
if service['name'] == service_name:
return service
return None
def get_service_framework_id(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service is not None and service['id']:
return service['id']
return None
def get_service_tasks(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service is not None and service['tasks']:
return service['tasks']
return []
...
|
2cb2779bfe1ddfcd6651665276ed0a1d513c57de
|
fireplace/cards/wog/shaman.py
|
fireplace/cards/wog/shaman.py
|
from ..utils import *
##
# Minions
class OG_023:
"Primal Fusion"
play = Buff(TARGET, "OG_023t") * Count(FRIENDLY_MINIONS + TOTEM)
OG_023t = buff(+1, +1)
class OG_209:
"Hallazeal the Ascended"
events = Damage(source=SPELL + FRIENDLY).on(Heal(FRIENDLY_HERO, Damage.AMOUNT))
|
from ..utils import *
##
# Minions
class OG_023:
"Primal Fusion"
play = Buff(TARGET, "OG_023t") * Count(FRIENDLY_MINIONS + TOTEM)
OG_023t = buff(+1, +1)
class OG_026:
"Eternal Sentinel"
play = UnlockOverload(CONTROLLER)
class OG_209:
"Hallazeal the Ascended"
events = Damage(source=SPELL + FRIENDLY).on(Heal(FRIENDLY_HERO, Damage.AMOUNT))
##
# Spells
class OG_206:
"Stormcrack"
play = Hit(TARGET, 4)
##
# Weapons
class OG_031:
"Hammer of Twilight"
deathrattle = Summon(CONTROLLER, "OG_031a")
|
Implement Eternal Sentinel, Stormcrack and Hammer of Twilight
|
Implement Eternal Sentinel, Stormcrack and Hammer of Twilight
|
Python
|
agpl-3.0
|
NightKev/fireplace,beheh/fireplace,jleclanche/fireplace
|
python
|
## Code Before:
from ..utils import *
##
# Minions
class OG_023:
"Primal Fusion"
play = Buff(TARGET, "OG_023t") * Count(FRIENDLY_MINIONS + TOTEM)
OG_023t = buff(+1, +1)
class OG_209:
"Hallazeal the Ascended"
events = Damage(source=SPELL + FRIENDLY).on(Heal(FRIENDLY_HERO, Damage.AMOUNT))
## Instruction:
Implement Eternal Sentinel, Stormcrack and Hammer of Twilight
## Code After:
from ..utils import *
##
# Minions
class OG_023:
"Primal Fusion"
play = Buff(TARGET, "OG_023t") * Count(FRIENDLY_MINIONS + TOTEM)
OG_023t = buff(+1, +1)
class OG_026:
"Eternal Sentinel"
play = UnlockOverload(CONTROLLER)
class OG_209:
"Hallazeal the Ascended"
events = Damage(source=SPELL + FRIENDLY).on(Heal(FRIENDLY_HERO, Damage.AMOUNT))
##
# Spells
class OG_206:
"Stormcrack"
play = Hit(TARGET, 4)
##
# Weapons
class OG_031:
"Hammer of Twilight"
deathrattle = Summon(CONTROLLER, "OG_031a")
|
# ... existing code ...
OG_023t = buff(+1, +1)
class OG_026:
"Eternal Sentinel"
play = UnlockOverload(CONTROLLER)
class OG_209:
"Hallazeal the Ascended"
events = Damage(source=SPELL + FRIENDLY).on(Heal(FRIENDLY_HERO, Damage.AMOUNT))
##
# Spells
class OG_206:
"Stormcrack"
play = Hit(TARGET, 4)
##
# Weapons
class OG_031:
"Hammer of Twilight"
deathrattle = Summon(CONTROLLER, "OG_031a")
# ... rest of the code ...
|
cbaa4b9979fc4e2d04b302a7d9eeccb3d7068f4f
|
src/main/java/edu/harvard/iq/dataverse/DataverseUserServiceBean.java
|
src/main/java/edu/harvard/iq/dataverse/DataverseUserServiceBean.java
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package edu.harvard.iq.dataverse;
import java.util.List;
import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.inject.Named;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
/**
*
* @author xyang
*/
@Stateless
@Named
public class DataverseUserServiceBean {
@PersistenceContext(unitName = "VDCNet-ejbPU")
private EntityManager em;
public String encryptPassword(String plainText) {
return PasswordEncryption.getInstance().encrypt(plainText);
}
public DataverseUser save(DataverseUser dataverseUser) {
return em.merge(dataverseUser);
}
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package edu.harvard.iq.dataverse;
import java.util.List;
import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.inject.Named;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
/**
*
* @author xyang
*/
@Stateless
@Named
public class DataverseUserServiceBean {
@PersistenceContext(unitName = "VDCNet-ejbPU")
private EntityManager em;
public String encryptPassword(String plainText) {
return PasswordEncryption.getInstance().encrypt(plainText);
}
public DataverseUser save(DataverseUser dataverseUser) {
return em.merge(dataverseUser);
}
/*
public List<DataverseUser> findByUserName(String userName) {
Query query = em.createQuery("select object(o) from DataverseUser as o where o.userName =:userName");
query.setParameter("userName", userName);
return query.getResultList();
}
*/
public DataverseUser findByUserName(String userName) {
String query = "SELECT u from DataverseUser u where u.userName = :userName ";
DataverseUser user = null;
try {
user = (DataverseUser) em.createQuery(query).setParameter("userName", userName).getSingleResult();
} catch (javax.persistence.NoResultException e) {
}
return user;
}
}
|
Check if a username exists.
|
Check if a username exists.
|
Java
|
apache-2.0
|
jacksonokuhn/dataverse,bmckinney/dataverse-canonical,bmckinney/dataverse-canonical,jacksonokuhn/dataverse,ekoi/DANS-DVN-4.6.1,quarian/dataverse,quarian/dataverse,leeper/dataverse-1,majorseitan/dataverse,quarian/dataverse,jacksonokuhn/dataverse,bmckinney/dataverse-canonical,leeper/dataverse-1,leeper/dataverse-1,jacksonokuhn/dataverse,quarian/dataverse,quarian/dataverse,bmckinney/dataverse-canonical,ekoi/DANS-DVN-4.6.1,bmckinney/dataverse-canonical,JayanthyChengan/dataverse,jacksonokuhn/dataverse,leeper/dataverse-1,ekoi/DANS-DVN-4.6.1,majorseitan/dataverse,majorseitan/dataverse,JayanthyChengan/dataverse,bmckinney/dataverse-canonical,quarian/dataverse,JayanthyChengan/dataverse,ekoi/DANS-DVN-4.6.1,JayanthyChengan/dataverse,majorseitan/dataverse,jacksonokuhn/dataverse,bmckinney/dataverse-canonical,quarian/dataverse,leeper/dataverse-1,majorseitan/dataverse,majorseitan/dataverse,leeper/dataverse-1,ekoi/DANS-DVN-4.6.1,JayanthyChengan/dataverse,quarian/dataverse,JayanthyChengan/dataverse,ekoi/DANS-DVN-4.6.1,bmckinney/dataverse-canonical,majorseitan/dataverse,ekoi/DANS-DVN-4.6.1,JayanthyChengan/dataverse,JayanthyChengan/dataverse,leeper/dataverse-1,ekoi/DANS-DVN-4.6.1,jacksonokuhn/dataverse,jacksonokuhn/dataverse,leeper/dataverse-1,majorseitan/dataverse
|
java
|
## Code Before:
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package edu.harvard.iq.dataverse;
import java.util.List;
import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.inject.Named;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
/**
*
* @author xyang
*/
@Stateless
@Named
public class DataverseUserServiceBean {
@PersistenceContext(unitName = "VDCNet-ejbPU")
private EntityManager em;
public String encryptPassword(String plainText) {
return PasswordEncryption.getInstance().encrypt(plainText);
}
public DataverseUser save(DataverseUser dataverseUser) {
return em.merge(dataverseUser);
}
}
## Instruction:
Check if a username exists.
## Code After:
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package edu.harvard.iq.dataverse;
import java.util.List;
import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.inject.Named;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
/**
*
* @author xyang
*/
@Stateless
@Named
public class DataverseUserServiceBean {
@PersistenceContext(unitName = "VDCNet-ejbPU")
private EntityManager em;
public String encryptPassword(String plainText) {
return PasswordEncryption.getInstance().encrypt(plainText);
}
public DataverseUser save(DataverseUser dataverseUser) {
return em.merge(dataverseUser);
}
/*
public List<DataverseUser> findByUserName(String userName) {
Query query = em.createQuery("select object(o) from DataverseUser as o where o.userName =:userName");
query.setParameter("userName", userName);
return query.getResultList();
}
*/
public DataverseUser findByUserName(String userName) {
String query = "SELECT u from DataverseUser u where u.userName = :userName ";
DataverseUser user = null;
try {
user = (DataverseUser) em.createQuery(query).setParameter("userName", userName).getSingleResult();
} catch (javax.persistence.NoResultException e) {
}
return user;
}
}
|
...
public DataverseUser save(DataverseUser dataverseUser) {
return em.merge(dataverseUser);
}
/*
public List<DataverseUser> findByUserName(String userName) {
Query query = em.createQuery("select object(o) from DataverseUser as o where o.userName =:userName");
query.setParameter("userName", userName);
return query.getResultList();
}
*/
public DataverseUser findByUserName(String userName) {
String query = "SELECT u from DataverseUser u where u.userName = :userName ";
DataverseUser user = null;
try {
user = (DataverseUser) em.createQuery(query).setParameter("userName", userName).getSingleResult();
} catch (javax.persistence.NoResultException e) {
}
return user;
}
}
...
|
ab83da7b6152dfdc50cf40fa328f3c3d24c13861
|
emailmgr/urls.py
|
emailmgr/urls.py
|
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from views import email_add, email_list, email_delete, \
email_send_activation, email_activate, email_make_primary
#add an email to a User account
urlpatterns = patterns('',
url(
r'^email/add/$',
email_add,
name='emailmgr_email_add'
),
url(
r'^email/send_activation/(?P<identifier>\w+)/$',
email_send_activation,
name='emailmgr_email_send_activation'
),
url(
r'^email/activate/(?P<identifier>\w+)/$',
email_activate,
name='emailmgr_email_activate'
),
url(
r'^email/make_primary/(?P<identifier>\w+)/$',
email_make_primary,
name='emailmgr_email_make_primary'
),
url(
r'^email/delete/(?P<identifier>\w+)/$',
email_delete,
name='emailmgr_email_delete'
),
url(
r'^email/$',
email_list,
name='emailmgr_email_list'
),
)
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from views import email_add, email_list, email_delete, \
email_send_activation, email_activate, email_make_primary
#add an email to a User account
urlpatterns = patterns('',
url(
r'^email/add/$',
email_add,
name='emailmgr_email_add'
),
url(
r'^email/send_activation/(?P<identifier>\w+)/$',
email_send_activation,
name='emailmgr_email_send_activation'
),
url(
r'^email/activate/(?P<identifier>\w+)/$',
email_activate,
name='emailmgr_email_activate'
),
url(
r'^email/make_primary/(?P<identifier>\w+)/$',
email_make_primary,
name='emailmgr_email_make_primary'
),
url(
r'^email/delete/(?P<identifier>\w+)/$',
email_delete,
name='emailmgr_email_delete'
),
url(
r'^email/$',
email_list,
name='emailmgr_email_list'
),
)
|
Fix import for django 1.6
|
Fix import for django 1.6
|
Python
|
bsd-3-clause
|
un33k/django-emailmgr,un33k/django-emailmgr
|
python
|
## Code Before:
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from views import email_add, email_list, email_delete, \
email_send_activation, email_activate, email_make_primary
#add an email to a User account
urlpatterns = patterns('',
url(
r'^email/add/$',
email_add,
name='emailmgr_email_add'
),
url(
r'^email/send_activation/(?P<identifier>\w+)/$',
email_send_activation,
name='emailmgr_email_send_activation'
),
url(
r'^email/activate/(?P<identifier>\w+)/$',
email_activate,
name='emailmgr_email_activate'
),
url(
r'^email/make_primary/(?P<identifier>\w+)/$',
email_make_primary,
name='emailmgr_email_make_primary'
),
url(
r'^email/delete/(?P<identifier>\w+)/$',
email_delete,
name='emailmgr_email_delete'
),
url(
r'^email/$',
email_list,
name='emailmgr_email_list'
),
)
## Instruction:
Fix import for django 1.6
## Code After:
from django.conf.urls import patterns, include, url
from django.conf import settings
from views import email_add, email_list, email_delete, \
email_send_activation, email_activate, email_make_primary
#add an email to a User account
urlpatterns = patterns('',
url(
r'^email/add/$',
email_add,
name='emailmgr_email_add'
),
url(
r'^email/send_activation/(?P<identifier>\w+)/$',
email_send_activation,
name='emailmgr_email_send_activation'
),
url(
r'^email/activate/(?P<identifier>\w+)/$',
email_activate,
name='emailmgr_email_activate'
),
url(
r'^email/make_primary/(?P<identifier>\w+)/$',
email_make_primary,
name='emailmgr_email_make_primary'
),
url(
r'^email/delete/(?P<identifier>\w+)/$',
email_delete,
name='emailmgr_email_delete'
),
url(
r'^email/$',
email_list,
name='emailmgr_email_list'
),
)
|
// ... existing code ...
from django.conf.urls import patterns, include, url
from django.conf import settings
from views import email_add, email_list, email_delete, \
email_send_activation, email_activate, email_make_primary
// ... rest of the code ...
|
148b9de1809ebc1f11bf6daa7663403617410372
|
Scanner/src/main/java/rabbit/CommandCreator.java
|
Scanner/src/main/java/rabbit/CommandCreator.java
|
package rabbit;
public class CommandCreator {
public String createCommand(String userCommand) {
String[] tokens = userCommand.split("\\s+");
switch(tokens[0]) {
case "availability":
return "nmap -sV " + tokens[1];
case "security":
switch (tokens[1]) {
case "tls":
return "nmap --script ssl-enum-ciphers -p 443 " + tokens[2];
case "ecrypt2lvl":
return "nmap --script ssl-enum-ciphers -p 443" + tokens[2];
case "open_ports":
return "nmap " + tokens[2];
default :
return null;
}
default :
return null;
}
}
}
|
package rabbit;
public class CommandCreator {
public String createCommand(String userCommand) {
String[] tokens = userCommand.split("\\s+");
switch(tokens[0]) {
case "security":
switch (tokens[1]) {
case "tls":
return "nmap --script ssl-enum-ciphers -p 443 " + tokens[2];
case "ecrypt2lvl":
return "nmap --script ssl-enum-ciphers -p 443" + tokens[2];
case "open_ports":
return "nmap " + tokens[2];
default :
return null;
}
default :
return null;
}
}
}
|
Remove check for "availability" command because this case is treated separately.
|
Remove check for "availability" command because this case is treated separately.
|
Java
|
apache-2.0
|
IrimieBogdan/DistributedMonitoring,IrimieBogdan/DistributedMonitoring,IrimieBogdan/DistributedMonitoring
|
java
|
## Code Before:
package rabbit;
public class CommandCreator {
public String createCommand(String userCommand) {
String[] tokens = userCommand.split("\\s+");
switch(tokens[0]) {
case "availability":
return "nmap -sV " + tokens[1];
case "security":
switch (tokens[1]) {
case "tls":
return "nmap --script ssl-enum-ciphers -p 443 " + tokens[2];
case "ecrypt2lvl":
return "nmap --script ssl-enum-ciphers -p 443" + tokens[2];
case "open_ports":
return "nmap " + tokens[2];
default :
return null;
}
default :
return null;
}
}
}
## Instruction:
Remove check for "availability" command because this case is treated separately.
## Code After:
package rabbit;
public class CommandCreator {
public String createCommand(String userCommand) {
String[] tokens = userCommand.split("\\s+");
switch(tokens[0]) {
case "security":
switch (tokens[1]) {
case "tls":
return "nmap --script ssl-enum-ciphers -p 443 " + tokens[2];
case "ecrypt2lvl":
return "nmap --script ssl-enum-ciphers -p 443" + tokens[2];
case "open_ports":
return "nmap " + tokens[2];
default :
return null;
}
default :
return null;
}
}
}
|
...
String[] tokens = userCommand.split("\\s+");
switch(tokens[0]) {
case "security":
switch (tokens[1]) {
case "tls":
...
|
bab5b61ff3e26ec9f1053645192b0bc7677a8cf6
|
vilebot/src/com/oldterns/vilebot/handlers/admin/GetLog.java
|
vilebot/src/com/oldterns/vilebot/handlers/admin/GetLog.java
|
package com.oldterns.vilebot.handlers.admin;
import ca.szc.keratin.bot.annotation.HandlerContainer;
import ca.szc.keratin.core.event.message.recieve.ReceivePrivmsg;
import com.oldterns.vilebot.db.LogDB;
import net.engio.mbassy.listener.Handler;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Created by eunderhi on 18/08/15.
*/
@HandlerContainer
public class GetLog {
private static final Pattern showLog = Pattern.compile("^!showLog$");
private static final Pattern deleteLog = Pattern.compile("^!deleteLog$");
@Handler
private void getLog(ReceivePrivmsg event) {
String text = event.getText();
boolean showLogMatches = showLog.matcher(text).matches();
boolean deleteLogMatches = deleteLog.matcher(text).matches();
if (showLogMatches) {
event.reply(LogDB.getLog());
}
else if(deleteLogMatches) {
LogDB.deleteLog();
}
}
}
|
package com.oldterns.vilebot.handlers.admin;
import ca.szc.keratin.bot.annotation.HandlerContainer;
import ca.szc.keratin.core.event.message.recieve.ReceivePrivmsg;
import com.oldterns.vilebot.db.GroupDB;
import com.oldterns.vilebot.db.LogDB;
import com.oldterns.vilebot.util.Sessions;
import net.engio.mbassy.listener.Handler;
import java.util.regex.Pattern;
/**
* Created by eunderhi on 18/08/15.
*/
@HandlerContainer
public class GetLog {
private static final Pattern showLog = Pattern.compile("!admin showLog$");
private static final Pattern deleteLog = Pattern.compile("!admin deleteLog$");
@Handler
private void getLog(ReceivePrivmsg event) {
String text = event.getText();
String sender = event.getSender();
String username = Sessions.getSession(sender);
boolean showLogMatches = showLog.matcher(text).matches();
boolean deleteLogMatches = deleteLog.matcher(text).matches();
if(GroupDB.isAdmin(username)) {
if (showLogMatches) {
event.reply(LogDB.getLog());
} else if (deleteLogMatches) {
LogDB.deleteLog();
}
}
}
}
|
Make showLog and deleteLog admin commands
|
Make showLog and deleteLog admin commands
|
Java
|
mit
|
oldterns/VileBot,emmettu/VileBot,itpun/VileBot,emmettu/VileBot,oldterns/VileBot,itpun/VileBot
|
java
|
## Code Before:
package com.oldterns.vilebot.handlers.admin;
import ca.szc.keratin.bot.annotation.HandlerContainer;
import ca.szc.keratin.core.event.message.recieve.ReceivePrivmsg;
import com.oldterns.vilebot.db.LogDB;
import net.engio.mbassy.listener.Handler;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Created by eunderhi on 18/08/15.
*/
@HandlerContainer
public class GetLog {
private static final Pattern showLog = Pattern.compile("^!showLog$");
private static final Pattern deleteLog = Pattern.compile("^!deleteLog$");
@Handler
private void getLog(ReceivePrivmsg event) {
String text = event.getText();
boolean showLogMatches = showLog.matcher(text).matches();
boolean deleteLogMatches = deleteLog.matcher(text).matches();
if (showLogMatches) {
event.reply(LogDB.getLog());
}
else if(deleteLogMatches) {
LogDB.deleteLog();
}
}
}
## Instruction:
Make showLog and deleteLog admin commands
## Code After:
package com.oldterns.vilebot.handlers.admin;
import ca.szc.keratin.bot.annotation.HandlerContainer;
import ca.szc.keratin.core.event.message.recieve.ReceivePrivmsg;
import com.oldterns.vilebot.db.GroupDB;
import com.oldterns.vilebot.db.LogDB;
import com.oldterns.vilebot.util.Sessions;
import net.engio.mbassy.listener.Handler;
import java.util.regex.Pattern;
/**
* Created by eunderhi on 18/08/15.
*/
@HandlerContainer
public class GetLog {
private static final Pattern showLog = Pattern.compile("!admin showLog$");
private static final Pattern deleteLog = Pattern.compile("!admin deleteLog$");
@Handler
private void getLog(ReceivePrivmsg event) {
String text = event.getText();
String sender = event.getSender();
String username = Sessions.getSession(sender);
boolean showLogMatches = showLog.matcher(text).matches();
boolean deleteLogMatches = deleteLog.matcher(text).matches();
if(GroupDB.isAdmin(username)) {
if (showLogMatches) {
event.reply(LogDB.getLog());
} else if (deleteLogMatches) {
LogDB.deleteLog();
}
}
}
}
|
...
import ca.szc.keratin.bot.annotation.HandlerContainer;
import ca.szc.keratin.core.event.message.recieve.ReceivePrivmsg;
import com.oldterns.vilebot.db.GroupDB;
import com.oldterns.vilebot.db.LogDB;
import com.oldterns.vilebot.util.Sessions;
import net.engio.mbassy.listener.Handler;
import java.util.regex.Pattern;
/**
...
@HandlerContainer
public class GetLog {
private static final Pattern showLog = Pattern.compile("!admin showLog$");
private static final Pattern deleteLog = Pattern.compile("!admin deleteLog$");
@Handler
private void getLog(ReceivePrivmsg event) {
String text = event.getText();
String sender = event.getSender();
String username = Sessions.getSession(sender);
boolean showLogMatches = showLog.matcher(text).matches();
boolean deleteLogMatches = deleteLog.matcher(text).matches();
if(GroupDB.isAdmin(username)) {
if (showLogMatches) {
event.reply(LogDB.getLog());
} else if (deleteLogMatches) {
LogDB.deleteLog();
}
}
}
}
...
|
43bae84b1359d56ad150b49b38c2f8d400b05af2
|
opps/core/cache/managers.py
|
opps/core/cache/managers.py
|
from django.db import models
from django.core.cache import cache
from django.conf import settings
class CacheManager(models.Manager):
def __cache_key(self, id):
return u'{}:{}:{}'.format(settings.CACHE_PREFIX,
self.model._meta.db_table,
id)
def get(self, *args, **kwargs):
id = repr(kwargs)
pointer_key = self.__cache_key(id)
model_key = cache.get(pointer_key)
if model_key is not None:
model = cache.get(model_key)
if model is not None:
return model
model = super(CacheManager, self).get(*args, **kwargs)
if not model_key:
model_key = self.__cache_key(model, model.pk)
cache.set(pointer_key, model_key, settings.CACHE_EXPIRE)
cache.set(model_key, model, settings.CACHE_EXPIRE)
return model
|
from django.db import models
from django.core.cache import cache
from django.conf import settings
def _cache_key(model, id):
return u'{}:{}:{}'.format(settings.CACHE_PREFIX,
model._meta.db_table,
id)
class CacheManager(models.Manager):
def get(self, *args, **kwargs):
id = repr(kwargs)
pointer_key = _cache_key(self.model, id)
model_key = cache.get(pointer_key)
if model_key is not None:
model = cache.get(model_key)
if model is not None:
return model
model = super(CacheManager, self).get(*args, **kwargs)
if not model_key:
model_key = _cache_key(model, model.pk)
cache.set(pointer_key, model_key, settings.CACHE_EXPIRE)
cache.set(model_key, model, settings.CACHE_EXPIRE)
return model
|
Fix cache key set, on core cache
|
Fix cache key set, on core cache
|
Python
|
mit
|
jeanmask/opps,williamroot/opps,opps/opps,jeanmask/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,williamroot/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,opps/opps,opps/opps,opps/opps
|
python
|
## Code Before:
from django.db import models
from django.core.cache import cache
from django.conf import settings
class CacheManager(models.Manager):
def __cache_key(self, id):
return u'{}:{}:{}'.format(settings.CACHE_PREFIX,
self.model._meta.db_table,
id)
def get(self, *args, **kwargs):
id = repr(kwargs)
pointer_key = self.__cache_key(id)
model_key = cache.get(pointer_key)
if model_key is not None:
model = cache.get(model_key)
if model is not None:
return model
model = super(CacheManager, self).get(*args, **kwargs)
if not model_key:
model_key = self.__cache_key(model, model.pk)
cache.set(pointer_key, model_key, settings.CACHE_EXPIRE)
cache.set(model_key, model, settings.CACHE_EXPIRE)
return model
## Instruction:
Fix cache key set, on core cache
## Code After:
from django.db import models
from django.core.cache import cache
from django.conf import settings
def _cache_key(model, id):
return u'{}:{}:{}'.format(settings.CACHE_PREFIX,
model._meta.db_table,
id)
class CacheManager(models.Manager):
def get(self, *args, **kwargs):
id = repr(kwargs)
pointer_key = _cache_key(self.model, id)
model_key = cache.get(pointer_key)
if model_key is not None:
model = cache.get(model_key)
if model is not None:
return model
model = super(CacheManager, self).get(*args, **kwargs)
if not model_key:
model_key = _cache_key(model, model.pk)
cache.set(pointer_key, model_key, settings.CACHE_EXPIRE)
cache.set(model_key, model, settings.CACHE_EXPIRE)
return model
|
...
from django.conf import settings
def _cache_key(model, id):
return u'{}:{}:{}'.format(settings.CACHE_PREFIX,
model._meta.db_table,
id)
class CacheManager(models.Manager):
def get(self, *args, **kwargs):
id = repr(kwargs)
pointer_key = _cache_key(self.model, id)
model_key = cache.get(pointer_key)
if model_key is not None:
...
model = super(CacheManager, self).get(*args, **kwargs)
if not model_key:
model_key = _cache_key(model, model.pk)
cache.set(pointer_key, model_key, settings.CACHE_EXPIRE)
cache.set(model_key, model, settings.CACHE_EXPIRE)
...
|
ac04cd5301d6aa4788fbd2d6bdaeb207f77a489e
|
alfred_listener/views.py
|
alfred_listener/views.py
|
from flask import Blueprint, request, json
from alfred_db.models import Repository, Commit
from .database import db
from .helpers import parse_hook_data
webhooks = Blueprint('webhooks', __name__)
@webhooks.route('/', methods=['POST'])
def handler():
payload = request.form.get('payload', '')
try:
payload_data = json.loads(payload)
except ValueError:
return 'Bad request', 400
hook_data = parse_hook_data(payload_data)
repository = db.session.query(Repository).filter_by(
name=hook_data['repo_name'], user=hook_data['repo_user']
).first()
if repository is None:
repository = Repository()
repository.name = hook_data['repo_name']
repository.user = hook_data['repo_user']
repository.url = hook_data['repo_url']
db.session.add(repository)
db.session.commit()
commit = db.session.query(Commit).filter_by(
hash=hook_data['hash'], repository_id=repository.id
).first()
if commit is None:
commit = Commit()
commit.repository_id = repository.id
commit.hash = hook_data['hash']
commit.ref = hook_data['ref']
commit.compare_url = hook_data['compare_url']
commit.committer_name = hook_data['committer_name']
commit.committer_email = hook_data['committer_email']
commit.message = hook_data['message']
db.session.add(commit)
db.session.commit()
return 'OK'
|
from flask import Blueprint, request, json
from alfred_db.models import Repository, Commit
from .database import db
from .helpers import parse_hook_data
webhooks = Blueprint('webhooks', __name__)
@webhooks.route('/', methods=['POST'])
def handler():
payload = request.form.get('payload', '')
try:
payload_data = json.loads(payload)
except ValueError:
return 'Bad request', 400
hook_data = parse_hook_data(payload_data)
repository = db.session.query(Repository).filter_by(
name=hook_data['repo_name'], user=hook_data['repo_user']
).first()
if repository is None:
repository = Repository(
name=hook_data['repo_name'],
user=hook_data['repo_user'],
url=hook_data['repo_url']
)
db.session.add(repository)
db.session.commit()
commit = db.session.query(Commit).filter_by(
hash=hook_data['hash'], repository_id=repository.id
).first()
if commit is None:
commit = Commit(
repository_id=repository.id,
hash=hook_data['hash'],
ref=hook_data['ref'],
compare_url=hook_data['compare_url'],
committer_name=hook_data['committer_name'],
committer_email=hook_data['committer_email'],
message=hook_data['message']
)
db.session.add(commit)
db.session.commit()
return 'OK'
|
Change the way to instaniate models
|
Change the way to instaniate models
|
Python
|
isc
|
alfredhq/alfred-listener
|
python
|
## Code Before:
from flask import Blueprint, request, json
from alfred_db.models import Repository, Commit
from .database import db
from .helpers import parse_hook_data
webhooks = Blueprint('webhooks', __name__)
@webhooks.route('/', methods=['POST'])
def handler():
payload = request.form.get('payload', '')
try:
payload_data = json.loads(payload)
except ValueError:
return 'Bad request', 400
hook_data = parse_hook_data(payload_data)
repository = db.session.query(Repository).filter_by(
name=hook_data['repo_name'], user=hook_data['repo_user']
).first()
if repository is None:
repository = Repository()
repository.name = hook_data['repo_name']
repository.user = hook_data['repo_user']
repository.url = hook_data['repo_url']
db.session.add(repository)
db.session.commit()
commit = db.session.query(Commit).filter_by(
hash=hook_data['hash'], repository_id=repository.id
).first()
if commit is None:
commit = Commit()
commit.repository_id = repository.id
commit.hash = hook_data['hash']
commit.ref = hook_data['ref']
commit.compare_url = hook_data['compare_url']
commit.committer_name = hook_data['committer_name']
commit.committer_email = hook_data['committer_email']
commit.message = hook_data['message']
db.session.add(commit)
db.session.commit()
return 'OK'
## Instruction:
Change the way to instaniate models
## Code After:
from flask import Blueprint, request, json
from alfred_db.models import Repository, Commit
from .database import db
from .helpers import parse_hook_data
webhooks = Blueprint('webhooks', __name__)
@webhooks.route('/', methods=['POST'])
def handler():
payload = request.form.get('payload', '')
try:
payload_data = json.loads(payload)
except ValueError:
return 'Bad request', 400
hook_data = parse_hook_data(payload_data)
repository = db.session.query(Repository).filter_by(
name=hook_data['repo_name'], user=hook_data['repo_user']
).first()
if repository is None:
repository = Repository(
name=hook_data['repo_name'],
user=hook_data['repo_user'],
url=hook_data['repo_url']
)
db.session.add(repository)
db.session.commit()
commit = db.session.query(Commit).filter_by(
hash=hook_data['hash'], repository_id=repository.id
).first()
if commit is None:
commit = Commit(
repository_id=repository.id,
hash=hook_data['hash'],
ref=hook_data['ref'],
compare_url=hook_data['compare_url'],
committer_name=hook_data['committer_name'],
committer_email=hook_data['committer_email'],
message=hook_data['message']
)
db.session.add(commit)
db.session.commit()
return 'OK'
|
...
name=hook_data['repo_name'], user=hook_data['repo_user']
).first()
if repository is None:
repository = Repository(
name=hook_data['repo_name'],
user=hook_data['repo_user'],
url=hook_data['repo_url']
)
db.session.add(repository)
db.session.commit()
...
hash=hook_data['hash'], repository_id=repository.id
).first()
if commit is None:
commit = Commit(
repository_id=repository.id,
hash=hook_data['hash'],
ref=hook_data['ref'],
compare_url=hook_data['compare_url'],
committer_name=hook_data['committer_name'],
committer_email=hook_data['committer_email'],
message=hook_data['message']
)
db.session.add(commit)
db.session.commit()
return 'OK'
...
|
64c684b2adf44decf19f60e801fc2e280cfc8342
|
setup.py
|
setup.py
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
# Basic package information.
name = 'prestapyt',
use_scm_version=True,
# Packaging options.
include_package_data = True,
# Package dependencies.
install_requires = ['requests','future'],
setup_requires=[
'setuptools_scm',
],
# Metadata for PyPI.
author = 'Guewen Baconnier',
author_email = '[email protected]',
license = 'GNU AGPL-3',
url = 'http://github.com/prestapyt/prestapyt',
packages=['prestapyt'],
keywords = 'prestashop api client rest',
description = 'A library to access Prestashop Web Service from Python.',
long_description = read('README.md'),
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP :: Site Management',
'Topic :: Internet'
]
)
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
# Basic package information.
name = 'prestapyt',
use_scm_version=True,
# Packaging options.
include_package_data = True,
# Package dependencies.
install_requires = ['requests','future'],
setup_requires=[
'setuptools_scm',
],
# Metadata for PyPI.
author = 'Guewen Baconnier',
author_email = '[email protected]',
license = 'GNU AGPL-3',
url = 'http://github.com/prestapyt/prestapyt',
packages=['prestapyt'],
keywords = 'prestashop api client rest',
description = 'A library to access Prestashop Web Service from Python.',
long_description_content_type='text/markdown',
long_description = read('README.md'),
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP :: Site Management',
'Topic :: Internet'
]
)
|
Set long description content type to be pypi friendly
|
Set long description content type to be pypi friendly
|
Python
|
agpl-3.0
|
prestapyt/prestapyt,prestapyt/prestapyt
|
python
|
## Code Before:
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
# Basic package information.
name = 'prestapyt',
use_scm_version=True,
# Packaging options.
include_package_data = True,
# Package dependencies.
install_requires = ['requests','future'],
setup_requires=[
'setuptools_scm',
],
# Metadata for PyPI.
author = 'Guewen Baconnier',
author_email = '[email protected]',
license = 'GNU AGPL-3',
url = 'http://github.com/prestapyt/prestapyt',
packages=['prestapyt'],
keywords = 'prestashop api client rest',
description = 'A library to access Prestashop Web Service from Python.',
long_description = read('README.md'),
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP :: Site Management',
'Topic :: Internet'
]
)
## Instruction:
Set long description content type to be pypi friendly
## Code After:
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
# Basic package information.
name = 'prestapyt',
use_scm_version=True,
# Packaging options.
include_package_data = True,
# Package dependencies.
install_requires = ['requests','future'],
setup_requires=[
'setuptools_scm',
],
# Metadata for PyPI.
author = 'Guewen Baconnier',
author_email = '[email protected]',
license = 'GNU AGPL-3',
url = 'http://github.com/prestapyt/prestapyt',
packages=['prestapyt'],
keywords = 'prestashop api client rest',
description = 'A library to access Prestashop Web Service from Python.',
long_description_content_type='text/markdown',
long_description = read('README.md'),
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP :: Site Management',
'Topic :: Internet'
]
)
|
# ... existing code ...
packages=['prestapyt'],
keywords = 'prestashop api client rest',
description = 'A library to access Prestashop Web Service from Python.',
long_description_content_type='text/markdown',
long_description = read('README.md'),
classifiers = [
'Development Status :: 4 - Beta',
# ... rest of the code ...
|
c9ce2d0c0428f1055b9573b617fcc817abe95d47
|
src/main/java/net/mcft/copy/betterstorage/item/tile/ItemPresent.java
|
src/main/java/net/mcft/copy/betterstorage/item/tile/ItemPresent.java
|
package net.mcft.copy.betterstorage.item.tile;
import java.util.List;
import net.mcft.copy.betterstorage.tile.entity.TileEntityPresent;
import net.mcft.copy.betterstorage.utils.StackUtils;
import net.minecraft.block.Block;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.EnumRarity;
import net.minecraft.item.ItemStack;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
public class ItemPresent extends ItemCardboardBox {
public ItemPresent(Block block) {
super(block);
}
@Override
public EnumRarity getRarity(ItemStack stack) { return EnumRarity.uncommon; }
@Override
public boolean showDurabilityBar(ItemStack stack) { return false; }
@Override
@SideOnly(Side.CLIENT)
public int getColorFromItemStack(ItemStack stack, int renderPass) { return 0xFFFFFF; }
@Override
@SideOnly(Side.CLIENT)
public void addInformation(ItemStack stack, EntityPlayer player, List list, boolean advancedTooltips) {
String nameTag = StackUtils.get(stack, null, TileEntityPresent.TAG_NAMETAG);
if (nameTag != null) list.add("for " + nameTag);
}
}
|
package net.mcft.copy.betterstorage.item.tile;
import java.util.List;
import net.mcft.copy.betterstorage.tile.entity.TileEntityPresent;
import net.mcft.copy.betterstorage.utils.StackUtils;
import net.minecraft.block.Block;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.EnumRarity;
import net.minecraft.item.ItemStack;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
public class ItemPresent extends ItemCardboardBox {
public ItemPresent(Block block) {
super(block);
}
@Override
public EnumRarity getRarity(ItemStack stack) { return EnumRarity.uncommon; }
@Override
public boolean showDurabilityBar(ItemStack stack) { return false; }
@Override
@SideOnly(Side.CLIENT)
public int getColorFromItemStack(ItemStack stack, int renderPass) { return 0xFFFFFF; }
@Override
@SideOnly(Side.CLIENT)
public void addInformation(ItemStack stack, EntityPlayer player, List list, boolean advancedTooltips) {
String nameTag = StackUtils.get(stack, null, TileEntityPresent.TAG_NAMETAG);
if (nameTag != null) list.add("for " + nameTag);
}
@Override
public boolean canBeStoredInContainerItem(ItemStack item) { return true; }
}
|
Allow presents to be placed inside container items
|
Allow presents to be placed inside container items
|
Java
|
mit
|
Bunsan/BetterStorage,KingDarkLord/BetterStorage,RX14/BetterStorage,skyem123/BetterStorage,Adaptivity/BetterStorage,copygirl/BetterStorage,TehStoneMan/BetterStorageToo,TehStoneMan/BetterStorage,AnodeCathode/BetterStorage,CannibalVox/BetterStorage
|
java
|
## Code Before:
package net.mcft.copy.betterstorage.item.tile;
import java.util.List;
import net.mcft.copy.betterstorage.tile.entity.TileEntityPresent;
import net.mcft.copy.betterstorage.utils.StackUtils;
import net.minecraft.block.Block;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.EnumRarity;
import net.minecraft.item.ItemStack;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
public class ItemPresent extends ItemCardboardBox {
public ItemPresent(Block block) {
super(block);
}
@Override
public EnumRarity getRarity(ItemStack stack) { return EnumRarity.uncommon; }
@Override
public boolean showDurabilityBar(ItemStack stack) { return false; }
@Override
@SideOnly(Side.CLIENT)
public int getColorFromItemStack(ItemStack stack, int renderPass) { return 0xFFFFFF; }
@Override
@SideOnly(Side.CLIENT)
public void addInformation(ItemStack stack, EntityPlayer player, List list, boolean advancedTooltips) {
String nameTag = StackUtils.get(stack, null, TileEntityPresent.TAG_NAMETAG);
if (nameTag != null) list.add("for " + nameTag);
}
}
## Instruction:
Allow presents to be placed inside container items
## Code After:
package net.mcft.copy.betterstorage.item.tile;
import java.util.List;
import net.mcft.copy.betterstorage.tile.entity.TileEntityPresent;
import net.mcft.copy.betterstorage.utils.StackUtils;
import net.minecraft.block.Block;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.EnumRarity;
import net.minecraft.item.ItemStack;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
public class ItemPresent extends ItemCardboardBox {
public ItemPresent(Block block) {
super(block);
}
@Override
public EnumRarity getRarity(ItemStack stack) { return EnumRarity.uncommon; }
@Override
public boolean showDurabilityBar(ItemStack stack) { return false; }
@Override
@SideOnly(Side.CLIENT)
public int getColorFromItemStack(ItemStack stack, int renderPass) { return 0xFFFFFF; }
@Override
@SideOnly(Side.CLIENT)
public void addInformation(ItemStack stack, EntityPlayer player, List list, boolean advancedTooltips) {
String nameTag = StackUtils.get(stack, null, TileEntityPresent.TAG_NAMETAG);
if (nameTag != null) list.add("for " + nameTag);
}
@Override
public boolean canBeStoredInContainerItem(ItemStack item) { return true; }
}
|
...
if (nameTag != null) list.add("for " + nameTag);
}
@Override
public boolean canBeStoredInContainerItem(ItemStack item) { return true; }
}
...
|
4bb3e5fe554bc5d9e54924456a8711c3e40e6191
|
hard_way/ex15.c
|
hard_way/ex15.c
|
int main(int argc, char *argv[]){
//create two arrays we care about
int ages[] = {23,55,15,34,78,12};
char *names[] = {
"Feinb", "Fhilp", "Wastan", "Wustak","Henris","Abkar"
};
//safely get the size of ages
int count = sizeof(ages) / sizeof(int);
int i = 0;
//first way using indexing
for(i = 0; i < count; i++){
printf("%s had lived for %d years.\n", names[i], ages[i]);
}
printf("---\n");
return 0;
}
|
int main(int argc, char *argv[]){
//create two arrays we care about
int ages[] = {23,55,15,34,78,12};
char *names[] = {
"Feinb", "Fhilp", "Wastan", "Wustak","Henris","Abkar"
};
//safely get the size of ages
int count = sizeof(ages) / sizeof(int);
int i = 0;
//first way using indexing
for(i = 0; i < count; i++){
printf("%s has lived for %d years.\n", names[i], ages[i]);
}
printf("---\n");
//setup pointers to the start of the arrays
int *cur_age = ages;
char **cur_name = names;
// second way using pointers
for(i = 0; i < count; i++){
printf("%s is %d years old.\n",
*(cur_name+i), *(cur_age+i));
}
printf("---\n");
return 0;
}
|
Use pointers to start of arrays
|
Use pointers to start of arrays
|
C
|
mit
|
thewazir/learning_c
|
c
|
## Code Before:
int main(int argc, char *argv[]){
//create two arrays we care about
int ages[] = {23,55,15,34,78,12};
char *names[] = {
"Feinb", "Fhilp", "Wastan", "Wustak","Henris","Abkar"
};
//safely get the size of ages
int count = sizeof(ages) / sizeof(int);
int i = 0;
//first way using indexing
for(i = 0; i < count; i++){
printf("%s had lived for %d years.\n", names[i], ages[i]);
}
printf("---\n");
return 0;
}
## Instruction:
Use pointers to start of arrays
## Code After:
int main(int argc, char *argv[]){
//create two arrays we care about
int ages[] = {23,55,15,34,78,12};
char *names[] = {
"Feinb", "Fhilp", "Wastan", "Wustak","Henris","Abkar"
};
//safely get the size of ages
int count = sizeof(ages) / sizeof(int);
int i = 0;
//first way using indexing
for(i = 0; i < count; i++){
printf("%s has lived for %d years.\n", names[i], ages[i]);
}
printf("---\n");
//setup pointers to the start of the arrays
int *cur_age = ages;
char **cur_name = names;
// second way using pointers
for(i = 0; i < count; i++){
printf("%s is %d years old.\n",
*(cur_name+i), *(cur_age+i));
}
printf("---\n");
return 0;
}
|
# ... existing code ...
//first way using indexing
for(i = 0; i < count; i++){
printf("%s has lived for %d years.\n", names[i], ages[i]);
}
printf("---\n");
//setup pointers to the start of the arrays
int *cur_age = ages;
char **cur_name = names;
// second way using pointers
for(i = 0; i < count; i++){
printf("%s is %d years old.\n",
*(cur_name+i), *(cur_age+i));
}
printf("---\n");
# ... rest of the code ...
|
cf0110f2b1adc8fbf4b8305841961d67da33f8c7
|
pybo/bayesopt/policies/thompson.py
|
pybo/bayesopt/policies/thompson.py
|
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# use this to simplify (slightly) the Thompson implementation with sampled
# models.
from collections import deque
# local imports
from ..utils import params
# exported symbols
__all__ = ['Thompson']
@params('n')
def Thompson(model, n=100):
"""
Implementation of Thompson sampling for continuous models using a finite
approximation to the kernel matrix with `n` Fourier components.
"""
if hasattr(model, '__iter__'):
model = deque(model, maxlen=1).pop()
return model.sample_fourier(n).get
|
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from collections import deque
from ..utils import params
__all__ = ['Thompson']
@params('n')
def Thompson(model, n=100, rng=None):
"""
Implementation of Thompson sampling for continuous models using a finite
approximation to the kernel matrix with `n` Fourier components.
"""
if hasattr(model, '__iter__'):
model = deque(model, maxlen=1).pop()
return model.sample_fourier(n, rng).get
|
Fix Thompson to pay attention to the RNG.
|
Fix Thompson to pay attention to the RNG.
|
Python
|
bsd-2-clause
|
mwhoffman/pybo,jhartford/pybo
|
python
|
## Code Before:
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# use this to simplify (slightly) the Thompson implementation with sampled
# models.
from collections import deque
# local imports
from ..utils import params
# exported symbols
__all__ = ['Thompson']
@params('n')
def Thompson(model, n=100):
"""
Implementation of Thompson sampling for continuous models using a finite
approximation to the kernel matrix with `n` Fourier components.
"""
if hasattr(model, '__iter__'):
model = deque(model, maxlen=1).pop()
return model.sample_fourier(n).get
## Instruction:
Fix Thompson to pay attention to the RNG.
## Code After:
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from collections import deque
from ..utils import params
__all__ = ['Thompson']
@params('n')
def Thompson(model, n=100, rng=None):
"""
Implementation of Thompson sampling for continuous models using a finite
approximation to the kernel matrix with `n` Fourier components.
"""
if hasattr(model, '__iter__'):
model = deque(model, maxlen=1).pop()
return model.sample_fourier(n, rng).get
|
...
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from collections import deque
from ..utils import params
__all__ = ['Thompson']
@params('n')
def Thompson(model, n=100, rng=None):
"""
Implementation of Thompson sampling for continuous models using a finite
approximation to the kernel matrix with `n` Fourier components.
...
"""
if hasattr(model, '__iter__'):
model = deque(model, maxlen=1).pop()
return model.sample_fourier(n, rng).get
...
|
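The change in the record above threads an explicit rng handle through to model.sample_fourier, so Thompson sampling draws from a caller-supplied random state instead of an implicit global one, which makes runs reproducible. A minimal sketch of the same pattern in plain NumPy — the function and names here are hypothetical, not the pybo API:

import numpy as np

def sample_scores(weights, n=100, rng=None):
    # Fall back to a fresh generator only when the caller supplies nothing.
    rng = np.random.default_rng() if rng is None else rng
    # Every draw goes through the caller-controlled random state.
    return rng.normal(loc=weights, scale=1.0, size=(n, len(weights)))

# Seeding the generator the same way twice reproduces the draws exactly.
a = sample_scores(np.array([0.1, 0.5]), rng=np.random.default_rng(42))
b = sample_scores(np.array([0.1, 0.5]), rng=np.random.default_rng(42))
assert np.allclose(a, b)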
862f877cdcdef7aa4a853b2cce8eed2d7ba95fdc
|
providers/org/cogprints/apps.py
|
providers/org/cogprints/apps.py
|
from share.provider import OAIProviderAppConfig
class AppConfig(OAIProviderAppConfig):
name = 'providers.org.cogprints'
version = '0.0.1'
title = 'cogprints'
long_title = 'Cognitive Sciences ePrint Archive'
home_page = 'http://www.cogprints.org/'
url = 'http://cogprints.org/cgi/oai2'
|
from share.provider import OAIProviderAppConfig
class AppConfig(OAIProviderAppConfig):
name = 'providers.org.cogprints'
version = '0.0.1'
title = 'cogprints'
long_title = 'Cognitive Sciences ePrint Archive'
home_page = 'http://www.cogprints.org/'
url = 'http://cogprints.org/cgi/oai2'
emitted_type = 'preprint'
|
Update cogprints to emit preprints
|
Update cogprints to emit preprints
|
Python
|
apache-2.0
|
aaxelb/SHARE,CenterForOpenScience/SHARE,CenterForOpenScience/SHARE,zamattiac/SHARE,laurenbarker/SHARE,laurenbarker/SHARE,aaxelb/SHARE,zamattiac/SHARE,aaxelb/SHARE,zamattiac/SHARE,laurenbarker/SHARE,CenterForOpenScience/SHARE
|
python
|
## Code Before:
from share.provider import OAIProviderAppConfig
class AppConfig(OAIProviderAppConfig):
name = 'providers.org.cogprints'
version = '0.0.1'
title = 'cogprints'
long_title = 'Cognitive Sciences ePrint Archive'
home_page = 'http://www.cogprints.org/'
url = 'http://cogprints.org/cgi/oai2'
## Instruction:
Update cogprints to emit preprints
## Code After:
from share.provider import OAIProviderAppConfig
class AppConfig(OAIProviderAppConfig):
name = 'providers.org.cogprints'
version = '0.0.1'
title = 'cogprints'
long_title = 'Cognitive Sciences ePrint Archive'
home_page = 'http://www.cogprints.org/'
url = 'http://cogprints.org/cgi/oai2'
emitted_type = 'preprint'
|
# ... existing code ...
long_title = 'Cognitive Sciences ePrint Archive'
home_page = 'http://www.cogprints.org/'
url = 'http://cogprints.org/cgi/oai2'
emitted_type = 'preprint'
# ... rest of the code ...
|
ffd62acfee0ef0d1915854ff11c3d8cb423ff7a6
|
Simperium/src/support/java/com/simperium/test/MockAuthResponseListener.java
|
Simperium/src/support/java/com/simperium/test/MockAuthResponseListener.java
|
package com.simperium.test;
import com.simperium.client.AuthException;
import com.simperium.client.AuthResponseListener;
import com.simperium.client.User;
public class MockAuthResponseListener implements AuthResponseListener {
public AuthException exception;
public User user;
public boolean success = false, failure = false;
@Override
public void onSuccess(User user, String userId, String token){
success = true;
this.user = user;
}
@Override
public void onFailure(User user, AuthException exception){
failure = true;
this.user = user;
this.exception = exception;
}
}
|
package com.simperium.test;
import com.simperium.client.AuthException;
import com.simperium.client.AuthProvider;
import com.simperium.client.AuthResponseListener;
import com.simperium.client.User;
public class MockAuthResponseListener implements AuthResponseListener {
public AuthException exception;
public User user;
public boolean success = false, failure = false;
@Override
public void onSuccess(User user, String userId, String token, AuthProvider provider){
success = true;
this.user = user;
}
@Override
public void onFailure(User user, AuthException exception){
failure = true;
this.user = user;
this.exception = exception;
}
}
|
Add auth provider parameter to on success callback in mock auth response listener
|
Add auth provider parameter to on success callback in mock auth response listener
|
Java
|
mit
|
Simperium/simperium-android,Simperium/simperium-android
|
java
|
## Code Before:
package com.simperium.test;
import com.simperium.client.AuthException;
import com.simperium.client.AuthResponseListener;
import com.simperium.client.User;
public class MockAuthResponseListener implements AuthResponseListener {
public AuthException exception;
public User user;
public boolean success = false, failure = false;
@Override
public void onSuccess(User user, String userId, String token){
success = true;
this.user = user;
}
@Override
public void onFailure(User user, AuthException exception){
failure = true;
this.user = user;
this.exception = exception;
}
}
## Instruction:
Add auth provider parameter to on success callback in mock auth response listener
## Code After:
package com.simperium.test;
import com.simperium.client.AuthException;
import com.simperium.client.AuthProvider;
import com.simperium.client.AuthResponseListener;
import com.simperium.client.User;
public class MockAuthResponseListener implements AuthResponseListener {
public AuthException exception;
public User user;
public boolean success = false, failure = false;
@Override
public void onSuccess(User user, String userId, String token, AuthProvider provider){
success = true;
this.user = user;
}
@Override
public void onFailure(User user, AuthException exception){
failure = true;
this.user = user;
this.exception = exception;
}
}
|
# ... existing code ...
package com.simperium.test;
import com.simperium.client.AuthException;
import com.simperium.client.AuthProvider;
import com.simperium.client.AuthResponseListener;
import com.simperium.client.User;
# ... modified code ...
public boolean success = false, failure = false;
@Override
public void onSuccess(User user, String userId, String token, AuthProvider provider){
success = true;
this.user = user;
}
# ... rest of the code ...
|
c47c043e76ac037456b8e966a5f9d60a151e3120
|
elodie/geolocation.py
|
elodie/geolocation.py
|
from os import path
from ConfigParser import ConfigParser
import requests
import sys
def reverse_lookup(lat, lon):
if(lat is None or lon is None):
return None
if not path.exists('./config.ini'):
return None
config = ConfigParser()
config.read('./config.ini')
if('MapQuest' not in config.sections()):
return None
key = config.get('MapQuest', 'key')
try:
r = requests.get('https://open.mapquestapi.com/nominatim/v1/reverse.php?key=%s&lat=%s&lon=%s&format=json' % (key, lat, lon))
return r.json()
except requests.exceptions.RequestException as e:
print e
return None
except ValueError as e:
print r.text
print e
return None
def place_name(lat, lon):
geolocation_info = reverse_lookup(lat, lon)
if(geolocation_info is not None):
if('address' in geolocation_info):
address = geolocation_info['address']
if('city' in address):
return address['city']
elif('state' in address):
return address['state']
elif('country' in address):
return address['country']
return None
|
from os import path
from ConfigParser import ConfigParser
import requests
import sys
def reverse_lookup(lat, lon):
if(lat is None or lon is None):
return None
config_file = '%s/config.ini' % path.dirname(path.dirname(path.abspath(__file__)))
if not path.exists(config_file):
return None
config = ConfigParser()
config.read(config_file)
if('MapQuest' not in config.sections()):
return None
key = config.get('MapQuest', 'key')
try:
r = requests.get('https://open.mapquestapi.com/nominatim/v1/reverse.php?key=%s&lat=%s&lon=%s&format=json' % (key, lat, lon))
return r.json()
except requests.exceptions.RequestException as e:
print e
return None
except ValueError as e:
print r.text
print e
return None
def place_name(lat, lon):
geolocation_info = reverse_lookup(lat, lon)
if(geolocation_info is not None):
if('address' in geolocation_info):
address = geolocation_info['address']
if('city' in address):
return address['city']
elif('state' in address):
return address['state']
elif('country' in address):
return address['country']
return None
|
Use absolute path for config file so it works with apps like Hazel
|
Use absolute path for config file so it works with apps like Hazel
|
Python
|
apache-2.0
|
zserg/elodie,zingo/elodie,jmathai/elodie,jmathai/elodie,zingo/elodie,zserg/elodie,zserg/elodie,jmathai/elodie,zserg/elodie,jmathai/elodie,zingo/elodie
|
python
|
## Code Before:
from os import path
from ConfigParser import ConfigParser
import requests
import sys
def reverse_lookup(lat, lon):
if(lat is None or lon is None):
return None
if not path.exists('./config.ini'):
return None
config = ConfigParser()
config.read('./config.ini')
if('MapQuest' not in config.sections()):
return None
key = config.get('MapQuest', 'key')
try:
r = requests.get('https://open.mapquestapi.com/nominatim/v1/reverse.php?key=%s&lat=%s&lon=%s&format=json' % (key, lat, lon))
return r.json()
except requests.exceptions.RequestException as e:
print e
return None
except ValueError as e:
print r.text
print e
return None
def place_name(lat, lon):
geolocation_info = reverse_lookup(lat, lon)
if(geolocation_info is not None):
if('address' in geolocation_info):
address = geolocation_info['address']
if('city' in address):
return address['city']
elif('state' in address):
return address['state']
elif('country' in address):
return address['country']
return None
## Instruction:
Use absolute path for config file so it works with apps like Hazel
## Code After:
from os import path
from ConfigParser import ConfigParser
import requests
import sys
def reverse_lookup(lat, lon):
if(lat is None or lon is None):
return None
config_file = '%s/config.ini' % path.dirname(path.dirname(path.abspath(__file__)))
if not path.exists(config_file):
return None
config = ConfigParser()
config.read(config_file)
if('MapQuest' not in config.sections()):
return None
key = config.get('MapQuest', 'key')
try:
r = requests.get('https://open.mapquestapi.com/nominatim/v1/reverse.php?key=%s&lat=%s&lon=%s&format=json' % (key, lat, lon))
return r.json()
except requests.exceptions.RequestException as e:
print e
return None
except ValueError as e:
print r.text
print e
return None
def place_name(lat, lon):
geolocation_info = reverse_lookup(lat, lon)
if(geolocation_info is not None):
if('address' in geolocation_info):
address = geolocation_info['address']
if('city' in address):
return address['city']
elif('state' in address):
return address['state']
elif('country' in address):
return address['country']
return None
|
# ... existing code ...
if(lat is None or lon is None):
return None
config_file = '%s/config.ini' % path.dirname(path.dirname(path.abspath(__file__)))
if not path.exists(config_file):
return None
config = ConfigParser()
config.read(config_file)
if('MapQuest' not in config.sections()):
return None
# ... rest of the code ...
|
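The fix in the record above resolves config.ini against the module's own location instead of the process working directory, which is why it keeps working when the script is launched by a file-watcher such as Hazel (such tools rarely start the process from the repository root). A small stand-alone sketch of the same idea, assuming a hypothetical layout where the config sits one directory above the module:

import os

def find_config(filename='config.ini'):
    # Anchor the lookup on this file's directory, not os.getcwd(),
    # so the result is identical no matter where the process was started.
    here = os.path.dirname(os.path.abspath(__file__))
    candidate = os.path.join(os.path.dirname(here), filename)
    return candidate if os.path.exists(candidate) else None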
b51c8d107b6da5d6d6b0cc5a1db525bff856a1cf
|
AgileCLU/tests/__init__.py
|
AgileCLU/tests/__init__.py
|
import AgileCLU
import unittest
class AgileCLUTestCase(unittest.TestCase):
def setup(self):
self.agileclu = AgileCLU()
def test_epwbasekey(self):
return
def test_e_pw_hash(self):
return
def test_e_pw_dehash(self):
return
if __name__ == "__main__":
unittest.main()
|
import unittest
import AgileCLU
class AgileCLUTestCase(unittest.TestCase):
def test_epwbasekey(self):
hash=AgileCLU.epwbasekey('test', 'test', 'test.example.com', '/')
self.assertEqual(hash, 'AbiDicIBaEuvafIuegJWVP8j')
def test_e_pw_hash(self):
hash=AgileCLU.e_pw_hash('teststr', 'test', 'test', 'test.example.com', '/')
self.assertEqual(hash, 'jyH0M5b9OyM=')
def test_e_pw_dehash(self):
hash=AgileCLU.e_pw_dehash('teststr', 'test', 'test', 'test.example.com', '/')
self.assertEqual(hash, '87654321')
if __name__ == "__main__":
unittest.main()
|
Add basic asserts for hashing helper functions.
|
Add basic asserts for hashing helper functions.
|
Python
|
bsd-2-clause
|
wylieswanson/AgileCLU
|
python
|
## Code Before:
import AgileCLU
import unittest
class AgileCLUTestCase(unittest.TestCase):
def setup(self):
self.agileclu = AgileCLU()
def test_epwbasekey(self):
return
def test_e_pw_hash(self):
return
def test_e_pw_dehash(self):
return
if __name__ == "__main__":
unittest.main()
## Instruction:
Add basic asserts for hashing helper functions.
## Code After:
import unittest
import AgileCLU
class AgileCLUTestCase(unittest.TestCase):
def test_epwbasekey(self):
hash=AgileCLU.epwbasekey('test', 'test', 'test.example.com', '/')
self.assertEqual(hash, 'AbiDicIBaEuvafIuegJWVP8j')
def test_e_pw_hash(self):
hash=AgileCLU.e_pw_hash('teststr', 'test', 'test', 'test.example.com', '/')
self.assertEqual(hash, 'jyH0M5b9OyM=')
def test_e_pw_dehash(self):
hash=AgileCLU.e_pw_dehash('teststr', 'test', 'test', 'test.example.com', '/')
self.assertEqual(hash, '87654321')
if __name__ == "__main__":
unittest.main()
|
...
import unittest
import AgileCLU
class AgileCLUTestCase(unittest.TestCase):
def test_epwbasekey(self):
hash=AgileCLU.epwbasekey('test', 'test', 'test.example.com', '/')
self.assertEqual(hash, 'AbiDicIBaEuvafIuegJWVP8j')
def test_e_pw_hash(self):
hash=AgileCLU.e_pw_hash('teststr', 'test', 'test', 'test.example.com', '/')
self.assertEqual(hash, 'jyH0M5b9OyM=')
def test_e_pw_dehash(self):
hash=AgileCLU.e_pw_dehash('teststr', 'test', 'test', 'test.example.com', '/')
self.assertEqual(hash, '87654321')
if __name__ == "__main__":
unittest.main()
...
|
a58a6b897370e82aa3625c36a00e2de74c16ab6c
|
cortex/__init__.py
|
cortex/__init__.py
|
from .dataset import Dataset, VolumeData, VertexData, DataView, View
from . import align, volume, quickflat, webgl, segment, options
from .database import db
from .utils import *
from .quickflat import make_figure as quickshow
openFile = Dataset.from_file
try:
from . import webgl
from .webgl import show as webshow
except ImportError:
pass
try:
from . import anat
except ImportError:
pass
# Create deprecated interface for database
import warnings
class dep(object):
def __getattr__(self, *args, **kwargs):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__getattr__(*args, **kwargs)
def __dir__(self):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__dir__()
surfs = dep()
|
from .dataset import Dataset, VolumeData, VertexData, DataView, View
from . import align, volume, quickflat, webgl, segment, options
from .database import db
from .utils import *
from .quickflat import make_figure as quickshow
openFile = Dataset.from_file
try:
from . import webgl
from .webgl import show as webshow
except ImportError:
pass
try:
from . import anat
except ImportError:
pass
# Create deprecated interface for database
import warnings
class dep(object):
def __getattr__(self, name):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return getattr(db, name)
def __dir__(self):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__dir__()
surfs = dep()
|
Fix up the deprecated surfs object
Fix up the deprecated surfs object
|
Fix up the deprecated surfs object
|
Python
|
bsd-2-clause
|
gallantlab/pycortex,gallantlab/pycortex,CVML/pycortex,CVML/pycortex,smerdis/pycortex,smerdis/pycortex,smerdis/pycortex,gallantlab/pycortex,gallantlab/pycortex,gallantlab/pycortex,CVML/pycortex,smerdis/pycortex,CVML/pycortex,CVML/pycortex,smerdis/pycortex
|
python
|
## Code Before:
from .dataset import Dataset, VolumeData, VertexData, DataView, View
from . import align, volume, quickflat, webgl, segment, options
from .database import db
from .utils import *
from .quickflat import make_figure as quickshow
openFile = Dataset.from_file
try:
from . import webgl
from .webgl import show as webshow
except ImportError:
pass
try:
from . import anat
except ImportError:
pass
# Create deprecated interface for database
import warnings
class dep(object):
def __getattr__(self, *args, **kwargs):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__getattr__(*args, **kwargs)
def __dir__(self):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__dir__()
surfs = dep()
## Instruction:
Fix up the deprecated surfs object
## Code After:
from .dataset import Dataset, VolumeData, VertexData, DataView, View
from . import align, volume, quickflat, webgl, segment, options
from .database import db
from .utils import *
from .quickflat import make_figure as quickshow
openFile = Dataset.from_file
try:
from . import webgl
from .webgl import show as webshow
except ImportError:
pass
try:
from . import anat
except ImportError:
pass
# Create deprecated interface for database
import warnings
class dep(object):
def __getattr__(self, name):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return getattr(db, name)
def __dir__(self):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__dir__()
surfs = dep()
|
...
# Create deprecated interface for database
import warnings
class dep(object):
def __getattr__(self, name):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return getattr(db, name)
def __dir__(self):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__dir__()
...
|
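The corrected proxy above works because getattr(db, name) goes through normal attribute lookup, while the old db.__getattr__(*args, **kwargs) call only succeeds if the target object happens to define that hook itself. A self-contained sketch of the same deprecation-proxy pattern — the names are illustrative, not the pycortex API:

import warnings

class _Backend(object):
    def load(self):
        return 'loaded'

_backend = _Backend()

class _DeprecatedAlias(object):
    def __getattr__(self, name):
        warnings.warn('old_name is deprecated, use new_name instead', DeprecationWarning)
        # Plain getattr covers attributes, methods and properties alike.
        return getattr(_backend, name)

old_name = _DeprecatedAlias()
assert old_name.load() == 'loaded'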
3a37211f09c000f0fcb41ca076cb98b90bfae030
|
eb_sqs/urls.py
|
eb_sqs/urls.py
|
from __future__ import absolute_import, unicode_literals
from django.conf.urls import url
from eb_sqs.views import process_task
app_name = 'eb_sqs'
urlpatterns = [
url(r'^process$', process_task, name='process_task'),
]
|
from __future__ import absolute_import, unicode_literals
from django.conf.urls import url
from eb_sqs.views import process_task
urlpatterns = [
url(r'^process$', process_task, name='process_task'),
]
|
Remove unnecessary global variable assignment
|
Remove unnecessary global variable assignment
|
Python
|
mit
|
cuda-networks/django-eb-sqs,cuda-networks/django-eb-sqs
|
python
|
## Code Before:
from __future__ import absolute_import, unicode_literals
from django.conf.urls import url
from eb_sqs.views import process_task
app_name = 'eb_sqs'
urlpatterns = [
url(r'^process$', process_task, name='process_task'),
]
## Instruction:
Remove unnecessary global variable assignment
## Code After:
from __future__ import absolute_import, unicode_literals
from django.conf.urls import url
from eb_sqs.views import process_task
urlpatterns = [
url(r'^process$', process_task, name='process_task'),
]
|
// ... existing code ...
from eb_sqs.views import process_task
urlpatterns = [
url(r'^process$', process_task, name='process_task'),
]
// ... rest of the code ...
|
135653c885225b845addfa14f9a902e80c40b385
|
src/cs437/som/neighborhood/HyperbolicNeighborhoodWidthFunction.java
|
src/cs437/som/neighborhood/HyperbolicNeighborhoodWidthFunction.java
|
package cs437.som.neighborhood;
import cs437.som.NeightborhoodWidthFunction;
/**
* Hyperbolic neighborhood width strategy for self-organizing map.
*/
public class HyperbolicNeighborhoodWidthFunction implements NeightborhoodWidthFunction {
private double expectedIterations;
public void setExpectedIterations(int expectedIterations) {
this.expectedIterations = expectedIterations;
}
public double neighborhoodWidth(int iteration) {
return expectedIterations / (expectedIterations + iteration);
}
@Override
public String toString() {
return "HyperbolicNeighborhoodWidthFunction";
}
}
|
package cs437.som.neighborhood;
import cs437.som.NeightborhoodWidthFunction;
/**
* Hyperbolic neighborhood width strategy for self-organizing map.
*
* The exact behavior follows the formula:
* w_i / (t + t_max)
* where
* w_i is the initial width of the neighborhood
* t is the current iteration
* t_max is the maximum expected iteration
*/
public class HyperbolicNeighborhoodWidthFunction implements NeightborhoodWidthFunction {
private double expectedIterations = 0.0;
public void setExpectedIterations(int expectedIterations) {
this.expectedIterations = expectedIterations;
}
public double neighborhoodWidth(int iteration) {
return expectedIterations / (expectedIterations + iteration);
}
@Override
public String toString() {
return "HyperbolicNeighborhoodWidthFunction";
}
}
|
Add formula comment to hyperbolic NW class.
|
Add formula comment to hyperbolic NW class.
|
Java
|
bsd-3-clause
|
gamma9mu/SOMa
|
java
|
## Code Before:
package cs437.som.neighborhood;
import cs437.som.NeightborhoodWidthFunction;
/**
* Hyperbolic neighborhood width strategy for self-organizing map.
*/
public class HyperbolicNeighborhoodWidthFunction implements NeightborhoodWidthFunction {
private double expectedIterations;
public void setExpectedIterations(int expectedIterations) {
this.expectedIterations = expectedIterations;
}
public double neighborhoodWidth(int iteration) {
return expectedIterations / (expectedIterations + iteration);
}
@Override
public String toString() {
return "HyperbolicNeighborhoodWidthFunction";
}
}
## Instruction:
Add formula comment to hyperbolic NW class.
## Code After:
package cs437.som.neighborhood;
import cs437.som.NeightborhoodWidthFunction;
/**
* Hyperbolic neighborhood width strategy for self-organizing map.
*
* The exact behavior follows the formula:
* w_i / (t + t_max)
* where
* w_i is the initial width of the neighborhood
* t is the current iteration
* t_max is the maximum expected iteration
*/
public class HyperbolicNeighborhoodWidthFunction implements NeightborhoodWidthFunction {
private double expectedIterations = 0.0;
public void setExpectedIterations(int expectedIterations) {
this.expectedIterations = expectedIterations;
}
public double neighborhoodWidth(int iteration) {
return expectedIterations / (expectedIterations + iteration);
}
@Override
public String toString() {
return "HyperbolicNeighborhoodWidthFunction";
}
}
|
# ... existing code ...
/**
* Hyperbolic neighborhood width strategy for self-organizing map.
*
* The exact behavior follows the formula:
* w_i / (t + t_max)
* where
* w_i is the initial width of the neighborhood
* t is the current iteration
* t_max is the maximum expected iteration
*/
public class HyperbolicNeighborhoodWidthFunction implements NeightborhoodWidthFunction {
private double expectedIterations = 0.0;
public void setExpectedIterations(int expectedIterations) {
this.expectedIterations = expectedIterations;
# ... rest of the code ...
|
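The documented decay is easier to see with numbers plugged in. As coded, neighborhoodWidth returns expectedIterations / (expectedIterations + iteration), i.e. the width starts at 1.0 and falls off hyperbolically, reaching one half at the expected iteration count. A quick illustrative check in Python mirroring that arithmetic (not part of the Java class):

def neighborhood_width(t, t_max=1000.0):
    # 1.0 at iteration 0, 0.5 at t_max, then a long hyperbolic tail.
    return t_max / (t_max + t)

for t in (0, 100, 1000, 10000):
    print(t, round(neighborhood_width(t), 3))
# prints: 0 1.0, 100 0.909, 1000 0.5, 10000 0.091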
b9143462c004af7d18a66fa92ad94585468751b9
|
IndexedRedis/fields/classic.py
|
IndexedRedis/fields/classic.py
|
from . import IRField, IR_NULL_STRINGS, irNull
from ..compat_str import tobytes
class IRClassicField(IRField):
'''
IRClassicField - The IRField type which behaves like the "classic" IndexedRedis string-named fields.
This will store and retrieve data encoding into the default encoding (@see IndexedRedis.compat_str.setDefaultIREncoding)
and have a default value of empty string.
'''
CAN_INDEX = True
def __init__(self, name='', hashIndex=False):
IRField.__init__(self, name=name, hashIndex=hashIndex, defaultValue='')
def __new__(self, name='', hashIndex=False):
return IRField.__new__(self, name)
# vim: set ts=8 shiftwidth=8 softtabstop=8 noexpandtab :
|
from . import IRField, IR_NULL_STRINGS, irNull
from ..compat_str import tobytes, encoded_str_type
class IRClassicField(IRField):
'''
IRClassicField - The IRField type which behaves like the "classic" IndexedRedis string-named fields.
This will store and retrieve data encoding into the default encoding (@see IndexedRedis.compat_str.setDefaultIREncoding)
and have a default value of empty string.
'''
CAN_INDEX = True
def __init__(self, name='', hashIndex=False):
IRField.__init__(self, name=name, valueType=encoded_str_type, hashIndex=hashIndex, defaultValue='')
def __new__(self, name='', hashIndex=False):
return IRField.__new__(self, name)
# vim: set ts=8 shiftwidth=8 softtabstop=8 noexpandtab :
|
Change IRClassicField to use 'encoded_str_type'
|
Change IRClassicField to use 'encoded_str_type'
|
Python
|
lgpl-2.1
|
kata198/indexedredis,kata198/indexedredis
|
python
|
## Code Before:
from . import IRField, IR_NULL_STRINGS, irNull
from ..compat_str import tobytes
class IRClassicField(IRField):
'''
IRClassicField - The IRField type which behaves like the "classic" IndexedRedis string-named fields.
This will store and retrieve data encoding into the default encoding (@see IndexedRedis.compat_str.setDefaultIREncoding)
and have a default value of empty string.
'''
CAN_INDEX = True
def __init__(self, name='', hashIndex=False):
IRField.__init__(self, name=name, hashIndex=hashIndex, defaultValue='')
def __new__(self, name='', hashIndex=False):
return IRField.__new__(self, name)
# vim: set ts=8 shiftwidth=8 softtabstop=8 noexpandtab :
## Instruction:
Change IRClassicField to use 'encoded_str_type'
## Code After:
from . import IRField, IR_NULL_STRINGS, irNull
from ..compat_str import tobytes, encoded_str_type
class IRClassicField(IRField):
'''
IRClassicField - The IRField type which behaves like the "classic" IndexedRedis string-named fields.
This will store and retrieve data encoding into the default encoding (@see IndexedRedis.compat_str.setDefaultIREncoding)
and have a default value of empty string.
'''
CAN_INDEX = True
def __init__(self, name='', hashIndex=False):
IRField.__init__(self, name=name, valueType=encoded_str_type, hashIndex=hashIndex, defaultValue='')
def __new__(self, name='', hashIndex=False):
return IRField.__new__(self, name)
# vim: set ts=8 shiftwidth=8 softtabstop=8 noexpandtab :
|
# ... existing code ...
from . import IRField, IR_NULL_STRINGS, irNull
from ..compat_str import tobytes, encoded_str_type
class IRClassicField(IRField):
'''
# ... modified code ...
CAN_INDEX = True
def __init__(self, name='', hashIndex=False):
IRField.__init__(self, name=name, valueType=encoded_str_type, hashIndex=hashIndex, defaultValue='')
def __new__(self, name='', hashIndex=False):
return IRField.__new__(self, name)
# ... rest of the code ...
|
a09edcdf11c0d6c6b43cbff5029ac8cfb5741170
|
application.py
|
application.py
|
import os
from app import create_app, db
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
application = create_app(os.getenv('FLASH_CONFIG') or 'development')
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
import os
from app import create_app, db
from flask.ext.script import Manager, Server
from flask.ext.migrate import Migrate, MigrateCommand
application = create_app(os.getenv('FLASH_CONFIG') or 'development')
manager = Manager(application)
manager.add_command("runserver", Server(port=5000))
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
Update to run on port 5000
|
Update to run on port 5000
For development we will want to run multiple apps, so they should each bind to a different port number.
The default port is 5000 anyway, but we should state the port explicitly in the code which is why I've added it here.
|
Python
|
mit
|
mtekel/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,alphagov/digitalmarketplace-api,mtekel/digitalmarketplace-api,mtekel/digitalmarketplace-api,mtekel/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
|
python
|
## Code Before:
import os
from app import create_app, db
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
application = create_app(os.getenv('FLASH_CONFIG') or 'development')
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
## Instruction:
Update to run on port 5000
For development we will want to run multiple apps, so they should each bind to a different port number.
The default port is 5000 anyway, but we should state the port explicitly in the code which is why I've added it here.
## Code After:
import os
from app import create_app, db
from flask.ext.script import Manager, Server
from flask.ext.migrate import Migrate, MigrateCommand
application = create_app(os.getenv('FLASH_CONFIG') or 'development')
manager = Manager(application)
manager.add_command("runserver", Server(port=5000))
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
// ... existing code ...
import os
from app import create_app, db
from flask.ext.script import Manager, Server
from flask.ext.migrate import Migrate, MigrateCommand
application = create_app(os.getenv('FLASH_CONFIG') or 'development')
manager = Manager(application)
manager.add_command("runserver", Server(port=5000))
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
// ... rest of the code ...
|
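The commit message's reasoning — several apps in local development, each bound to its own port — generalizes beyond Flask-Script: stating the port explicitly, and ideally letting it be overridden, is what keeps two services from fighting over the same socket. A hedged sketch with plain Flask and an environment variable; this is an alternative shape, not the Manager/Server API used in the record:

import os
from flask import Flask

app = Flask(__name__)

if __name__ == '__main__':
    # Each local app can export its own PORT; 5000 stays the documented default.
    app.run(port=int(os.getenv('PORT', '5000')))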
cb6d0ea6c05eb62fafe97ac13d5665cb00b2db3c
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='spherical_functions',
version='1.0',
description='Python/numba implementation of Wigner D Matrices, spin-weighted spherical harmonics, and associated functions',
author='Michael Boyle',
# author_email='',
url='https://github.com/moble/spherical_functions',
package_dir={'spherical_functions': ''},
packages=['spherical_functions',],
)
|
from distutils.core import setup
setup(name='spherical_functions',
version='1.0',
description='Python/numba implementation of Wigner D Matrices, spin-weighted spherical harmonics, and associated functions',
author='Michael Boyle',
# author_email='',
url='https://github.com/moble/spherical_functions',
packages=['spherical_functions',],
package_dir={'spherical_functions': ''},
package_data={'spherical_functions': ['Wigner_coefficients.npy',
'binomial_coefficients.npy',
'ladder_operator_coefficients.npy']},
)
|
Copy data files for numpy
|
Copy data files for numpy
|
Python
|
mit
|
moble/spherical_functions
|
python
|
## Code Before:
from distutils.core import setup
setup(name='spherical_functions',
version='1.0',
description='Python/numba implementation of Wigner D Matrices, spin-weighted spherical harmonics, and associated functions',
author='Michael Boyle',
# author_email='',
url='https://github.com/moble/spherical_functions',
package_dir={'spherical_functions': ''},
packages=['spherical_functions',],
)
## Instruction:
Copy data files for numpy
## Code After:
from distutils.core import setup
setup(name='spherical_functions',
version='1.0',
description='Python/numba implementation of Wigner D Matrices, spin-weighted spherical harmonics, and associated functions',
author='Michael Boyle',
# author_email='',
url='https://github.com/moble/spherical_functions',
packages=['spherical_functions',],
package_dir={'spherical_functions': ''},
package_data={'spherical_functions': ['Wigner_coefficients.npy',
'binomial_coefficients.npy',
'ladder_operator_coefficients.npy']},
)
|
...
author='Michael Boyle',
# author_email='',
url='https://github.com/moble/spherical_functions',
packages=['spherical_functions',],
package_dir={'spherical_functions': ''},
package_data={'spherical_functions': ['Wigner_coefficients.npy',
'binomial_coefficients.npy',
'ladder_operator_coefficients.npy']},
)
...
|
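Listing the .npy files under package_data only bundles them into the built distribution; the package still has to locate them at import or run time. One common way to do that is to resolve the files next to the installed module, sketched below as an assumption about how such coefficient arrays might be loaded (this is not the spherical_functions source):

import os
import numpy as np

def load_coefficients(name='binomial_coefficients.npy'):
    # Resolve the data file relative to the installed package, never the CWD.
    package_dir = os.path.dirname(os.path.abspath(__file__))
    return np.load(os.path.join(package_dir, name))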
ec58d9e3ac445e96413d10957c2e2488a695e784
|
test/CodeGen/clear_cache.c
|
test/CodeGen/clear_cache.c
|
// RUN: %clang_cc1 -emit-llvm %s -o - | FileCheck %s
char buffer[32] = "This is a largely unused buffer";
// __builtin___clear_cache always maps to @llvm.clear_cache, but what
// each back-end produces is different, and this is tested in LLVM
int main() {
__builtin___clear_cache(buffer, buffer+32);
// CHECK: @llvm.clear_cache(i8* getelementptr {{.*}}, i8* getelementptr {{.*}} (i8* getelementptr {{.*}} 32))
return 0;
}
|
// RUN: %clang_cc1 -emit-llvm %s -o - | FileCheck %s
char buffer[32] = "This is a largely unused buffer";
// __builtin___clear_cache always maps to @llvm.clear_cache, but what
// each back-end produces is different, and this is tested in LLVM
int main() {
__builtin___clear_cache(buffer, buffer+32);
// CHECK: @llvm.clear_cache(i8* getelementptr inbounds ({{.*}}, i8* getelementptr inbounds (i8* getelementptr inbounds ({{.*}} 32))
return 0;
}
|
Update test case to be compatible with auto-migration to new getelementptr syntax coming in the near future
|
Update test case to be compatible with auto-migration to new getelementptr syntax coming in the near future
The first change won't touch GEPOperators such as these, but the update
script only identifies them by the leading '(' after getelementptr or
'getelementptr inbounds', so update this test to at least have those
features to allow auto-migrating.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@229198 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang
|
c
|
## Code Before:
// RUN: %clang_cc1 -emit-llvm %s -o - | FileCheck %s
char buffer[32] = "This is a largely unused buffer";
// __builtin___clear_cache always maps to @llvm.clear_cache, but what
// each back-end produces is different, and this is tested in LLVM
int main() {
__builtin___clear_cache(buffer, buffer+32);
// CHECK: @llvm.clear_cache(i8* getelementptr {{.*}}, i8* getelementptr {{.*}} (i8* getelementptr {{.*}} 32))
return 0;
}
## Instruction:
Update test case to be compatible with auto-migration to new getelementptr syntax coming in the near future
The first change won't touch GEPOperators such as these, but the update
script only identifies them by the leading '(' after getelementptr or
'getelementptr inbounds', so update this test to at least have those
features to allow auto-migrating.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@229198 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// RUN: %clang_cc1 -emit-llvm %s -o - | FileCheck %s
char buffer[32] = "This is a largely unused buffer";
// __builtin___clear_cache always maps to @llvm.clear_cache, but what
// each back-end produces is different, and this is tested in LLVM
int main() {
__builtin___clear_cache(buffer, buffer+32);
// CHECK: @llvm.clear_cache(i8* getelementptr inbounds ({{.*}}, i8* getelementptr inbounds (i8* getelementptr inbounds ({{.*}} 32))
return 0;
}
|
// ... existing code ...
int main() {
__builtin___clear_cache(buffer, buffer+32);
// CHECK: @llvm.clear_cache(i8* getelementptr inbounds ({{.*}}, i8* getelementptr inbounds (i8* getelementptr inbounds ({{.*}} 32))
return 0;
}
// ... rest of the code ...
|
0d208865fc69e8f0696137da30df4b5115219d96
|
setup.py
|
setup.py
|
import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
setup(
name='django-postgres-extra',
version='1.19',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='[email protected]',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
|
import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read()
setup(
name='django-postgres-extra',
version='1.20',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='[email protected]',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
|
Read README as UTF-8, always
|
Read README as UTF-8, always
|
Python
|
mit
|
SectorLabs/django-postgres-extra
|
python
|
## Code Before:
import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
setup(
name='django-postgres-extra',
version='1.19',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='[email protected]',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
## Instruction:
Read README as UTF-8, always
## Code After:
import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read()
setup(
name='django-postgres-extra',
version='1.20',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='[email protected]',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
|
// ... existing code ...
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read()
setup(
name='django-postgres-extra',
version='1.20',
packages=find_packages(),
include_package_data=True,
license='MIT License',
// ... rest of the code ...
|
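The encoding='utf-8' argument matters because a bare open() decodes with the locale's preferred encoding, so a README containing non-ASCII text can raise UnicodeDecodeError on hosts with an ASCII locale (a common CI failure). A minimal illustration of the guarantee, using a throwaway file:

text = 'caf\u00e9'  # non-ASCII once encoded

with open('README.rst', 'w', encoding='utf-8') as fh:
    fh.write(text)

# Reading with an explicit encoding gives the same text back everywhere,
# independent of LANG/LC_ALL on the machine running setup.py.
with open('README.rst', encoding='utf-8') as fh:
    assert fh.read() == text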
ab802204d84511765a701cad48e9e22dc4e84be1
|
tests/rules/conftest.py
|
tests/rules/conftest.py
|
import pytest
from fmn.rules.cache import cache
@pytest.fixture(autouse=True, scope="session")
def configured_cache():
cache.configure()
|
import pytest
from fmn.rules.cache import cache
@pytest.fixture(autouse=True)
def configured_cache():
if not cache.region.is_configured:
cache.configure()
yield
cache.region.invalidate()
|
Fix intermittent failures of test_guard_http_exception
|
Fix intermittent failures of test_guard_http_exception
Signed-off-by: Ryan Lerch <[email protected]>
|
Python
|
lgpl-2.1
|
fedora-infra/fmn,fedora-infra/fmn,fedora-infra/fmn,fedora-infra/fmn,fedora-infra/fmn
|
python
|
## Code Before:
import pytest
from fmn.rules.cache import cache
@pytest.fixture(autouse=True, scope="session")
def configured_cache():
cache.configure()
## Instruction:
Fix intermittent failures of test_guard_http_exception
Signed-off-by: Ryan Lerch <[email protected]>
## Code After:
import pytest
from fmn.rules.cache import cache
@pytest.fixture(autouse=True)
def configured_cache():
if not cache.region.is_configured:
cache.configure()
yield
cache.region.invalidate()
|
...
from fmn.rules.cache import cache
@pytest.fixture(autouse=True)
def configured_cache():
if not cache.region.is_configured:
cache.configure()
yield
cache.region.invalidate()
...
|
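The fix above narrows the fixture from session scope to per-test and invalidates the cache region after each test, so values cached by one test can no longer leak into the next and cause order-dependent, intermittent failures. A self-contained sketch of that per-test setup/teardown shape, using a plain dict as a stand-in for the cache (hypothetical, not the fmn API):

import pytest

_cache = {}

@pytest.fixture(autouse=True)
def clean_cache():
    # Configure lazily, hand control to the test, then drop whatever it cached.
    _cache.setdefault('configured', True)
    yield
    _cache.clear()

def test_cache_is_isolated():
    _cache['value'] = 42
    assert _cache['value'] == 42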
e050411af032f62ce4e4f5775b13607e3c754e4a
|
meta.h
|
meta.h
|
struct Meta {
char const* title;
char const* artist;
};
struct Meta retrieve_meta(AVFormatContext* ctx);
|
struct Meta {
const char const* title;
const char const* artist;
};
struct Meta retrieve_meta(AVFormatContext* ctx);
|
Fix incorrect const pointer marking.
|
Fix incorrect const pointer marking.
|
C
|
mit
|
bakape/thumbnailer,bakape/thumbnailer
|
c
|
## Code Before:
struct Meta {
char const* title;
char const* artist;
};
struct Meta retrieve_meta(AVFormatContext* ctx);
## Instruction:
Fix incorrect const pointer marking.
## Code After:
struct Meta {
const char const* title;
const char const* artist;
};
struct Meta retrieve_meta(AVFormatContext* ctx);
|
# ... existing code ...
struct Meta {
const char const* title;
const char const* artist;
};
struct Meta retrieve_meta(AVFormatContext* ctx);
# ... rest of the code ...
|
610c36d3e6b6f9ef92cd9729f180415a3369ceae
|
components/clk/src/clk.c
|
components/clk/src/clk.c
|
/*
* Copyright 2014, NICTA
*
* This software may be distributed and modified according to the terms of
* the BSD 2-Clause license. Note that NO WARRANTY is provided.
* See "LICENSE_BSD2.txt" for details.
*
* @TAG(NICTA_BSD)
*/
#include <stdint.h>
#include <platsupport/clock.h>
#include <clk.h>
clock_sys_t clock_sys;
unsigned int clktree_get_spi1_freq(void){
clk_t* clk;
clk = clk_get_clock(&clock_sys, CLK_SPI1);
return clk_get_freq(clk);
}
unsigned int clktree_set_spi1_freq(unsigned int rate){
clk_t* clk;
clk = clk_get_clock(&clock_sys, CLK_SPI1);
return clk_set_freq(clk, rate);
}
void clktree__init(void){
int err;
err = exynos5_clock_sys_init(cmu_cpu_clk,
cmu_core_clk,
NULL,
NULL,
cmu_top_clk,
NULL,
NULL,
NULL,
NULL,
&clock_sys);
assert(!err);
if(err){
printf("Failed to initialise clock tree\n");
}
}
|
/*
* Copyright 2014, NICTA
*
* This software may be distributed and modified according to the terms of
* the BSD 2-Clause license. Note that NO WARRANTY is provided.
* See "LICENSE_BSD2.txt" for details.
*
* @TAG(NICTA_BSD)
*/
#include <stdint.h>
#include <platsupport/clock.h>
#include <clk.h>
clock_sys_t clock_sys;
unsigned int clktree_get_spi1_freq(void){
clk_t* clk;
clk = clk_get_clock(&clock_sys, CLK_SPI1);
return clk_get_freq(clk);
}
unsigned int clktree_set_spi1_freq(unsigned int rate){
clk_t* clk;
clk = clk_get_clock(&clock_sys, CLK_SPI1);
return clk_set_freq(clk, rate);
}
void clktree__init(void){
int err;
err = exynos5_clock_sys_init(cmu_cpu_clk,
cmu_core_clk,
NULL,
NULL,
cmu_top_clk,
NULL,
NULL,
NULL,
NULL,
NULL,
&clock_sys);
assert(!err);
if(err){
printf("Failed to initialise clock tree\n");
}
}
|
Fix due to the changes in libplatsupport.
|
Fix due to the changes in libplatsupport.
|
C
|
bsd-2-clause
|
smaccm/camkes-apps-DARPA--devel
|
c
|
## Code Before:
/*
* Copyright 2014, NICTA
*
* This software may be distributed and modified according to the terms of
* the BSD 2-Clause license. Note that NO WARRANTY is provided.
* See "LICENSE_BSD2.txt" for details.
*
* @TAG(NICTA_BSD)
*/
#include <stdint.h>
#include <platsupport/clock.h>
#include <clk.h>
clock_sys_t clock_sys;
unsigned int clktree_get_spi1_freq(void){
clk_t* clk;
clk = clk_get_clock(&clock_sys, CLK_SPI1);
return clk_get_freq(clk);
}
unsigned int clktree_set_spi1_freq(unsigned int rate){
clk_t* clk;
clk = clk_get_clock(&clock_sys, CLK_SPI1);
return clk_set_freq(clk, rate);
}
void clktree__init(void){
int err;
err = exynos5_clock_sys_init(cmu_cpu_clk,
cmu_core_clk,
NULL,
NULL,
cmu_top_clk,
NULL,
NULL,
NULL,
NULL,
&clock_sys);
assert(!err);
if(err){
printf("Failed to initialise clock tree\n");
}
}
## Instruction:
Fix due to the changes in libplatsupport.
## Code After:
/*
* Copyright 2014, NICTA
*
* This software may be distributed and modified according to the terms of
* the BSD 2-Clause license. Note that NO WARRANTY is provided.
* See "LICENSE_BSD2.txt" for details.
*
* @TAG(NICTA_BSD)
*/
#include <stdint.h>
#include <platsupport/clock.h>
#include <clk.h>
clock_sys_t clock_sys;
unsigned int clktree_get_spi1_freq(void){
clk_t* clk;
clk = clk_get_clock(&clock_sys, CLK_SPI1);
return clk_get_freq(clk);
}
unsigned int clktree_set_spi1_freq(unsigned int rate){
clk_t* clk;
clk = clk_get_clock(&clock_sys, CLK_SPI1);
return clk_set_freq(clk, rate);
}
void clktree__init(void){
int err;
err = exynos5_clock_sys_init(cmu_cpu_clk,
cmu_core_clk,
NULL,
NULL,
cmu_top_clk,
NULL,
NULL,
NULL,
NULL,
NULL,
&clock_sys);
assert(!err);
if(err){
printf("Failed to initialise clock tree\n");
}
}
|
// ... existing code ...
NULL,
NULL,
NULL,
NULL,
&clock_sys);
assert(!err);
if(err){
// ... rest of the code ...
|
6b7102f6fa369ebe8e4e4b471284834656a1fed4
|
Wrapping/Java/vtk/rendering/vtkAbstractEventInterceptor.java
|
Wrapping/Java/vtk/rendering/vtkAbstractEventInterceptor.java
|
package vtk.rendering;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseMotionListener;
import java.awt.event.MouseWheelEvent;
/**
* This class implement vtkEventInterceptor with no event interception at all.
*
* @see {@link MouseMotionListener} {@link MouseListener} {@link MouseWheelListener}
* {@link KeyListener}
*
* @author Sebastien Jourdain - [email protected], Kitware Inc 2013
*/
public class vtkAbstractEventInterceptor implements vtkEventInterceptor {
@Override
public boolean keyPressed(KeyEvent e) {
return false;
}
@Override
public boolean keyReleased(KeyEvent e) {
return false;
}
@Override
public boolean keyTyped(KeyEvent e) {
return false;
}
@Override
public boolean mouseDragged(MouseEvent e) {
return false;
}
@Override
public boolean mouseMoved(MouseEvent e) {
return false;
}
@Override
public boolean mouseClicked(MouseEvent e) {
return false;
}
@Override
public boolean mouseEntered(MouseEvent e) {
return false;
}
@Override
public boolean mouseExited(MouseEvent e) {
return false;
}
@Override
public boolean mousePressed(MouseEvent e) {
return false;
}
@Override
public boolean mouseReleased(MouseEvent e) {
return false;
}
@Override
public boolean mouseWheelMoved(MouseWheelEvent e) {
return false;
}
}
|
package vtk.rendering;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseMotionListener;
import java.awt.event.MouseWheelEvent;
/**
* This class implement vtkEventInterceptor with no event interception at all.
*
* @see {@link MouseMotionListener} {@link MouseListener} {@link MouseWheelListener}
* {@link KeyListener}
*
* @author Sebastien Jourdain - [email protected], Kitware Inc 2013
*/
public class vtkAbstractEventInterceptor implements vtkEventInterceptor {
public boolean keyPressed(KeyEvent e) {
return false;
}
public boolean keyReleased(KeyEvent e) {
return false;
}
public boolean keyTyped(KeyEvent e) {
return false;
}
public boolean mouseDragged(MouseEvent e) {
return false;
}
public boolean mouseMoved(MouseEvent e) {
return false;
}
public boolean mouseClicked(MouseEvent e) {
return false;
}
public boolean mouseEntered(MouseEvent e) {
return false;
}
public boolean mouseExited(MouseEvent e) {
return false;
}
public boolean mousePressed(MouseEvent e) {
return false;
}
public boolean mouseReleased(MouseEvent e) {
return false;
}
public boolean mouseWheelMoved(MouseWheelEvent e) {
return false;
}
}
|
Remove @Override annotation to prevent compilation issue on old Java compiler
|
Remove @Override annotation to prevent compilation issue on old Java compiler
Change-Id: I9ed0e8423f5ae028294d773799979865e0dd4e8f
|
Java
|
bsd-3-clause
|
berendkleinhaneveld/VTK,mspark93/VTK,berendkleinhaneveld/VTK,gram526/VTK,mspark93/VTK,candy7393/VTK,mspark93/VTK,berendkleinhaneveld/VTK,demarle/VTK,SimVascular/VTK,mspark93/VTK,sumedhasingla/VTK,hendradarwin/VTK,jmerkow/VTK,sumedhasingla/VTK,mspark93/VTK,hendradarwin/VTK,gram526/VTK,candy7393/VTK,sumedhasingla/VTK,johnkit/vtk-dev,sankhesh/VTK,johnkit/vtk-dev,jmerkow/VTK,SimVascular/VTK,demarle/VTK,sumedhasingla/VTK,mspark93/VTK,msmolens/VTK,mspark93/VTK,gram526/VTK,berendkleinhaneveld/VTK,ashray/VTK-EVM,msmolens/VTK,ashray/VTK-EVM,SimVascular/VTK,johnkit/vtk-dev,ashray/VTK-EVM,ashray/VTK-EVM,demarle/VTK,SimVascular/VTK,msmolens/VTK,jmerkow/VTK,gram526/VTK,SimVascular/VTK,sankhesh/VTK,keithroe/vtkoptix,hendradarwin/VTK,hendradarwin/VTK,keithroe/vtkoptix,jmerkow/VTK,sankhesh/VTK,msmolens/VTK,ashray/VTK-EVM,johnkit/vtk-dev,sumedhasingla/VTK,msmolens/VTK,candy7393/VTK,candy7393/VTK,johnkit/vtk-dev,berendkleinhaneveld/VTK,keithroe/vtkoptix,msmolens/VTK,SimVascular/VTK,berendkleinhaneveld/VTK,johnkit/vtk-dev,johnkit/vtk-dev,sankhesh/VTK,demarle/VTK,keithroe/vtkoptix,SimVascular/VTK,gram526/VTK,gram526/VTK,demarle/VTK,sankhesh/VTK,ashray/VTK-EVM,sumedhasingla/VTK,hendradarwin/VTK,candy7393/VTK,jmerkow/VTK,sumedhasingla/VTK,demarle/VTK,ashray/VTK-EVM,msmolens/VTK,sankhesh/VTK,gram526/VTK,msmolens/VTK,sumedhasingla/VTK,jmerkow/VTK,keithroe/vtkoptix,gram526/VTK,sankhesh/VTK,SimVascular/VTK,hendradarwin/VTK,berendkleinhaneveld/VTK,demarle/VTK,jmerkow/VTK,keithroe/vtkoptix,mspark93/VTK,keithroe/vtkoptix,candy7393/VTK,ashray/VTK-EVM,keithroe/vtkoptix,sankhesh/VTK,jmerkow/VTK,candy7393/VTK,demarle/VTK,hendradarwin/VTK,candy7393/VTK
|
java
|
## Code Before:
package vtk.rendering;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseMotionListener;
import java.awt.event.MouseWheelEvent;
/**
* This class implement vtkEventInterceptor with no event interception at all.
*
* @see {@link MouseMotionListener} {@link MouseListener} {@link MouseWheelListener}
* {@link KeyListener}
*
* @author Sebastien Jourdain - [email protected], Kitware Inc 2013
*/
public class vtkAbstractEventInterceptor implements vtkEventInterceptor {
@Override
public boolean keyPressed(KeyEvent e) {
return false;
}
@Override
public boolean keyReleased(KeyEvent e) {
return false;
}
@Override
public boolean keyTyped(KeyEvent e) {
return false;
}
@Override
public boolean mouseDragged(MouseEvent e) {
return false;
}
@Override
public boolean mouseMoved(MouseEvent e) {
return false;
}
@Override
public boolean mouseClicked(MouseEvent e) {
return false;
}
@Override
public boolean mouseEntered(MouseEvent e) {
return false;
}
@Override
public boolean mouseExited(MouseEvent e) {
return false;
}
@Override
public boolean mousePressed(MouseEvent e) {
return false;
}
@Override
public boolean mouseReleased(MouseEvent e) {
return false;
}
@Override
public boolean mouseWheelMoved(MouseWheelEvent e) {
return false;
}
}
## Instruction:
Remove @Override annotation to prevent compilation issue on old Java compiler
Change-Id: I9ed0e8423f5ae028294d773799979865e0dd4e8f
## Code After:
package vtk.rendering;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseMotionListener;
import java.awt.event.MouseWheelEvent;
/**
* This class implement vtkEventInterceptor with no event interception at all.
*
* @see {@link MouseMotionListener} {@link MouseListener} {@link MouseWheelListener}
* {@link KeyListener}
*
* @author Sebastien Jourdain - [email protected], Kitware Inc 2013
*/
public class vtkAbstractEventInterceptor implements vtkEventInterceptor {
public boolean keyPressed(KeyEvent e) {
return false;
}
public boolean keyReleased(KeyEvent e) {
return false;
}
public boolean keyTyped(KeyEvent e) {
return false;
}
public boolean mouseDragged(MouseEvent e) {
return false;
}
public boolean mouseMoved(MouseEvent e) {
return false;
}
public boolean mouseClicked(MouseEvent e) {
return false;
}
public boolean mouseEntered(MouseEvent e) {
return false;
}
public boolean mouseExited(MouseEvent e) {
return false;
}
public boolean mousePressed(MouseEvent e) {
return false;
}
public boolean mouseReleased(MouseEvent e) {
return false;
}
public boolean mouseWheelMoved(MouseWheelEvent e) {
return false;
}
}
|
...
public class vtkAbstractEventInterceptor implements vtkEventInterceptor {
public boolean keyPressed(KeyEvent e) {
return false;
}
public boolean keyReleased(KeyEvent e) {
return false;
}
public boolean keyTyped(KeyEvent e) {
return false;
}
public boolean mouseDragged(MouseEvent e) {
return false;
}
public boolean mouseMoved(MouseEvent e) {
return false;
}
public boolean mouseClicked(MouseEvent e) {
return false;
}
public boolean mouseEntered(MouseEvent e) {
return false;
}
public boolean mouseExited(MouseEvent e) {
return false;
}
public boolean mousePressed(MouseEvent e) {
return false;
}
public boolean mouseReleased(MouseEvent e) {
return false;
}
public boolean mouseWheelMoved(MouseWheelEvent e) {
return false;
}
...
|
3c3a697a525762df655ddaf1c0c91a2c06eb4a2b
|
cleanroom/Student.java
|
cleanroom/Student.java
|
import java.util.ArrayList;
import java.lang.*;
import java.math.BigDecimal;
public class Student {
int testVariable = 0;
int __cleanVariable = 0;
public int foo() {
return 4711 + __cleanVariable + testVariable;
}
public double bar() {
return 4711.0815 + testVariable;
}
public String baz() {
return "I am nice.";
}
public String foobar() {
return "I am dangerous.";
}
public static Object getNull() {
return null;
}
public static String doNull() {
return "";
}
public static void recur(int i) {
if(i > 0)
recur(i-1);
}
public static void recur(int i, double d) {
if(i > 0)
recur(i-1);
}
public static void ioob() {
int a[] = new int[10];
a[a.length + 32]++;
}
public Student() {
}
public Student(int x) {
}
}
|
import java.util.ArrayList;
import java.lang.*;
import java.math.BigDecimal;
public class Student {
int testVariable = 0;
int __cleanVariable = 1;
public int foo() {
int ret = 4711 + testVariable;
__cleanIteratorClass iterator = new __cleanIteratorClass();
return ret + iterator.getCleanVar();
}
public double bar() {
return 4711.0815 + testVariable;
}
public String baz() {
return "I am nice.";
}
public String foobar() {
return "I am dangerous.";
}
public static Object getNull() {
return null;
}
public static String doNull() {
return "";
}
public static void recur(int i) {
if(i > 0)
recur(i-1);
}
public static void recur(int i, double d) {
if(i > 0)
recur(i-1);
}
public static void ioob() {
int a[] = new int[10];
a[a.length + 32]++;
}
public Student() {
}
public Student(int x) {
}
private class __cleanIteratorClass {
public int getCleanVar() {
return __cleanVariable - 23;
}
}
}
|
Change cleanroom to use inner class.
|
Change cleanroom to use inner class.
|
Java
|
apache-2.0
|
FAU-Inf2/AuDoscore,FAU-Inf2/AuDoscore
|
java
|
## Code Before:
import java.util.ArrayList;
import java.lang.*;
import java.math.BigDecimal;
public class Student {
int testVariable = 0;
int __cleanVariable = 0;
public int foo() {
return 4711 + __cleanVariable + testVariable;
}
public double bar() {
return 4711.0815 + testVariable;
}
public String baz() {
return "I am nice.";
}
public String foobar() {
return "I am dangerous.";
}
public static Object getNull() {
return null;
}
public static String doNull() {
return "";
}
public static void recur(int i) {
if(i > 0)
recur(i-1);
}
public static void recur(int i, double d) {
if(i > 0)
recur(i-1);
}
public static void ioob() {
int a[] = new int[10];
a[a.length + 32]++;
}
public Student() {
}
public Student(int x) {
}
}
## Instruction:
Change cleanroom to use inner class.
## Code After:
import java.util.ArrayList;
import java.lang.*;
import java.math.BigDecimal;
public class Student {
int testVariable = 0;
int __cleanVariable = 1;
public int foo() {
int ret = 4711 + testVariable;
__cleanIteratorClass iterator = new __cleanIteratorClass();
return ret + iterator.getCleanVar();
}
public double bar() {
return 4711.0815 + testVariable;
}
public String baz() {
return "I am nice.";
}
public String foobar() {
return "I am dangerous.";
}
public static Object getNull() {
return null;
}
public static String doNull() {
return "";
}
public static void recur(int i) {
if(i > 0)
recur(i-1);
}
public static void recur(int i, double d) {
if(i > 0)
recur(i-1);
}
public static void ioob() {
int a[] = new int[10];
a[a.length + 32]++;
}
public Student() {
}
public Student(int x) {
}
private class __cleanIteratorClass {
public int getCleanVar() {
return __cleanVariable - 23;
}
}
}
|
...
import java.math.BigDecimal;
public class Student {
int testVariable = 0;
int __cleanVariable = 1;
public int foo() {
int ret = 4711 + testVariable;
__cleanIteratorClass iterator = new __cleanIteratorClass();
return ret + iterator.getCleanVar();
}
public double bar() {
...
public Student(int x) {
}
private class __cleanIteratorClass {
public int getCleanVar() {
return __cleanVariable - 23;
}
}
}
...
|
09d78bb23ffba9d1d709a3ba5cbabbe84a9b1978
|
server/macros/currency_usd_to_cad.py
|
server/macros/currency_usd_to_cad.py
|
import os
import re
import requests
USD_TO_CAD = 1.3139 # backup
def get_rate():
"""Get USD to CAD rate."""
try:
r = requests.get('http://download.finance.yahoo.com/d/quotes.csv?s=USDCAD=X&f=nl1d1', timeout=5)
return float(r.text.split(',')[1])
except Exception:
return USD_TO_CAD
def usd_to_cad(item, **kwargs):
"""Convert USD to CAD."""
rate = get_rate()
if os.environ.get('BEHAVE_TESTING'):
rate = USD_TO_CAD
def convert(match):
usd = float(match.group(1))
cad = rate * usd
return 'CAD %d' % cad
item['body_html'] = re.sub('\$([0-9]+)', convert, item['body_html'])
return item
name = 'usd_to_cad'
label = 'Convert USD to CAD'
shortcut = 'd'
callback = usd_to_cad
desks = ['SPORTS DESK', 'POLITICS']
|
import os
import re
import requests
USD_TO_CAD = 1.3139 # backup
def get_rate():
"""Get USD to CAD rate."""
try:
r = requests.get('http://download.finance.yahoo.com/d/quotes.csv?s=USDCAD=X&f=nl1d1', timeout=5)
return float(r.text.split(',')[1])
except Exception:
return USD_TO_CAD
def usd_to_cad(item, **kwargs):
"""Convert USD to CAD."""
rate = get_rate()
if os.environ.get('BEHAVE_TESTING'):
rate = USD_TO_CAD
def convert(match):
usd = float(match.group(1))
cad = rate * usd
return 'CAD %d' % cad
item['body_html'] = re.sub('\$([0-9]+)', convert, item['body_html'])
return item
name = 'usd_to_cad'
label = 'Convert USD to CAD'
shortcut = 'd'
callback = usd_to_cad
|
Delete the desks settings for macro
|
fix(macro): Delete the desks settings for macro
|
Python
|
agpl-3.0
|
pavlovicnemanja/superdesk,amagdas/superdesk,verifiedpixel/superdesk,mdhaman/superdesk-aap,sivakuna-aap/superdesk,plamut/superdesk,marwoodandrew/superdesk,petrjasek/superdesk,fritzSF/superdesk,petrjasek/superdesk-ntb,verifiedpixel/superdesk,marwoodandrew/superdesk,superdesk/superdesk-aap,verifiedpixel/superdesk,superdesk/superdesk,akintolga/superdesk,sivakuna-aap/superdesk,sivakuna-aap/superdesk,liveblog/superdesk,pavlovicnemanja92/superdesk,superdesk/superdesk-ntb,ancafarcas/superdesk,hlmnrmr/superdesk,mdhaman/superdesk-aap,sivakuna-aap/superdesk,akintolga/superdesk,marwoodandrew/superdesk-aap,ioanpocol/superdesk,liveblog/superdesk,pavlovicnemanja/superdesk,darconny/superdesk,fritzSF/superdesk,pavlovicnemanja/superdesk,pavlovicnemanja92/superdesk,amagdas/superdesk,liveblog/superdesk,thnkloud9/superdesk,superdesk/superdesk,pavlovicnemanja/superdesk,plamut/superdesk,mdhaman/superdesk,darconny/superdesk,akintolga/superdesk-aap,amagdas/superdesk,liveblog/superdesk,gbbr/superdesk,petrjasek/superdesk,superdesk/superdesk-ntb,hlmnrmr/superdesk,verifiedpixel/superdesk,superdesk/superdesk-aap,superdesk/superdesk-aap,marwoodandrew/superdesk-aap,pavlovicnemanja92/superdesk,hlmnrmr/superdesk,ioanpocol/superdesk-ntb,marwoodandrew/superdesk-aap,mdhaman/superdesk-aap,ancafarcas/superdesk,verifiedpixel/superdesk,marwoodandrew/superdesk,petrjasek/superdesk-ntb,mdhaman/superdesk,fritzSF/superdesk,pavlovicnemanja92/superdesk,mdhaman/superdesk,akintolga/superdesk-aap,plamut/superdesk,petrjasek/superdesk-ntb,sjunaid/superdesk,akintolga/superdesk-aap,amagdas/superdesk,mdhaman/superdesk-aap,ioanpocol/superdesk,gbbr/superdesk,Aca-jov/superdesk,akintolga/superdesk,marwoodandrew/superdesk,plamut/superdesk,darconny/superdesk,marwoodandrew/superdesk,amagdas/superdesk,fritzSF/superdesk,ioanpocol/superdesk-ntb,sjunaid/superdesk,ancafarcas/superdesk,marwoodandrew/superdesk-aap,petrjasek/superdesk-ntb,mugurrus/superdesk,superdesk/superdesk-aap,gbbr/superdesk,Aca-jov/superdesk,thnkloud9/superdesk,superdesk/superdesk-ntb,superdesk/superdesk-ntb,Aca-jov/superdesk,superdesk/superdesk,thnkloud9/superdesk,akintolga/superdesk,ioanpocol/superdesk,akintolga/superdesk-aap,mugurrus/superdesk,sivakuna-aap/superdesk,mugurrus/superdesk,superdesk/superdesk,liveblog/superdesk,akintolga/superdesk,petrjasek/superdesk,fritzSF/superdesk,sjunaid/superdesk,plamut/superdesk,petrjasek/superdesk,pavlovicnemanja92/superdesk,ioanpocol/superdesk-ntb
|
python
|
## Code Before:
import os
import re
import requests
USD_TO_CAD = 1.3139 # backup
def get_rate():
"""Get USD to CAD rate."""
try:
r = requests.get('http://download.finance.yahoo.com/d/quotes.csv?s=USDCAD=X&f=nl1d1', timeout=5)
return float(r.text.split(',')[1])
except Exception:
return USD_TO_CAD
def usd_to_cad(item, **kwargs):
"""Convert USD to CAD."""
rate = get_rate()
if os.environ.get('BEHAVE_TESTING'):
rate = USD_TO_CAD
def convert(match):
usd = float(match.group(1))
cad = rate * usd
return 'CAD %d' % cad
item['body_html'] = re.sub('\$([0-9]+)', convert, item['body_html'])
return item
name = 'usd_to_cad'
label = 'Convert USD to CAD'
shortcut = 'd'
callback = usd_to_cad
desks = ['SPORTS DESK', 'POLITICS']
## Instruction:
fix(macro): Delete the desks settings for macro
## Code After:
import os
import re
import requests
USD_TO_CAD = 1.3139 # backup
def get_rate():
"""Get USD to CAD rate."""
try:
r = requests.get('http://download.finance.yahoo.com/d/quotes.csv?s=USDCAD=X&f=nl1d1', timeout=5)
return float(r.text.split(',')[1])
except Exception:
return USD_TO_CAD
def usd_to_cad(item, **kwargs):
"""Convert USD to CAD."""
rate = get_rate()
if os.environ.get('BEHAVE_TESTING'):
rate = USD_TO_CAD
def convert(match):
usd = float(match.group(1))
cad = rate * usd
return 'CAD %d' % cad
item['body_html'] = re.sub('\$([0-9]+)', convert, item['body_html'])
return item
name = 'usd_to_cad'
label = 'Convert USD to CAD'
shortcut = 'd'
callback = usd_to_cad
|
# ... existing code ...
label = 'Convert USD to CAD'
shortcut = 'd'
callback = usd_to_cad
# ... rest of the code ...
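A minimal usage sketch for the macro above (hypothetical item data; the printed result assumes the backup rate of 1.3139, as used when BEHAVE_TESTING is set):
item = {'body_html': 'Tickets cost $20 at the door.'}
converted = usd_to_cad(item)
print(converted['body_html'])  # 'Tickets cost CAD 26 at the door.' with the backup rate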
|
2a852c3ca1ff30cb02740f7934d97c1fe2da3bbe
|
compress.py
|
compress.py
|
class Compress():
"""Compress"""
def encode(self, string):
"""Encodes string to byte representation"""
return b'0'
def decode(self, byteString):
"""Decodes bytes into a text string"""
return ""
|
import Queue as queue
class HuffmanNode:
"""Node in the Huffman coding tree"""
def __init__(self, symbol, freq):
self.parent = None
self.children = []
self.symbol = symbol
self.freq = freq
def set_parent(self, node):
node.add_child(self)
self.parent = node
def add_child(self, node):
self.children.append(node)
def is_leaf(self):
return len(self.children) == 0
class Compress:
"""Compress"""
def __init__(self):
self.word_list = []
self.huffman_tree = None
self.codeWordlist()
self.build_huffman_tree()
def codeWordlist(self):
wordfile = open('words256.txt', 'r')
for line in wordfile.readlines():
self.word_list.append(line.strip())
wordfile.close()
def build_huffman_tree(self):
fake_freq = 0.5
nodes = []
for word in self.word_list:
node = HuffmanNode(word, fake_freq)
fake_freq *= fake_freq
nodes.append(node)
priorityq = queue.PriorityQueue()
for node in nodes:
priorityq.put((node.freq, node))
while(priorityq.qsize() > 1):
n1 = priorityq.get()[1]
n2 = priorityq.get()[1]
parent = HuffmanNode("", n1.freq + n2.freq)
n1.set_parent(parent)
n2.set_parent(parent)
priorityq.put((parent.freq, parent))
self.huffman_tree = priorityq.get()[1]
def encode(self, string):
"""Encodes string to byte representation"""
return b'0'
def decode(self, byteString):
"""Decodes bytes into a text string"""
return ""
if __name__ == '__main__':
c = Compress()
|
Build tree for huffman coding
|
Build tree for huffman coding
|
Python
|
apache-2.0
|
rylans/text-compression-english
|
python
|
## Code Before:
class Compress():
"""Compress"""
def encode(self, string):
"""Encodes string to byte representation"""
return b'0'
def decode(self, byteString):
"""Decodes bytes into a text string"""
return ""
## Instruction:
Build tree for huffman coding
## Code After:
import Queue as queue
class HuffmanNode:
"""Node in the Huffman coding tree"""
def __init__(self, symbol, freq):
self.parent = None
self.children = []
self.symbol = symbol
self.freq = freq
def set_parent(self, node):
node.add_child(self)
self.parent = node
def add_child(self, node):
self.children.append(node)
def is_leaf(self):
return len(self.children) == 0
class Compress:
"""Compress"""
def __init__(self):
self.word_list = []
self.huffman_tree = None
self.codeWordlist()
self.build_huffman_tree()
def codeWordlist(self):
wordfile = open('words256.txt', 'r')
for line in wordfile.readlines():
self.word_list.append(line.strip())
wordfile.close()
def build_huffman_tree(self):
fake_freq = 0.5
nodes = []
for word in self.word_list:
node = HuffmanNode(word, fake_freq)
fake_freq *= fake_freq
nodes.append(node)
priorityq = queue.PriorityQueue()
for node in nodes:
priorityq.put((node.freq, node))
while(priorityq.qsize() > 1):
n1 = priorityq.get()[1]
n2 = priorityq.get()[1]
parent = HuffmanNode("", n1.freq + n2.freq)
n1.set_parent(parent)
n2.set_parent(parent)
priorityq.put((parent.freq, parent))
self.huffman_tree = priorityq.get()[1]
def encode(self, string):
"""Encodes string to byte representation"""
return b'0'
def decode(self, byteString):
"""Decodes bytes into a text string"""
return ""
if __name__ == '__main__':
c = Compress()
|
...
import Queue as queue
class HuffmanNode:
"""Node in the Huffman coding tree"""
def __init__(self, symbol, freq):
self.parent = None
self.children = []
self.symbol = symbol
self.freq = freq
def set_parent(self, node):
node.add_child(self)
self.parent = node
def add_child(self, node):
self.children.append(node)
def is_leaf(self):
return len(self.children) == 0
class Compress:
"""Compress"""
def __init__(self):
self.word_list = []
self.huffman_tree = None
self.codeWordlist()
self.build_huffman_tree()
def codeWordlist(self):
wordfile = open('words256.txt', 'r')
for line in wordfile.readlines():
self.word_list.append(line.strip())
wordfile.close()
def build_huffman_tree(self):
fake_freq = 0.5
nodes = []
for word in self.word_list:
node = HuffmanNode(word, fake_freq)
fake_freq *= fake_freq
nodes.append(node)
priorityq = queue.PriorityQueue()
for node in nodes:
priorityq.put((node.freq, node))
while(priorityq.qsize() > 1):
n1 = priorityq.get()[1]
n2 = priorityq.get()[1]
parent = HuffmanNode("", n1.freq + n2.freq)
n1.set_parent(parent)
n2.set_parent(parent)
priorityq.put((parent.freq, parent))
self.huffman_tree = priorityq.get()[1]
def encode(self, string):
"""Encodes string to byte representation"""
return b'0'
...
def decode(self, byteString):
"""Decodes bytes into a text string"""
return ""
if __name__ == '__main__':
c = Compress()
...
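A small standalone sketch of the same priority-queue merge loop (hypothetical frequencies, Python 2 to match the record's Queue import; it sidesteps the words256.txt dependency):
import Queue as queue

# three hand-made leaves instead of the 256-word list
nodes = [HuffmanNode('a', 0.55), HuffmanNode('b', 0.25), HuffmanNode('c', 0.20)]
pq = queue.PriorityQueue()
for n in nodes:
    pq.put((n.freq, n))
while pq.qsize() > 1:
    f1, n1 = pq.get()
    f2, n2 = pq.get()
    parent = HuffmanNode('', f1 + f2)
    n1.set_parent(parent)
    n2.set_parent(parent)
    pq.put((parent.freq, parent))
root = pq.get()[1]
print root.freq       # 1.0
print root.is_leaf()  # False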
|
d3b526c5079dc61d3bb8a80363c9448de07da331
|
fabfile.py
|
fabfile.py
|
from fabric.api import *
env.runtime = 'production'
env.hosts = ['chimera.ericholscher.com']
env.user = 'docs'
env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org'
env.virtualenv = '/home/docs/sites/readthedocs.org'
env.rundir = '/home/docs/sites/readthedocs.org/run'
def update_requirements():
"Update requirements in the virtualenv."
run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir))
def push():
"Push new code, but don't restart/reload."
local('git push origin master')
with cd(env.code_dir):
run('git pull origin master')
def pull():
"Pull new code"
with cd(env.code_dir):
run('git pull origin master')
def restart():
"Restart (or just start) the server"
env.user = "root"
run("restart readthedocs-gunicorn")
def celery():
"Restart (or just start) the server"
env.user = "root"
run("restart readthedocs-celery")
def migrate(project=None):
if project:
run('django-admin.py migrate %s' % project)
else:
run('django-admin.py migrate')
|
from fabric.api import *
env.runtime = 'production'
env.hosts = ['chimera.ericholscher.com']
env.user = 'docs'
env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org'
env.virtualenv = '/home/docs/sites/readthedocs.org'
env.rundir = '/home/docs/sites/readthedocs.org/run'
def push():
"Push new code, but don't restart/reload."
local('git push origin master')
with cd(env.code_dir):
run('git pull origin master')
def update_requirements():
"Update requirements in the virtualenv."
run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir))
def migrate(project=None):
if project:
run('django-admin.py migrate %s' % project)
else:
run('django-admin.py migrate')
def restart():
"Restart (or just start) the server"
env.user = "root"
run("restart readthedocs-gunicorn")
def celery():
"Restart (or just start) the server"
env.user = "root"
run("restart readthedocs-celery")
def pull():
"Pull new code"
with cd(env.code_dir):
run('git pull origin master')
def full_deploy():
push()
update_requirements()
migrate()
restart()
celery()
|
Make it easy to do a full deploy with fab
|
Make it easy to do a full deploy with fab
|
Python
|
mit
|
cgourlay/readthedocs.org,sunnyzwh/readthedocs.org,attakei/readthedocs-oauth,davidfischer/readthedocs.org,nikolas/readthedocs.org,fujita-shintaro/readthedocs.org,Tazer/readthedocs.org,techtonik/readthedocs.org,laplaceliu/readthedocs.org,jerel/readthedocs.org,johncosta/private-readthedocs.org,stevepiercy/readthedocs.org,mrshoki/readthedocs.org,michaelmcandrew/readthedocs.org,royalwang/readthedocs.org,cgourlay/readthedocs.org,tddv/readthedocs.org,wanghaven/readthedocs.org,LukasBoersma/readthedocs.org,fujita-shintaro/readthedocs.org,Tazer/readthedocs.org,raven47git/readthedocs.org,ojii/readthedocs.org,asampat3090/readthedocs.org,espdev/readthedocs.org,davidfischer/readthedocs.org,KamranMackey/readthedocs.org,rtfd/readthedocs.org,CedarLogic/readthedocs.org,agjohnson/readthedocs.org,kdkeyser/readthedocs.org,laplaceliu/readthedocs.org,d0ugal/readthedocs.org,cgourlay/readthedocs.org,Carreau/readthedocs.org,atsuyim/readthedocs.org,jerel/readthedocs.org,nikolas/readthedocs.org,singingwolfboy/readthedocs.org,wijerasa/readthedocs.org,royalwang/readthedocs.org,mhils/readthedocs.org,takluyver/readthedocs.org,kenshinthebattosai/readthedocs.org,stevepiercy/readthedocs.org,kdkeyser/readthedocs.org,laplaceliu/readthedocs.org,safwanrahman/readthedocs.org,titiushko/readthedocs.org,cgourlay/readthedocs.org,emawind84/readthedocs.org,mhils/readthedocs.org,SteveViss/readthedocs.org,alex/readthedocs.org,nikolas/readthedocs.org,d0ugal/readthedocs.org,stevepiercy/readthedocs.org,dirn/readthedocs.org,soulshake/readthedocs.org,VishvajitP/readthedocs.org,mhils/readthedocs.org,stevepiercy/readthedocs.org,alex/readthedocs.org,GovReady/readthedocs.org,agjohnson/readthedocs.org,attakei/readthedocs-oauth,raven47git/readthedocs.org,KamranMackey/readthedocs.org,GovReady/readthedocs.org,tddv/readthedocs.org,gjtorikian/readthedocs.org,espdev/readthedocs.org,jerel/readthedocs.org,ojii/readthedocs.org,alex/readthedocs.org,istresearch/readthedocs.org,Tazer/readthedocs.org,soulshake/readthedocs.org,fujita-shintaro/readthedocs.org,takluyver/readthedocs.org,nyergler/pythonslides,mhils/readthedocs.org,hach-que/readthedocs.org,wijerasa/readthedocs.org,sils1297/readthedocs.org,gjtorikian/readthedocs.org,titiushko/readthedocs.org,Carreau/readthedocs.org,dirn/readthedocs.org,jerel/readthedocs.org,kdkeyser/readthedocs.org,takluyver/readthedocs.org,CedarLogic/readthedocs.org,clarkperkins/readthedocs.org,GovReady/readthedocs.org,kdkeyser/readthedocs.org,safwanrahman/readthedocs.org,istresearch/readthedocs.org,espdev/readthedocs.org,d0ugal/readthedocs.org,rtfd/readthedocs.org,hach-que/readthedocs.org,nyergler/pythonslides,royalwang/readthedocs.org,dirn/readthedocs.org,sunnyzwh/readthedocs.org,techtonik/readthedocs.org,royalwang/readthedocs.org,agjohnson/readthedocs.org,asampat3090/readthedocs.org,gjtorikian/readthedocs.org,davidfischer/readthedocs.org,soulshake/readthedocs.org,atsuyim/readthedocs.org,clarkperkins/readthedocs.org,kenwang76/readthedocs.org,nyergler/pythonslides,CedarLogic/readthedocs.org,atsuyim/readthedocs.org,kenwang76/readthedocs.org,sid-kap/readthedocs.org,safwanrahman/readthedocs.org,singingwolfboy/readthedocs.org,emawind84/readthedocs.org,hach-que/readthedocs.org,SteveViss/readthedocs.org,kenwang76/readthedocs.org,sunnyzwh/readthedocs.org,dirn/readthedocs.org,LukasBoersma/readthedocs.org,fujita-shintaro/readthedocs.org,nyergler/pythonslides,kenshinthebattosai/readthedocs.org,wanghaven/readthedocs.org,sils1297/readthedocs.org,mrshoki/readthedocs.org,johncosta/private-readthedocs.org,safwanrahman/readthedocs.org,istresearch/
readthedocs.org,d0ugal/readthedocs.org,sid-kap/readthedocs.org,tddv/readthedocs.org,sunnyzwh/readthedocs.org,Carreau/readthedocs.org,wanghaven/readthedocs.org,KamranMackey/readthedocs.org,ojii/readthedocs.org,kenshinthebattosai/readthedocs.org,wanghaven/readthedocs.org,atsuyim/readthedocs.org,davidfischer/readthedocs.org,soulshake/readthedocs.org,sid-kap/readthedocs.org,LukasBoersma/readthedocs.org,attakei/readthedocs-oauth,Carreau/readthedocs.org,agjohnson/readthedocs.org,titiushko/readthedocs.org,singingwolfboy/readthedocs.org,takluyver/readthedocs.org,sils1297/readthedocs.org,techtonik/readthedocs.org,alex/readthedocs.org,hach-que/readthedocs.org,VishvajitP/readthedocs.org,clarkperkins/readthedocs.org,ojii/readthedocs.org,SteveViss/readthedocs.org,mrshoki/readthedocs.org,kenwang76/readthedocs.org,pombredanne/readthedocs.org,Tazer/readthedocs.org,techtonik/readthedocs.org,kenshinthebattosai/readthedocs.org,GovReady/readthedocs.org,nikolas/readthedocs.org,michaelmcandrew/readthedocs.org,VishvajitP/readthedocs.org,titiushko/readthedocs.org,gjtorikian/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,asampat3090/readthedocs.org,wijerasa/readthedocs.org,pombredanne/readthedocs.org,CedarLogic/readthedocs.org,clarkperkins/readthedocs.org,emawind84/readthedocs.org,emawind84/readthedocs.org,KamranMackey/readthedocs.org,michaelmcandrew/readthedocs.org,SteveViss/readthedocs.org,VishvajitP/readthedocs.org,espdev/readthedocs.org,mrshoki/readthedocs.org,johncosta/private-readthedocs.org,singingwolfboy/readthedocs.org,sid-kap/readthedocs.org,raven47git/readthedocs.org,LukasBoersma/readthedocs.org,laplaceliu/readthedocs.org,michaelmcandrew/readthedocs.org,espdev/readthedocs.org,attakei/readthedocs-oauth,pombredanne/readthedocs.org,sils1297/readthedocs.org,asampat3090/readthedocs.org,raven47git/readthedocs.org,wijerasa/readthedocs.org,istresearch/readthedocs.org
|
python
|
## Code Before:
from fabric.api import *
env.runtime = 'production'
env.hosts = ['chimera.ericholscher.com']
env.user = 'docs'
env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org'
env.virtualenv = '/home/docs/sites/readthedocs.org'
env.rundir = '/home/docs/sites/readthedocs.org/run'
def update_requirements():
"Update requirements in the virtualenv."
run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir))
def push():
"Push new code, but don't restart/reload."
local('git push origin master')
with cd(env.code_dir):
run('git pull origin master')
def pull():
"Pull new code"
with cd(env.code_dir):
run('git pull origin master')
def restart():
"Restart (or just start) the server"
env.user = "root"
run("restart readthedocs-gunicorn")
def celery():
"Restart (or just start) the server"
env.user = "root"
run("restart readthedocs-celery")
def migrate(project=None):
if project:
run('django-admin.py migrate %s' % project)
else:
run('django-admin.py migrate')
## Instruction:
Make it easy to do a full deploy with fab
## Code After:
from fabric.api import *
env.runtime = 'production'
env.hosts = ['chimera.ericholscher.com']
env.user = 'docs'
env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org'
env.virtualenv = '/home/docs/sites/readthedocs.org'
env.rundir = '/home/docs/sites/readthedocs.org/run'
def push():
"Push new code, but don't restart/reload."
local('git push origin master')
with cd(env.code_dir):
run('git pull origin master')
def update_requirements():
"Update requirements in the virtualenv."
run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir))
def migrate(project=None):
if project:
run('django-admin.py migrate %s' % project)
else:
run('django-admin.py migrate')
def restart():
"Restart (or just start) the server"
env.user = "root"
run("restart readthedocs-gunicorn")
def celery():
"Restart (or just start) the server"
env.user = "root"
run("restart readthedocs-celery")
def pull():
"Pull new code"
with cd(env.code_dir):
run('git pull origin master')
def full_deploy():
push()
update_requirements()
migrate()
restart()
celery()
|
...
env.virtualenv = '/home/docs/sites/readthedocs.org'
env.rundir = '/home/docs/sites/readthedocs.org/run'
def push():
"Push new code, but don't restart/reload."
local('git push origin master')
...
with cd(env.code_dir):
run('git pull origin master')
def update_requirements():
"Update requirements in the virtualenv."
run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir))
def migrate(project=None):
if project:
run('django-admin.py migrate %s' % project)
else:
run('django-admin.py migrate')
def restart():
"Restart (or just start) the server"
...
env.user = "root"
run("restart readthedocs-celery")
def pull():
"Pull new code"
with cd(env.code_dir):
run('git pull origin master')
def full_deploy():
push()
update_requirements()
migrate()
restart()
celery()
...
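A hypothetical way to drive the new task from Python using Fabric 1.x's execute helper (the usual route is simply running `fab full_deploy` from a shell):
from fabric.api import execute
import fabfile

# runs push, update_requirements, migrate, restart and celery in sequence
execute(fabfile.full_deploy)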
|
093c9065de9e0e08f248bbb84696bf30309bd536
|
examples/parallel/timer.py
|
examples/parallel/timer.py
|
import rx
import concurrent.futures
import time
seconds = [5, 1, 2, 4, 3]
def sleep(t):
time.sleep(t)
return t
def output(result):
print '%d seconds' % result
with concurrent.futures.ProcessPoolExecutor(5) as executor:
rx.Observable.from_(seconds).flat_map(
lambda s: executor.submit(sleep, s)
).subscribe(output)
# 1 seconds
# 2 seconds
# 3 seconds
# 4 seconds
# 5 seconds
|
from __future__ import print_function
import rx
import concurrent.futures
import time
seconds = [5, 1, 2, 4, 3]
def sleep(t):
time.sleep(t)
return t
def output(result):
print('%d seconds' % result)
with concurrent.futures.ProcessPoolExecutor(5) as executor:
rx.Observable.from_(seconds).flat_map(
lambda s: executor.submit(sleep, s)
).subscribe(output)
# 1 seconds
# 2 seconds
# 3 seconds
# 4 seconds
# 5 seconds
|
Fix parallel example for Python 3
|
Fix parallel example for Python 3
|
Python
|
mit
|
dbrattli/RxPY,ReactiveX/RxPY,ReactiveX/RxPY
|
python
|
## Code Before:
import rx
import concurrent.futures
import time
seconds = [5, 1, 2, 4, 3]
def sleep(t):
time.sleep(t)
return t
def output(result):
print '%d seconds' % result
with concurrent.futures.ProcessPoolExecutor(5) as executor:
rx.Observable.from_(seconds).flat_map(
lambda s: executor.submit(sleep, s)
).subscribe(output)
# 1 seconds
# 2 seconds
# 3 seconds
# 4 seconds
# 5 seconds
## Instruction:
Fix parallel example for Python 3
## Code After:
from __future__ import print_function
import rx
import concurrent.futures
import time
seconds = [5, 1, 2, 4, 3]
def sleep(t):
time.sleep(t)
return t
def output(result):
print('%d seconds' % result)
with concurrent.futures.ProcessPoolExecutor(5) as executor:
rx.Observable.from_(seconds).flat_map(
lambda s: executor.submit(sleep, s)
).subscribe(output)
# 1 seconds
# 2 seconds
# 3 seconds
# 4 seconds
# 5 seconds
|
// ... existing code ...
from __future__ import print_function
import rx
import concurrent.futures
import time
// ... modified code ...
def output(result):
print('%d seconds' % result)
with concurrent.futures.ProcessPoolExecutor(5) as executor:
rx.Observable.from_(seconds).flat_map(
// ... rest of the code ...
|
1ed49dae9d88e1e277a0eef879dec53ed925417a
|
highlander/exceptions.py
|
highlander/exceptions.py
|
class InvalidPidFileError(Exception):
""" An exception when an invalid PID file is read."""
class PidFileExistsError(Exception):
""" An exception when a PID file already exists."""
|
class InvalidPidFileError(Exception):
""" An exception when an invalid PID file is read."""
class PidFileExistsError(Exception):
""" An exception when a PID file already exists."""
class InvalidPidDirectoryError(Exception):
""" An exception when an invalid PID directory is detected."""
|
Add a new exception since we are making a directory now.
|
Add a new exception since we are making a directory now.
|
Python
|
mit
|
chriscannon/highlander
|
python
|
## Code Before:
class InvalidPidFileError(Exception):
""" An exception when an invalid PID file is read."""
class PidFileExistsError(Exception):
""" An exception when a PID file already exists."""
## Instruction:
Add a new exception since we are making a directory now.
## Code After:
class InvalidPidFileError(Exception):
""" An exception when an invalid PID file is read."""
class PidFileExistsError(Exception):
""" An exception when a PID file already exists."""
class InvalidPidDirectoryError(Exception):
""" An exception when an invalid PID directory is detected."""
|
...
class PidFileExistsError(Exception):
""" An exception when a PID file already exists."""
class InvalidPidDirectoryError(Exception):
""" An exception when an invalid PID directory is detected."""
...
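A hypothetical call site showing how the new exception might be raised and handled (the directory check itself is not part of this record):
import os

def ensure_pid_directory(path):
    # raise the new exception when the PID directory is missing or not a directory
    if not os.path.isdir(path):
        raise InvalidPidDirectoryError('invalid PID directory: %s' % path)

try:
    ensure_pid_directory('/tmp/definitely-missing')
except InvalidPidDirectoryError as error:
    print(error)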
|
5fb76e9389cf0e4b64f726786846f114ba34f48d
|
okapi-core/src/main/java/okapi/util/HttpResponse.java
|
okapi-core/src/main/java/okapi/util/HttpResponse.java
|
/*
* Copyright (c) 2015-2016, Index Data
* All rights reserved.
* See the file LICENSE for details.
*/
package okapi.util;
import io.vertx.core.http.HttpServerResponse;
import io.vertx.ext.web.RoutingContext;
public class HttpResponse {
static public void responseError(RoutingContext ctx, int code, Throwable cause) {
responseText(ctx, code).end(cause.getMessage());
}
static public HttpServerResponse responseText(RoutingContext ctx, int code) {
return ctx.response()
.setStatusCode(code)
.putHeader("Content-Type", "text/plain");
}
static public HttpServerResponse responseJson(RoutingContext ctx, int code) {
return ctx.response()
.setStatusCode(code)
.putHeader("Content-Type", "application/json");
}
}
|
/*
* Copyright (c) 2015-2016, Index Data
* All rights reserved.
* See the file LICENSE for details.
*/
package okapi.util;
import io.vertx.core.http.HttpServerResponse;
import io.vertx.ext.web.RoutingContext;
public class HttpResponse {
static public void responseError(RoutingContext ctx, ErrorType t, Throwable cause) {
int code = 500;
switch (t) {
case OK: code = 200; break;
case INTERNAL: code = 500; break;
case USER: code = 400; break;
case NOT_FOUND: code = 404; break;
case ANY: code = 500; break;
}
responseError(ctx, code, cause);
}
static public void responseError(RoutingContext ctx, int code, Throwable cause) {
responseText(ctx, code).end(cause.getMessage());
}
static public HttpServerResponse responseText(RoutingContext ctx, int code) {
return ctx.response()
.setStatusCode(code)
.putHeader("Content-Type", "text/plain");
}
static public HttpServerResponse responseJson(RoutingContext ctx, int code) {
return ctx.response()
.setStatusCode(code)
.putHeader("Content-Type", "application/json");
}
}
|
Add responseError: takes ErrorType and maps to HTTP error
|
Add responseError: takes ErrorType and maps to HTTP error
|
Java
|
apache-2.0
|
julianladisch/okapi-acquisitions-poc,funkymalc/okapi,folio-org/okapi,julianladisch/okapi-acquisitions-poc,julianladisch/okapi-acquisitions-poc,folio-org/okapi,funkymalc/okapi
|
java
|
## Code Before:
/*
* Copyright (c) 2015-2016, Index Data
* All rights reserved.
* See the file LICENSE for details.
*/
package okapi.util;
import io.vertx.core.http.HttpServerResponse;
import io.vertx.ext.web.RoutingContext;
public class HttpResponse {
static public void responseError(RoutingContext ctx, int code, Throwable cause) {
responseText(ctx, code).end(cause.getMessage());
}
static public HttpServerResponse responseText(RoutingContext ctx, int code) {
return ctx.response()
.setStatusCode(code)
.putHeader("Content-Type", "text/plain");
}
static public HttpServerResponse responseJson(RoutingContext ctx, int code) {
return ctx.response()
.setStatusCode(code)
.putHeader("Content-Type", "application/json");
}
}
## Instruction:
Add responseError: takes ErrorType and maps to HTTP error
## Code After:
/*
* Copyright (c) 2015-2016, Index Data
* All rights reserved.
* See the file LICENSE for details.
*/
package okapi.util;
import io.vertx.core.http.HttpServerResponse;
import io.vertx.ext.web.RoutingContext;
public class HttpResponse {
static public void responseError(RoutingContext ctx, ErrorType t, Throwable cause) {
int code = 500;
switch (t) {
case OK: code = 200; break;
case INTERNAL: code = 500; break;
case USER: code = 400; break;
case NOT_FOUND: code = 404; break;
case ANY: code = 500; break;
}
responseError(ctx, code, cause);
}
static public void responseError(RoutingContext ctx, int code, Throwable cause) {
responseText(ctx, code).end(cause.getMessage());
}
static public HttpServerResponse responseText(RoutingContext ctx, int code) {
return ctx.response()
.setStatusCode(code)
.putHeader("Content-Type", "text/plain");
}
static public HttpServerResponse responseJson(RoutingContext ctx, int code) {
return ctx.response()
.setStatusCode(code)
.putHeader("Content-Type", "application/json");
}
}
|
...
import io.vertx.ext.web.RoutingContext;
public class HttpResponse {
static public void responseError(RoutingContext ctx, ErrorType t, Throwable cause) {
int code = 500;
switch (t) {
case OK: code = 200; break;
case INTERNAL: code = 500; break;
case USER: code = 400; break;
case NOT_FOUND: code = 404; break;
case ANY: code = 500; break;
}
responseError(ctx, code, cause);
}
static public void responseError(RoutingContext ctx, int code, Throwable cause) {
responseText(ctx, code).end(cause.getMessage());
...
|
f288b3ead236dab2cd8bcad9ce9931f2e4b61e02
|
app/src/main/java/org/wikipedia/feed/aggregated/AggregatedFeedContent.java
|
app/src/main/java/org/wikipedia/feed/aggregated/AggregatedFeedContent.java
|
package org.wikipedia.feed.aggregated;
import android.support.annotation.NonNull;
import org.wikipedia.feed.model.CardPageItem;
import org.wikipedia.feed.mostread.MostReadArticles;
public class AggregatedFeedContent {
@SuppressWarnings("NullableProblems") @NonNull private CardPageItem tfa;
@SuppressWarnings("NullableProblems") @NonNull private MostReadArticles mostread;
@SuppressWarnings("NullableProblems") @NonNull private CardPageItem random;
public CardPageItem tfa() {
return tfa;
}
public MostReadArticles mostRead() {
return mostread;
}
}
|
package org.wikipedia.feed.aggregated;
import android.support.annotation.NonNull;
import org.wikipedia.feed.model.CardPageItem;
public class AggregatedFeedContent {
@SuppressWarnings("NullableProblems") @NonNull private CardPageItem tfa;
@SuppressWarnings("NullableProblems") @NonNull private CardPageItem random;
public CardPageItem tfa() {
return tfa;
}
}
|
Remove most-read field from aggregated content model
|
Remove most-read field from aggregated content model
This is unused, since we get most-read results from MostReadClient.
We can put it back in if and when we move to using the most-read results
from the aggregated response.
Change-Id: I0f31d1b9a6dd772f9c997ae17bfa18dfdbd8c310
|
Java
|
apache-2.0
|
SAGROUP2/apps-android-wikipedia,wikimedia/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,wikimedia/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,dbrant/apps-android-wikipedia,wikimedia/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,dbrant/apps-android-wikipedia,dbrant/apps-android-wikipedia,wikimedia/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,dbrant/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,dbrant/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia
|
java
|
## Code Before:
package org.wikipedia.feed.aggregated;
import android.support.annotation.NonNull;
import org.wikipedia.feed.model.CardPageItem;
import org.wikipedia.feed.mostread.MostReadArticles;
public class AggregatedFeedContent {
@SuppressWarnings("NullableProblems") @NonNull private CardPageItem tfa;
@SuppressWarnings("NullableProblems") @NonNull private MostReadArticles mostread;
@SuppressWarnings("NullableProblems") @NonNull private CardPageItem random;
public CardPageItem tfa() {
return tfa;
}
public MostReadArticles mostRead() {
return mostread;
}
}
## Instruction:
Remove most-read field from aggregated content model
This is unused, since we get most-read results from MostReadClient.
We can put it back in if and when we move to using the most-read results
from the aggregated response.
Change-Id: I0f31d1b9a6dd772f9c997ae17bfa18dfdbd8c310
## Code After:
package org.wikipedia.feed.aggregated;
import android.support.annotation.NonNull;
import org.wikipedia.feed.model.CardPageItem;
public class AggregatedFeedContent {
@SuppressWarnings("NullableProblems") @NonNull private CardPageItem tfa;
@SuppressWarnings("NullableProblems") @NonNull private CardPageItem random;
public CardPageItem tfa() {
return tfa;
}
}
|
// ... existing code ...
import android.support.annotation.NonNull;
import org.wikipedia.feed.model.CardPageItem;
public class AggregatedFeedContent {
@SuppressWarnings("NullableProblems") @NonNull private CardPageItem tfa;
@SuppressWarnings("NullableProblems") @NonNull private CardPageItem random;
public CardPageItem tfa() {
return tfa;
}
}
// ... rest of the code ...
|
26d063cd78140d69160e16364a6cdda1e26516d2
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
NEWS = open(os.path.join(here, 'NEWS.txt')).read()
version = '1.0-a'
install_requires = [
'jnius>=1.0',
]
setup(
name='engerek',
version=version,
description='Turkish natural language processing tools for Python',
long_description=README + '\n\n' + NEWS,
classifiers=[
'License :: Other/Proprietary License',
'Topic :: Text Processing :: Linguistic',
],
keywords='turkish nlp tokenizer stemmer deasciifier',
author=u'Çilek Ağacı',
author_email='[email protected]',
url='http://cilekagaci.com/',
license='Proprietary',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
entry_points={
#'console_scripts': ['engerek=engerek:main'],
}
)
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
NEWS = open(os.path.join(here, 'NEWS.txt')).read()
version = '1.0-a'
install_requires = [
'jnius==1.1-dev',
'Cython==0.19.2',
]
setup(
name='engerek',
version=version,
description='Turkish natural language processing tools for Python',
long_description=README + '\n\n' + NEWS,
classifiers=[
'License :: Other/Proprietary License',
'Topic :: Text Processing :: Linguistic',
],
keywords='turkish nlp tokenizer stemmer deasciifier',
author=u'Çilek Ağacı',
author_email='[email protected]',
url='http://cilekagaci.com/',
license='Proprietary',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
entry_points={
#'console_scripts': ['engerek=engerek:main'],
},
dependency_links = ['http://github.com/kivy/pyjnius/tarball/master#egg=jnius-1.1-dev']
)
|
Use github master for pyjnius
|
Use github master for pyjnius
|
Python
|
apache-2.0
|
cilekagaci/engerek
|
python
|
## Code Before:
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
NEWS = open(os.path.join(here, 'NEWS.txt')).read()
version = '1.0-a'
install_requires = [
'jnius>=1.0',
]
setup(
name='engerek',
version=version,
description='Turkish natural language processing tools for Python',
long_description=README + '\n\n' + NEWS,
classifiers=[
'License :: Other/Proprietary License',
'Topic :: Text Processing :: Linguistic',
],
keywords='turkish nlp tokenizer stemmer deasciifier',
author=u'Çilek Ağacı',
author_email='[email protected]',
url='http://cilekagaci.com/',
license='Proprietary',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
entry_points={
#'console_scripts': ['engerek=engerek:main'],
}
)
## Instruction:
Use github master for pyjnius
## Code After:
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
NEWS = open(os.path.join(here, 'NEWS.txt')).read()
version = '1.0-a'
install_requires = [
'jnius==1.1-dev',
'Cython==0.19.2',
]
setup(
name='engerek',
version=version,
description='Turkish natural language processing tools for Python',
long_description=README + '\n\n' + NEWS,
classifiers=[
'License :: Other/Proprietary License',
'Topic :: Text Processing :: Linguistic',
],
keywords='turkish nlp tokenizer stemmer deasciifier',
author=u'Çilek Ağacı',
author_email='[email protected]',
url='http://cilekagaci.com/',
license='Proprietary',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
entry_points={
#'console_scripts': ['engerek=engerek:main'],
},
dependency_links = ['http://github.com/kivy/pyjnius/tarball/master#egg=jnius-1.1-dev']
)
|
// ... existing code ...
version = '1.0-a'
install_requires = [
'jnius==1.1-dev',
'Cython==0.19.2',
]
setup(
// ... modified code ...
install_requires=install_requires,
entry_points={
#'console_scripts': ['engerek=engerek:main'],
},
dependency_links = ['http://github.com/kivy/pyjnius/tarball/master#egg=jnius-1.1-dev']
)
// ... rest of the code ...
|
e654cea816be8c4a79da66efbc50a5698a51ba5b
|
plantcv/plantcv/print_results.py
|
plantcv/plantcv/print_results.py
|
import json
import os
from plantcv.plantcv import outputs
def print_results(filename):
"""Print result table
Inputs:
filename = filename
:param filename: str
:return:
"""
if os.path.isfile(filename):
with open(filename, 'r') as f:
hierarchical_data = json.load(f)
hierarchical_data["observations"] = outputs.observations
else:
hierarchical_data = {"metadata": {}, "observations": outputs.observations}
with open(filename, mode='w') as f:
json.dump(hierarchical_data, f)
|
from plantcv.plantcv import outputs
def print_results(filename):
"""Print result table
Inputs:
filename = filename
:param filename: str
:return:
"""
print("""Deprecation warning: plantcv.print_results will be removed in a future version.
Please use plantcv.outputs.save_results instead.
""")
outputs.save_results(filename=filename, outformat="json")
|
Add deprecation warning and use new method
|
Add deprecation warning and use new method
|
Python
|
mit
|
stiphyMT/plantcv,danforthcenter/plantcv,danforthcenter/plantcv,stiphyMT/plantcv,stiphyMT/plantcv,danforthcenter/plantcv
|
python
|
## Code Before:
import json
import os
from plantcv.plantcv import outputs
def print_results(filename):
"""Print result table
Inputs:
filename = filename
:param filename: str
:return:
"""
if os.path.isfile(filename):
with open(filename, 'r') as f:
hierarchical_data = json.load(f)
hierarchical_data["observations"] = outputs.observations
else:
hierarchical_data = {"metadata": {}, "observations": outputs.observations}
with open(filename, mode='w') as f:
json.dump(hierarchical_data, f)
## Instruction:
Add deprecation warning and use new method
## Code After:
from plantcv.plantcv import outputs
def print_results(filename):
"""Print result table
Inputs:
filename = filename
:param filename: str
:return:
"""
print("""Deprecation warning: plantcv.print_results will be removed in a future version.
Please use plantcv.outputs.save_results instead.
""")
outputs.save_results(filename=filename, outformat="json")
|
// ... existing code ...
from plantcv.plantcv import outputs
// ... modified code ...
:param filename: str
:return:
"""
print("""Deprecation warning: plantcv.print_results will be removed in a future version.
Please use plantcv.outputs.save_results instead.
""")
outputs.save_results(filename=filename, outformat="json")
// ... rest of the code ...
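Hypothetical call sites contrasting the deprecated wrapper with the call it now delegates to (import paths assumed):
from plantcv.plantcv import outputs, print_results

print_results(filename='results.json')  # prints the deprecation warning, then saves via outputs
outputs.save_results(filename='results.json', outformat='json')  # preferred replacement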
|
af8d25d74dbbfcb25bcdfb454125d834644bc1bc
|
bin/app_setup.py
|
bin/app_setup.py
|
import hook_system_variables as hook
import os_operations as op
import os
def setup():
home_dir = op.get_home()
app_tree = home_dir + op.separator() + hook.data_storage_path
if not os.path.exists(app_tree):
op.create_tree(app_tree)
else:
print hook.application_name + " is already setup"
|
import hook_system_variables as hook
import os_operations as op
import os
def setup():
home_dir = op.get_home()
app_tree = home_dir + op.separator() + hook.data_storage_path
if not os.path.exists(app_tree):
op.create_tree(app_tree)
file_absolute_path = os.path.abspath(__file__)
base_dir = os.path.split(file_absolute_path)[0]
hook_absolute_path = base_dir.rsplit(op.separator(), 1)[0]
append_hook_to_sys_path(hook_absolute_path)
else:
print hook.application_name + " is already setup"
def append_hook_to_sys_path(_path):
os.environ['PATH'] += os.pathsep + _path
|
Append G(app) to os $PATH
|
Append G(app) to os $PATH
|
Python
|
mit
|
adnane1deev/Hook
|
python
|
## Code Before:
import hook_system_variables as hook
import os_operations as op
import os
def setup():
home_dir = op.get_home()
app_tree = home_dir + op.separator() + hook.data_storage_path
if not os.path.exists(app_tree):
op.create_tree(app_tree)
else:
print hook.application_name + " is already setup"
## Instruction:
Append G(app) to os $PATH
## Code After:
import hook_system_variables as hook
import os_operations as op
import os
def setup():
home_dir = op.get_home()
app_tree = home_dir + op.separator() + hook.data_storage_path
if not os.path.exists(app_tree):
op.create_tree(app_tree)
file_absolute_path = os.path.abspath(__file__)
base_dir = os.path.split(file_absolute_path)[0]
hook_absolute_path = base_dir.rsplit(op.separator(), 1)[0]
append_hook_to_sys_path(hook_absolute_path)
else:
print hook.application_name + " is already setup"
def append_hook_to_sys_path(_path):
os.environ['PATH'] += os.pathsep + _path
|
# ... existing code ...
if not os.path.exists(app_tree):
op.create_tree(app_tree)
file_absolute_path = os.path.abspath(__file__)
base_dir = os.path.split(file_absolute_path)[0]
hook_absolute_path = base_dir.rsplit(op.separator(), 1)[0]
append_hook_to_sys_path(hook_absolute_path)
else:
print hook.application_name + " is already setup"
def append_hook_to_sys_path(_path):
os.environ['PATH'] += os.pathsep + _path
# ... rest of the code ...
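A hypothetical check of what append_hook_to_sys_path does to the process environment (the path is made up for illustration):
import os

before = os.environ['PATH']
append_hook_to_sys_path('/home/user/hook')
# the given path is appended to $PATH using the platform's separator
assert os.environ['PATH'] == before + os.pathsep + '/home/user/hook'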
|
fb82b4f77379ddd1525947cc61f1c46c34674da4
|
froide/publicbody/admin.py
|
froide/publicbody/admin.py
|
from django.contrib import admin
from froide.publicbody.models import (PublicBody, FoiLaw, PublicBodyTopic,
Jurisdiction)
class PublicBodyAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('name', 'email', 'url', 'classification', 'topic', 'depth',)
list_filter = ('classification', 'topic',)
search_fields = ['name', "description"]
exclude = ('confirmed',)
class FoiLawAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("jurisdiction", "name",)}
list_display = ('name', 'meta',)
class JurisdictionAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
class PublicBodyTopicAdmin(admin.ModelAdmin):
pass
admin.site.register(PublicBody, PublicBodyAdmin)
admin.site.register(FoiLaw, FoiLawAdmin)
admin.site.register(Jurisdiction, JurisdictionAdmin)
admin.site.register(PublicBodyTopic, PublicBodyTopicAdmin)
|
from django.contrib import admin
from froide.publicbody.models import (PublicBody, FoiLaw, PublicBodyTopic,
Jurisdiction)
class PublicBodyAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('name', 'email', 'url', 'classification', 'topic', 'depth',)
list_filter = ('classification', 'topic', 'jurisdiction',)
search_fields = ['name', "description"]
exclude = ('confirmed',)
class FoiLawAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("jurisdiction", "name",)}
list_display = ('name', 'meta',)
class JurisdictionAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
class PublicBodyTopicAdmin(admin.ModelAdmin):
pass
admin.site.register(PublicBody, PublicBodyAdmin)
admin.site.register(FoiLaw, FoiLawAdmin)
admin.site.register(Jurisdiction, JurisdictionAdmin)
admin.site.register(PublicBodyTopic, PublicBodyTopicAdmin)
|
Add list filter by jurisdiction for public bodies
|
Add list filter by jurisdiction for public bodies
|
Python
|
mit
|
LilithWittmann/froide,fin/froide,ryankanno/froide,CodeforHawaii/froide,okfse/froide,stefanw/froide,ryankanno/froide,okfse/froide,stefanw/froide,catcosmo/froide,fin/froide,ryankanno/froide,CodeforHawaii/froide,stefanw/froide,LilithWittmann/froide,CodeforHawaii/froide,okfse/froide,okfse/froide,LilithWittmann/froide,fin/froide,ryankanno/froide,ryankanno/froide,stefanw/froide,CodeforHawaii/froide,catcosmo/froide,stefanw/froide,catcosmo/froide,catcosmo/froide,LilithWittmann/froide,catcosmo/froide,LilithWittmann/froide,fin/froide,okfse/froide,CodeforHawaii/froide
|
python
|
## Code Before:
from django.contrib import admin
from froide.publicbody.models import (PublicBody, FoiLaw, PublicBodyTopic,
Jurisdiction)
class PublicBodyAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('name', 'email', 'url', 'classification', 'topic', 'depth',)
list_filter = ('classification', 'topic',)
search_fields = ['name', "description"]
exclude = ('confirmed',)
class FoiLawAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("jurisdiction", "name",)}
list_display = ('name', 'meta',)
class JurisdictionAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
class PublicBodyTopicAdmin(admin.ModelAdmin):
pass
admin.site.register(PublicBody, PublicBodyAdmin)
admin.site.register(FoiLaw, FoiLawAdmin)
admin.site.register(Jurisdiction, JurisdictionAdmin)
admin.site.register(PublicBodyTopic, PublicBodyTopicAdmin)
## Instruction:
Add list filter by jurisdiction for public bodies
## Code After:
from django.contrib import admin
from froide.publicbody.models import (PublicBody, FoiLaw, PublicBodyTopic,
Jurisdiction)
class PublicBodyAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('name', 'email', 'url', 'classification', 'topic', 'depth',)
list_filter = ('classification', 'topic', 'jurisdiction',)
search_fields = ['name', "description"]
exclude = ('confirmed',)
class FoiLawAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("jurisdiction", "name",)}
list_display = ('name', 'meta',)
class JurisdictionAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
class PublicBodyTopicAdmin(admin.ModelAdmin):
pass
admin.site.register(PublicBody, PublicBodyAdmin)
admin.site.register(FoiLaw, FoiLawAdmin)
admin.site.register(Jurisdiction, JurisdictionAdmin)
admin.site.register(PublicBodyTopic, PublicBodyTopicAdmin)
|
# ... existing code ...
class PublicBodyAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('name', 'email', 'url', 'classification', 'topic', 'depth',)
list_filter = ('classification', 'topic', 'jurisdiction',)
search_fields = ['name', "description"]
exclude = ('confirmed',)
# ... rest of the code ...
|
667294dcc3b8ab34618ad674c2b6ac8efeec0620
|
places/admin.py
|
places/admin.py
|
from django.contrib.gis import admin
from models import Place
admin.site.register(Place, admin.OSMGeoAdmin)
|
from django.contrib.gis import admin
from models import Place
try:
_model_admin = admin.OSMGeoAdmin
except AttributeError:
_model_admin = admin.ModelAdmin
admin.site.register(Place, _model_admin)
|
Make it possible to run dev server on my desktop.
|
Make it possible to run dev server on my desktop.
While I'm accessing a suitable database remotely, I don't have enough
stuff installed locally to have OSMGeoAdmin (no GDAL installed, for
example).
|
Python
|
bsd-3-clause
|
MAPC/masshealth,MAPC/masshealth
|
python
|
## Code Before:
from django.contrib.gis import admin
from models import Place
admin.site.register(Place, admin.OSMGeoAdmin)
## Instruction:
Make it possible to run dev server on my desktop.
While I'm accessing a suitable database remotely, I don't have enough
stuff installed locally to have OSMGeoAdmin (no GDAL installed, for
example).
## Code After:
from django.contrib.gis import admin
from models import Place
try:
_model_admin = admin.OSMGeoAdmin
except AttributeError:
_model_admin = admin.ModelAdmin
admin.site.register(Place, _model_admin)
|
# ... existing code ...
from django.contrib.gis import admin
from models import Place
try:
_model_admin = admin.OSMGeoAdmin
except AttributeError:
_model_admin = admin.ModelAdmin
admin.site.register(Place, _model_admin)
# ... rest of the code ...
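An equivalent one-line fallback (a hypothetical alternative, not what the record uses) for the same situation where the GIS admin classes are unavailable:
from django.contrib.gis import admin

# falls back to ModelAdmin when OSMGeoAdmin is absent (e.g. no GDAL installed)
_model_admin = getattr(admin, 'OSMGeoAdmin', admin.ModelAdmin)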
|
25d4fabce29433bc1381a03a44a67cc588688e7f
|
setup.py
|
setup.py
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'CouchDB-FUSE',
version = '0.1',
description = 'CouchDB FUSE module',
long_description = \
"""This is a Python FUSE module for CouchDB. It allows CouchDB document
attachments to be mounted on a virtual filesystem and edited directly.""",
author = 'Jason Davies',
author_email = '[email protected]',
license = 'BSD',
url = 'http://code.google.com/p/couchdb-fuse/',
zip_safe = True,
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database :: Front-Ends',
],
packages = ['couchdbfuse'],
entry_points = {
'console_scripts': [
'couchmount = couchdbfuse:main',
],
},
install_requires = ['CouchDB>=0.5dev_r125'],
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'CouchDB-FUSE',
version = '0.1',
description = 'CouchDB FUSE module',
long_description = \
"""This is a Python FUSE module for CouchDB. It allows CouchDB document
attachments to be mounted on a virtual filesystem and edited directly.""",
author = 'Jason Davies',
author_email = '[email protected]',
license = 'BSD',
url = 'http://code.google.com/p/couchdb-fuse/',
zip_safe = True,
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database :: Front-Ends',
],
packages = ['couchdbfuse'],
entry_points = {
'console_scripts': [
'couchmount = couchdbfuse:main',
],
},
install_requires = ['CouchDB >= 0.5'],
)
|
Update couchdb-python dependency version to 0.5 in anticipation of its release.
|
Update couchdb-python dependency version to 0.5 in anticipation of its release.
git-svn-id: fdb8975c015a424b33c0997a6b0d758f3a24819f@9 bfab2ddc-a81c-11dd-9a07-0f3041a8e97c
|
Python
|
bsd-3-clause
|
cozy-labs/cozy-fuse,jasondavies/couchdb-fuse
|
python
|
## Code Before:
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'CouchDB-FUSE',
version = '0.1',
description = 'CouchDB FUSE module',
long_description = \
"""This is a Python FUSE module for CouchDB. It allows CouchDB document
attachments to be mounted on a virtual filesystem and edited directly.""",
author = 'Jason Davies',
author_email = '[email protected]',
license = 'BSD',
url = 'http://code.google.com/p/couchdb-fuse/',
zip_safe = True,
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database :: Front-Ends',
],
packages = ['couchdbfuse'],
entry_points = {
'console_scripts': [
'couchmount = couchdbfuse:main',
],
},
install_requires = ['CouchDB>=0.5dev_r125'],
)
## Instruction:
Update couchdb-python dependency version to 0.5 in anticipation of its release.
git-svn-id: fdb8975c015a424b33c0997a6b0d758f3a24819f@9 bfab2ddc-a81c-11dd-9a07-0f3041a8e97c
## Code After:
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'CouchDB-FUSE',
version = '0.1',
description = 'CouchDB FUSE module',
long_description = \
"""This is a Python FUSE module for CouchDB. It allows CouchDB document
attachments to be mounted on a virtual filesystem and edited directly.""",
author = 'Jason Davies',
author_email = '[email protected]',
license = 'BSD',
url = 'http://code.google.com/p/couchdb-fuse/',
zip_safe = True,
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database :: Front-Ends',
],
packages = ['couchdbfuse'],
entry_points = {
'console_scripts': [
'couchmount = couchdbfuse:main',
],
},
install_requires = ['CouchDB >= 0.5'],
)
|
# ... existing code ...
],
},
install_requires = ['CouchDB >= 0.5'],
)
# ... rest of the code ...
|
3089eae072bd2e871c11251961ec35a09b83dd38
|
setup.py
|
setup.py
|
from setuptools import setup, Extension
setup(
name = 'python-ad',
version = '0.9',
description = 'An AD client library for Python',
author = 'Geert Jansen',
author_email = '[email protected]',
url = 'http://code.google.com/p/python-ad',
license = 'MIT',
classifiers = ['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python'],
package_dir = {'': 'lib'},
packages = ['ad', 'ad.core', 'ad.protocol', 'ad.util'],
install_requires = [ 'python-ldap', 'dnspython', 'ply' ],
ext_modules = [Extension('ad.protocol.krb5', ['lib/ad/protocol/krb5.c'],
libraries=['krb5'])],
test_suite = 'nose.collector'
)
|
from setuptools import setup, Extension
setup(
name = 'python-ad',
version = '0.9',
description = 'An AD client library for Python',
author = 'Geert Jansen',
author_email = '[email protected]',
url = 'https://github.com/geertj/python-ad',
license = 'MIT',
classifiers = ['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python'],
package_dir = {'': 'lib'},
packages = ['ad', 'ad.core', 'ad.protocol', 'ad.util'],
install_requires = [ 'python-ldap', 'dnspython', 'ply' ],
ext_modules = [Extension('ad.protocol.krb5', ['lib/ad/protocol/krb5.c'],
libraries=['krb5'])],
test_suite = 'nose.collector'
)
|
Change email address and home page.
|
Change email address and home page.
|
Python
|
mit
|
geertj/python-ad,theatlantic/python-active-directory,sfu-rcg/python-ad,geertj/python-ad,theatlantic/python-active-directory,sfu-rcg/python-ad
|
python
|
## Code Before:
from setuptools import setup, Extension
setup(
name = 'python-ad',
version = '0.9',
description = 'An AD client library for Python',
author = 'Geert Jansen',
author_email = '[email protected]',
url = 'http://code.google.com/p/python-ad',
license = 'MIT',
classifiers = ['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python'],
package_dir = {'': 'lib'},
packages = ['ad', 'ad.core', 'ad.protocol', 'ad.util'],
install_requires = [ 'python-ldap', 'dnspython', 'ply' ],
ext_modules = [Extension('ad.protocol.krb5', ['lib/ad/protocol/krb5.c'],
libraries=['krb5'])],
test_suite = 'nose.collector'
)
## Instruction:
Change email address and home page.
## Code After:
from setuptools import setup, Extension
setup(
name = 'python-ad',
version = '0.9',
description = 'An AD client library for Python',
author = 'Geert Jansen',
author_email = '[email protected]',
url = 'https://github.com/geertj/python-ad',
license = 'MIT',
classifiers = ['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python'],
package_dir = {'': 'lib'},
packages = ['ad', 'ad.core', 'ad.protocol', 'ad.util'],
install_requires = [ 'python-ldap', 'dnspython', 'ply' ],
ext_modules = [Extension('ad.protocol.krb5', ['lib/ad/protocol/krb5.c'],
libraries=['krb5'])],
test_suite = 'nose.collector'
)
|
// ... existing code ...
version = '0.9',
description = 'An AD client library for Python',
author = 'Geert Jansen',
author_email = '[email protected]',
url = 'https://github.com/geertj/python-ad',
license = 'MIT',
classifiers = ['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
// ... rest of the code ...
|
fc830b0caf29fe1424bc8fe30afcf7e21d8ecd72
|
inbound.py
|
inbound.py
|
import logging, email, yaml
from django.utils import simplejson as json
from google.appengine.ext import webapp, deferred
from google.appengine.ext.webapp.mail_handlers import InboundMailHandler
from google.appengine.api.urlfetch import fetch
settings = yaml.load(open('settings.yaml'))
def callback(raw):
result = {'email': {'raw': raw}}
fetch(settings['outbound_url'],
payload=json.dumps(result),
method="POST",
headers={
'Authorization': settings['api_key'],
'Content-Type': 'application/json'
}
)
class InboundHandler(InboundMailHandler):
def receive(self, message):
logging.info("Received a message from: " + message.sender)
deferred.defer(callback, message.original.as_string(True), _queue='inbound')
|
import logging, email, yaml
from django.utils import simplejson as json
from google.appengine.ext import webapp, deferred
from google.appengine.ext.webapp.mail_handlers import InboundMailHandler
from google.appengine.api.urlfetch import fetch
from google.appengine.api.urlfetch import Error as FetchError
settings = yaml.load(open('settings.yaml'))
def callback(raw):
result = {'email': {'raw': raw}}
response = fetch(settings['outbound_url'],
payload=json.dumps(result),
method="POST",
headers={
'Authorization': settings['api_key'],
'Content-Type': 'application/json'
},
deadline=10
)
logging.info(response.status_code)
if response.status_code != 200:
raise FetchError()
class InboundHandler(InboundMailHandler):
def receive(self, message):
logging.info("Received a message from: " + message.sender)
deferred.defer(callback, message.original.as_string(True), _queue='inbound')
|
Raise if response is not 200
|
Raise if response is not 200
|
Python
|
mit
|
maccman/remail-engine
|
python
|
## Code Before:
import logging, email, yaml
from django.utils import simplejson as json
from google.appengine.ext import webapp, deferred
from google.appengine.ext.webapp.mail_handlers import InboundMailHandler
from google.appengine.api.urlfetch import fetch
settings = yaml.load(open('settings.yaml'))
def callback(raw):
result = {'email': {'raw': raw}}
fetch(settings['outbound_url'],
payload=json.dumps(result),
method="POST",
headers={
'Authorization': settings['api_key'],
'Content-Type': 'application/json'
}
)
class InboundHandler(InboundMailHandler):
def receive(self, message):
logging.info("Received a message from: " + message.sender)
deferred.defer(callback, message.original.as_string(True), _queue='inbound')
## Instruction:
Raise if response is not 200
## Code After:
import logging, email, yaml
from django.utils import simplejson as json
from google.appengine.ext import webapp, deferred
from google.appengine.ext.webapp.mail_handlers import InboundMailHandler
from google.appengine.api.urlfetch import fetch
from google.appengine.api.urlfetch import Error as FetchError
settings = yaml.load(open('settings.yaml'))
def callback(raw):
result = {'email': {'raw': raw}}
response = fetch(settings['outbound_url'],
payload=json.dumps(result),
method="POST",
headers={
'Authorization': settings['api_key'],
'Content-Type': 'application/json'
},
deadline=10
)
logging.info(response.status_code)
if response.status_code != 200:
raise FetchError()
class InboundHandler(InboundMailHandler):
def receive(self, message):
logging.info("Received a message from: " + message.sender)
deferred.defer(callback, message.original.as_string(True), _queue='inbound')
|
...
from google.appengine.ext import webapp, deferred
from google.appengine.ext.webapp.mail_handlers import InboundMailHandler
from google.appengine.api.urlfetch import fetch
from google.appengine.api.urlfetch import Error as FetchError
settings = yaml.load(open('settings.yaml'))
...
def callback(raw):
result = {'email': {'raw': raw}}
response = fetch(settings['outbound_url'],
payload=json.dumps(result),
method="POST",
headers={
'Authorization': settings['api_key'],
'Content-Type': 'application/json'
},
deadline=10
)
logging.info(response.status_code)
if response.status_code != 200:
raise FetchError()
class InboundHandler(InboundMailHandler):
def receive(self, message):
...
|
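Why raising matters in this record: a function run through deferred.defer is re-queued and retried by the task queue when it exits with an exception, while a normal return marks the task done even if the outbound POST failed. The sketch below illustrates that contract with invented names; it is not code from the repository.
# Illustrative: a deferred task is retried only when the function raises.
class _Response(object):
    def __init__(self, status_code):
        self.status_code = status_code
def deliver(payload, do_post):
    response = do_post(payload)  # stands in for urlfetch.fetch in the real handler
    if response.status_code != 200:
        # Raising causes the task queue to re-run deliver(payload) later.
        raise RuntimeError('delivery failed with status %d' % response.status_code)
deliver({'email': {}}, lambda payload: _Response(200))  # 200 -> task completes normally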
b82d85114c13f945cc1976606d4d36d5b4b2885a
|
phonenumber_field/formfields.py
|
phonenumber_field/formfields.py
|
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import CharField
from django.core.exceptions import ValidationError
from phonenumber_field.validators import validate_international_phonenumber
from phonenumber_field.phonenumber import to_python
class PhoneNumberField(CharField):
default_error_messages = {
'invalid': _(u'Enter a valid phone number.'),
}
default_validators = [validate_international_phonenumber]
def to_python(self, value):
phone_number = to_python(value)
if phone_number and not phone_number.is_valid():
raise ValidationError(self.error_messages['invalid'])
return phone_number
|
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import CharField
from django.core.exceptions import ValidationError
from phonenumber_field.validators import validate_international_phonenumber
from phonenumber_field.phonenumber import to_python
class PhoneNumberField(CharField):
default_error_messages = {
'invalid': _(u'Enter a valid phone number.'),
}
default_validators = [validate_international_phonenumber]
def to_python(self, value):
if value in self.empty_values:
return ''
phone_number = to_python(value)
if phone_number and not phone_number.is_valid():
raise ValidationError(self.error_messages['invalid'])
return phone_number
|
Fix formfield to return an empty string if an empty value is given. This allows empty input for not null model fields with blank=True.
|
Fix formfield to return an empty string if an empty value is given. This allows empty input for not null model fields with blank=True.
|
Python
|
mit
|
bramd/django-phonenumber-field,bramd/django-phonenumber-field
|
python
|
## Code Before:
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import CharField
from django.core.exceptions import ValidationError
from phonenumber_field.validators import validate_international_phonenumber
from phonenumber_field.phonenumber import to_python
class PhoneNumberField(CharField):
default_error_messages = {
'invalid': _(u'Enter a valid phone number.'),
}
default_validators = [validate_international_phonenumber]
def to_python(self, value):
phone_number = to_python(value)
if phone_number and not phone_number.is_valid():
raise ValidationError(self.error_messages['invalid'])
return phone_number
## Instruction:
Fix formfield to return an empty string if an empty value is given. This allows empty input for not null model fields with blank=True.
## Code After:
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import CharField
from django.core.exceptions import ValidationError
from phonenumber_field.validators import validate_international_phonenumber
from phonenumber_field.phonenumber import to_python
class PhoneNumberField(CharField):
default_error_messages = {
'invalid': _(u'Enter a valid phone number.'),
}
default_validators = [validate_international_phonenumber]
def to_python(self, value):
if value in self.empty_values:
return ''
phone_number = to_python(value)
if phone_number and not phone_number.is_valid():
raise ValidationError(self.error_messages['invalid'])
return phone_number
|
...
default_validators = [validate_international_phonenumber]
def to_python(self, value):
if value in self.empty_values:
return ''
phone_number = to_python(value)
if phone_number and not phone_number.is_valid():
raise ValidationError(self.error_messages['invalid'])
...
|
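A hedged usage sketch of what the empty_values change enables; the form below is invented for illustration and assumes a configured Django project with django-phonenumber-field installed.
# Illustrative usage: an optional phone field now cleans blank input to ''.
from django import forms
from phonenumber_field.formfields import PhoneNumberField
class ContactForm(forms.Form):
    phone = PhoneNumberField(required=False)
form = ContactForm(data={'phone': ''})
assert form.is_valid()
assert form.cleaned_data['phone'] == ''  # empty string, acceptable for blank=True, null=False model fields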
a8122088474be0b8b2479e52a75ebf25a5a386f1
|
lib/libz/zopenbsd.c
|
lib/libz/zopenbsd.c
|
/* $OpenBSD: zopenbsd.c,v 1.4 2015/01/20 04:41:01 krw Exp $ */
#include <sys/types.h>
#include <sys/malloc.h>
#include <lib/libz/zutil.h>
/*
* Space allocation and freeing routines for use by zlib routines.
*/
void *
zcalloc(notused, items, size)
void *notused;
u_int items, size;
{
return mallocarray(items, size, M_DEVBUF, M_NOWAIT);
}
void
zcfree(notused, ptr)
void *notused;
void *ptr;
{
free(ptr, M_DEVBUF, 0);
}
|
/*
* Space allocation and freeing routines for use by zlib routines.
*/
void *
zcalloc(notused, items, size)
void *notused;
u_int items, size;
{
return mallocarray(items, size, M_DEVBUF, M_NOWAIT);
}
void
zcfree(notused, ptr)
void *notused;
void *ptr;
{
free(ptr, M_DEVBUF, 0);
}
|
Revert some $OpenBSD$ additions about which there are doubts.
|
Revert some $OpenBSD$ additions about which there are doubts.
Suggested by deraadt@
|
C
|
isc
|
orumin/openbsd-efivars,orumin/openbsd-efivars,orumin/openbsd-efivars,orumin/openbsd-efivars
|
c
|
## Code Before:
/* $OpenBSD: zopenbsd.c,v 1.4 2015/01/20 04:41:01 krw Exp $ */
#include <sys/types.h>
#include <sys/malloc.h>
#include <lib/libz/zutil.h>
/*
* Space allocation and freeing routines for use by zlib routines.
*/
void *
zcalloc(notused, items, size)
void *notused;
u_int items, size;
{
return mallocarray(items, size, M_DEVBUF, M_NOWAIT);
}
void
zcfree(notused, ptr)
void *notused;
void *ptr;
{
free(ptr, M_DEVBUF, 0);
}
## Instruction:
Revert some $OpenBSD$ additions about which there are doubts.
Suggested by deraadt@
## Code After:
/*
* Space allocation and freeing routines for use by zlib routines.
*/
void *
zcalloc(notused, items, size)
void *notused;
u_int items, size;
{
return mallocarray(items, size, M_DEVBUF, M_NOWAIT);
}
void
zcfree(notused, ptr)
void *notused;
void *ptr;
{
free(ptr, M_DEVBUF, 0);
}
|
...
/*
* Space allocation and freeing routines for use by zlib routines.
...
|
df16cf042b6085f68b4263e24f29ad61be71cea8
|
nsswitch-internal.h
|
nsswitch-internal.h
|
/*
* nsswitch_internal.h
* Prototypes for some internal glibc functions that we use. Shhh.
*/
#ifndef NSSWITCH_INTERNAL_H
#define NSSWITCH_INTERNAL_H
#include "config.h"
/* glibc/config.h.in */
#if defined USE_REGPARMS && !defined PROF && !defined __BOUNDED_POINTERS__
# define internal_function __attribute__ ((regparm (3), stdcall))
#else
# define internal_function
#endif
/* glibc/nss/nsswitch.h */
typedef struct service_user service_user;
extern int __nss_next (service_user **ni, const char *fct_name, void **fctp,
int status, int all_values);
extern int __nss_database_lookup (const char *database,
const char *alternative_name,
const char *defconfig, service_user **ni);
extern void *__nss_lookup_function (service_user *ni, const char *fct_name);
/* glibc/nss/XXX-lookup.c */
extern int __nss_passwd_lookup (service_user **ni, const char *fct_name,
void **fctp) internal_function;
extern int __nss_group_lookup (service_user **ni, const char *fct_name,
void **fctp) internal_function;
#endif /* NSSWITCH_INTERNAL_H */
|
/*
* nsswitch_internal.h
* Prototypes for some internal glibc functions that we use. Shhh.
*/
#ifndef NSSWITCH_INTERNAL_H
#define NSSWITCH_INTERNAL_H
#include <features.h>
#include "config.h"
/* glibc/config.h.in */
#if __GLIBC_PREREQ(2, 27)
# define internal_function
#elif defined USE_REGPARMS && !defined PROF && !defined __BOUNDED_POINTERS__
# define internal_function __attribute__ ((regparm (3), stdcall))
#else
# define internal_function
#endif
/* glibc/nss/nsswitch.h */
typedef struct service_user service_user;
extern int __nss_next (service_user **ni, const char *fct_name, void **fctp,
int status, int all_values);
extern int __nss_database_lookup (const char *database,
const char *alternative_name,
const char *defconfig, service_user **ni);
extern void *__nss_lookup_function (service_user *ni, const char *fct_name);
/* glibc/nss/XXX-lookup.c */
extern int __nss_passwd_lookup (service_user **ni, const char *fct_name,
void **fctp) internal_function;
extern int __nss_group_lookup (service_user **ni, const char *fct_name,
void **fctp) internal_function;
#endif /* NSSWITCH_INTERNAL_H */
|
Update internal glibc functions ABI for glibc 2.27
|
Update internal glibc functions ABI for glibc 2.27
Signed-off-by: Anders Kaseorg <[email protected]>
|
C
|
lgpl-2.1
|
andersk/nss_nonlocal,andersk/nss_nonlocal
|
c
|
## Code Before:
/*
* nsswitch_internal.h
* Prototypes for some internal glibc functions that we use. Shhh.
*/
#ifndef NSSWITCH_INTERNAL_H
#define NSSWITCH_INTERNAL_H
#include "config.h"
/* glibc/config.h.in */
#if defined USE_REGPARMS && !defined PROF && !defined __BOUNDED_POINTERS__
# define internal_function __attribute__ ((regparm (3), stdcall))
#else
# define internal_function
#endif
/* glibc/nss/nsswitch.h */
typedef struct service_user service_user;
extern int __nss_next (service_user **ni, const char *fct_name, void **fctp,
int status, int all_values);
extern int __nss_database_lookup (const char *database,
const char *alternative_name,
const char *defconfig, service_user **ni);
extern void *__nss_lookup_function (service_user *ni, const char *fct_name);
/* glibc/nss/XXX-lookup.c */
extern int __nss_passwd_lookup (service_user **ni, const char *fct_name,
void **fctp) internal_function;
extern int __nss_group_lookup (service_user **ni, const char *fct_name,
void **fctp) internal_function;
#endif /* NSSWITCH_INTERNAL_H */
## Instruction:
Update internal glibc functions ABI for glibc 2.27
Signed-off-by: Anders Kaseorg <[email protected]>
## Code After:
/*
* nsswitch_internal.h
* Prototypes for some internal glibc functions that we use. Shhh.
*/
#ifndef NSSWITCH_INTERNAL_H
#define NSSWITCH_INTERNAL_H
#include <features.h>
#include "config.h"
/* glibc/config.h.in */
#if __GLIBC_PREREQ(2, 27)
# define internal_function
#elif defined USE_REGPARMS && !defined PROF && !defined __BOUNDED_POINTERS__
# define internal_function __attribute__ ((regparm (3), stdcall))
#else
# define internal_function
#endif
/* glibc/nss/nsswitch.h */
typedef struct service_user service_user;
extern int __nss_next (service_user **ni, const char *fct_name, void **fctp,
int status, int all_values);
extern int __nss_database_lookup (const char *database,
const char *alternative_name,
const char *defconfig, service_user **ni);
extern void *__nss_lookup_function (service_user *ni, const char *fct_name);
/* glibc/nss/XXX-lookup.c */
extern int __nss_passwd_lookup (service_user **ni, const char *fct_name,
void **fctp) internal_function;
extern int __nss_group_lookup (service_user **ni, const char *fct_name,
void **fctp) internal_function;
#endif /* NSSWITCH_INTERNAL_H */
|
// ... existing code ...
#ifndef NSSWITCH_INTERNAL_H
#define NSSWITCH_INTERNAL_H
#include <features.h>
#include "config.h"
/* glibc/config.h.in */
#if __GLIBC_PREREQ(2, 27)
# define internal_function
#elif defined USE_REGPARMS && !defined PROF && !defined __BOUNDED_POINTERS__
# define internal_function __attribute__ ((regparm (3), stdcall))
#else
# define internal_function
// ... rest of the code ...
|
45481b53b756642f2194ceef38d3c14f1f53d6a9
|
src/run_time_error.h
|
src/run_time_error.h
|
namespace setti {
namespace internal {
/**
* @brief Class to represent an run time error
*
* This class encapsulates run time error, and it is
* used to represent errors as symbol not found,
* command not found and out of range
*/
class RunTimeError : public std::exception {
public:
enum class ErrorCode: uint8_t{
NULL_ACCESS,
SYMBOL_NOT_FOUND,
CMD_NOT_FOUND,
OUT_OF_RANGE,
KEY_NOT_FOUND,
INCOMPATIBLE_TYPE
};
RunTimeError();
RunTimeError(ErrorCode code, const boost::format& msg)
: code_(code), msg_(msg) {}
virtual ~RunTimeError() noexcept = default;
/**
* @return the error description and the context as a text string.
*/
virtual const char* what() const noexcept {
msg_.str().c_str();
}
ErrorCode code_;
const boost::format& msg_;
};
}
}
#endif // SETI_EXCEPTION_H
|
namespace setti {
namespace internal {
/**
* @brief Class to represent an run time error
*
* This class encapsulates run time error, and it is
* used to represent errors as symbol not found,
* command not found and out of range
*/
class RunTimeError : public std::exception {
public:
enum class ErrorCode: uint8_t{
NULL_ACCESS,
SYMBOL_NOT_FOUND,
CMD_NOT_FOUND,
OUT_OF_RANGE,
KEY_NOT_FOUND,
INCOMPATIBLE_TYPE
};
RunTimeError();
RunTimeError(ErrorCode code, const boost::format& msg)
: code_(code), msg_(boost::str(msg)) {}
virtual ~RunTimeError() noexcept = default;
/**
* @return the error description and the context as a text string.
*/
virtual const char* what() const noexcept {
msg_.c_str();
}
ErrorCode code_;
std::string msg_;
};
}
}
#endif // SETI_EXCEPTION_H
|
Fix run time error message
|
Fix run time error message
|
C
|
apache-2.0
|
alexst07/shell-plus-plus,alexst07/shell-plus-plus,alexst07/setti,alexst07/seti,alexst07/shell-plus-plus
|
c
|
## Code Before:
namespace setti {
namespace internal {
/**
* @brief Class to represent an run time error
*
* This class encapsulates run time error, and it is
* used to represent errors as symbol not found,
* command not found and out of range
*/
class RunTimeError : public std::exception {
public:
enum class ErrorCode: uint8_t{
NULL_ACCESS,
SYMBOL_NOT_FOUND,
CMD_NOT_FOUND,
OUT_OF_RANGE,
KEY_NOT_FOUND,
INCOMPATIBLE_TYPE
};
RunTimeError();
RunTimeError(ErrorCode code, const boost::format& msg)
: code_(code), msg_(msg) {}
virtual ~RunTimeError() noexcept = default;
/**
* @return the error description and the context as a text string.
*/
virtual const char* what() const noexcept {
msg_.str().c_str();
}
ErrorCode code_;
const boost::format& msg_;
};
}
}
#endif // SETI_EXCEPTION_H
## Instruction:
Fix run time error message
## Code After:
namespace setti {
namespace internal {
/**
* @brief Class to represent an run time error
*
* This class encapsulates run time error, and it is
* used to represent errors as symbol not found,
* command not found and out of range
*/
class RunTimeError : public std::exception {
public:
enum class ErrorCode: uint8_t{
NULL_ACCESS,
SYMBOL_NOT_FOUND,
CMD_NOT_FOUND,
OUT_OF_RANGE,
KEY_NOT_FOUND,
INCOMPATIBLE_TYPE
};
RunTimeError();
RunTimeError(ErrorCode code, const boost::format& msg)
: code_(code), msg_(boost::str(msg)) {}
virtual ~RunTimeError() noexcept = default;
/**
* @return the error description and the context as a text string.
*/
virtual const char* what() const noexcept {
msg_.c_str();
}
ErrorCode code_;
std::string msg_;
};
}
}
#endif // SETI_EXCEPTION_H
|
# ... existing code ...
RunTimeError();
RunTimeError(ErrorCode code, const boost::format& msg)
: code_(code), msg_(boost::str(msg)) {}
virtual ~RunTimeError() noexcept = default;
# ... modified code ...
* @return the error description and the context as a text string.
*/
virtual const char* what() const noexcept {
msg_.c_str();
}
ErrorCode code_;
std::string msg_;
};
}
# ... rest of the code ...
|
818d6c9c98512088232bef0921074823afeff51e
|
src/Problem033.java
|
src/Problem033.java
|
// Project Euler problem 33
public class Problem033 {
public static void main(String[] args) {
System.out.println();
}
}
|
import java.util.ArrayList;
import java.util.HashSet;
// Project Euler problem 33
// Solved on 12/16/2016
public class Problem033 {
public static void main(String[] args) {
// numerator and denominator products of the valid fractions
int nCombined = 1;
int dCombined = 1;
// loop through valid fractions
for (int n = 11; n < 100; n++) {
for (int d = n; d < 100; d++) {
if (n % 10 == 0 || d % 10 == 0 || n == d) {
continue;
}
// create character array of the digits so they can be compared
char[] nArray = Integer.toString(n).toCharArray();
char[] dArray = Integer.toString(d).toCharArray();
Check:
// loop through digits and check for matches
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 2; j++) {
if (nArray[i] == dArray[j]) {
// cancel the matches and compare the fraction vale with the original value
int nRemaining = Character.getNumericValue(nArray[(i + 1) % 2]);
int dRemaining = Character.getNumericValue(dArray[(j + 1) % 2]);
if ((double)nRemaining / dRemaining == (double)n / d) {
nCombined *= n;
dCombined *= d;
break Check;
}
}
}
}
}
}
// use the Sieve of Eratosthenes to find primes
HashSet<Integer> sieve = new HashSet<>();
ArrayList<Integer> primes = new ArrayList<>();
for (int i = 2; i <= nCombined; i++) {
if (!sieve.contains(i)) {
primes.add(i);
int j = 1;
while (i * j <= nCombined) {
sieve.add(i * j);
j++;
}
}
}
// check for primes that reduce the fraction
for (int i = 0; i < primes.size(); i++) {
int tempPrime = primes.get(i);
if (nCombined % tempPrime == 0 && dCombined % tempPrime == 0) {
nCombined /= tempPrime;
dCombined /= tempPrime;
i--;
}
}
System.out.println(dCombined);
}
}
|
Add solution to problem 33
|
Add solution to problem 33
|
Java
|
mit
|
nsun200/ProjectEuler
|
java
|
## Code Before:
// Project Euler problem 33
public class Problem033 {
public static void main(String[] args) {
System.out.println();
}
}
## Instruction:
Add solution to problem 33
## Code After:
import java.util.ArrayList;
import java.util.HashSet;
// Project Euler problem 33
// Solved on 12/16/2016
public class Problem033 {
public static void main(String[] args) {
// numerator and denominator products of the valid fractions
int nCombined = 1;
int dCombined = 1;
// loop through valid fractions
for (int n = 11; n < 100; n++) {
for (int d = n; d < 100; d++) {
if (n % 10 == 0 || d % 10 == 0 || n == d) {
continue;
}
// create character array of the digits so they can be compared
char[] nArray = Integer.toString(n).toCharArray();
char[] dArray = Integer.toString(d).toCharArray();
Check:
// loop through digits and check for matches
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 2; j++) {
if (nArray[i] == dArray[j]) {
// cancel the matches and compare the fraction vale with the original value
int nRemaining = Character.getNumericValue(nArray[(i + 1) % 2]);
int dRemaining = Character.getNumericValue(dArray[(j + 1) % 2]);
if ((double)nRemaining / dRemaining == (double)n / d) {
nCombined *= n;
dCombined *= d;
break Check;
}
}
}
}
}
}
// use the Sieve of Eratosthenes to find primes
HashSet<Integer> sieve = new HashSet<>();
ArrayList<Integer> primes = new ArrayList<>();
for (int i = 2; i <= nCombined; i++) {
if (!sieve.contains(i)) {
primes.add(i);
int j = 1;
while (i * j <= nCombined) {
sieve.add(i * j);
j++;
}
}
}
// check for primes that reduce the fraction
for (int i = 0; i < primes.size(); i++) {
int tempPrime = primes.get(i);
if (nCombined % tempPrime == 0 && dCombined % tempPrime == 0) {
nCombined /= tempPrime;
dCombined /= tempPrime;
i--;
}
}
System.out.println(dCombined);
}
}
|
// ... existing code ...
import java.util.ArrayList;
import java.util.HashSet;
// Project Euler problem 33
// Solved on 12/16/2016
public class Problem033 {
public static void main(String[] args) {
// numerator and denominator products of the valid fractions
int nCombined = 1;
int dCombined = 1;
// loop through valid fractions
for (int n = 11; n < 100; n++) {
for (int d = n; d < 100; d++) {
if (n % 10 == 0 || d % 10 == 0 || n == d) {
continue;
}
// create character array of the digits so they can be compared
char[] nArray = Integer.toString(n).toCharArray();
char[] dArray = Integer.toString(d).toCharArray();
Check:
// loop through digits and check for matches
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 2; j++) {
if (nArray[i] == dArray[j]) {
// cancel the matches and compare the fraction vale with the original value
int nRemaining = Character.getNumericValue(nArray[(i + 1) % 2]);
int dRemaining = Character.getNumericValue(dArray[(j + 1) % 2]);
if ((double)nRemaining / dRemaining == (double)n / d) {
nCombined *= n;
dCombined *= d;
break Check;
}
}
}
}
}
}
// use the Sieve of Eratosthenes to find primes
HashSet<Integer> sieve = new HashSet<>();
ArrayList<Integer> primes = new ArrayList<>();
for (int i = 2; i <= nCombined; i++) {
if (!sieve.contains(i)) {
primes.add(i);
int j = 1;
while (i * j <= nCombined) {
sieve.add(i * j);
j++;
}
}
}
// check for primes that reduce the fraction
for (int i = 0; i < primes.size(); i++) {
int tempPrime = primes.get(i);
if (nCombined % tempPrime == 0 && dCombined % tempPrime == 0) {
nCombined /= tempPrime;
dCombined /= tempPrime;
i--;
}
}
System.out.println(dCombined);
}
}
// ... rest of the code ...
|
cdb10489382144f77dbe720f230ae92020ffb66c
|
messaging/test/test_message.py
|
messaging/test/test_message.py
|
"""Tests the message framework."""
import threading
import time
import unittest
from messaging.message_consumer import consume_messages
from messaging.message_producer import MessageProducer
class TestMessage(unittest.TestCase):
"""Tests the message framework."""
EXCHANGE = 'test'
def setUp(self):
self.message = None
def test_1_producer_1_consumer(self):
"""Test single producer single consumer."""
mp = MessageProducer(self.EXCHANGE)
def save_message(x):
self.message = x
def consume():
"""Function to consume messages."""
consume_messages(self.EXCHANGE, save_message)
consumer = threading.Thread(target=consume)
consumer.start()
# TODO(2016-07-10) Fix this race condition. It looks like if I send the
# message before the receiver has set up, the messages are never queued
# or something.
time.sleep(0.05)
self.assertIs(self.message, None)
sent_message = 'banana'
mp.publish(sent_message)
mp.publish('QUIT')
mp.kill()
consumer.join()
self.assertEqual(self.message, bytes(sent_message, 'utf-8'))
if __name__ == '__main__':
unittest.main()
|
"""Tests the message framework."""
import threading
import time
import unittest
from messaging.message_consumer import consume_messages
from messaging.message_producer import MessageProducer
class TestMessage(unittest.TestCase):
"""Tests the message framework."""
EXCHANGE = 'test'
def setUp(self):
self.message = None
def test_1_producer_1_consumer(self):
"""Test single producer single consumer."""
mp = MessageProducer(self.EXCHANGE)
def save_message(x):
self.message = x
def consume():
"""Function to consume messages."""
consume_messages(self.EXCHANGE, save_message)
consumer = threading.Thread(target=consume)
consumer.start()
# Give the receiver some time to set up, see comment below
time.sleep(0.05)
self.assertIs(self.message, None)
sent_message = 'banana'
mp.publish(sent_message)
mp.publish('QUIT')
for _ in range(10):
# Because of a race condition, if the message is sent before the
# receiver has set up, the messages are never queued or something.
# Keep resending until the thread exits.
consumer.join(0.05)
if consumer.is_alive():
mp.publish(sent_message)
mp.publish('QUIT')
consumer.join(0.05)
self.assertFalse(consumer.is_alive())
mp.kill()
self.assertEqual(self.message, bytes(sent_message, 'utf-8'))
if __name__ == '__main__':
unittest.main()
|
Make messaging test more reliable
|
Make messaging test more reliable
|
Python
|
mit
|
bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc
|
python
|
## Code Before:
"""Tests the message framework."""
import threading
import time
import unittest
from messaging.message_consumer import consume_messages
from messaging.message_producer import MessageProducer
class TestMessage(unittest.TestCase):
"""Tests the message framework."""
EXCHANGE = 'test'
def setUp(self):
self.message = None
def test_1_producer_1_consumer(self):
"""Test single producer single consumer."""
mp = MessageProducer(self.EXCHANGE)
def save_message(x):
self.message = x
def consume():
"""Function to consume messages."""
consume_messages(self.EXCHANGE, save_message)
consumer = threading.Thread(target=consume)
consumer.start()
# TODO(2016-07-10) Fix this race condition. It looks like if I send the
# message before the receiver has set up, the messages are never queued
# or something.
time.sleep(0.05)
self.assertIs(self.message, None)
sent_message = 'banana'
mp.publish(sent_message)
mp.publish('QUIT')
mp.kill()
consumer.join()
self.assertEqual(self.message, bytes(sent_message, 'utf-8'))
if __name__ == '__main__':
unittest.main()
## Instruction:
Make messaging test more reliable
## Code After:
"""Tests the message framework."""
import threading
import time
import unittest
from messaging.message_consumer import consume_messages
from messaging.message_producer import MessageProducer
class TestMessage(unittest.TestCase):
"""Tests the message framework."""
EXCHANGE = 'test'
def setUp(self):
self.message = None
def test_1_producer_1_consumer(self):
"""Test single producer single consumer."""
mp = MessageProducer(self.EXCHANGE)
def save_message(x):
self.message = x
def consume():
"""Function to consume messages."""
consume_messages(self.EXCHANGE, save_message)
consumer = threading.Thread(target=consume)
consumer.start()
# Give the receiver some time to set up, see comment below
time.sleep(0.05)
self.assertIs(self.message, None)
sent_message = 'banana'
mp.publish(sent_message)
mp.publish('QUIT')
for _ in range(10):
# Because of a race condition, if the message is sent before the
# receiver has set up, the messages are never queued or something.
# Keep resending until the thread exits.
consumer.join(0.05)
if consumer.is_alive():
mp.publish(sent_message)
mp.publish('QUIT')
consumer.join(0.05)
self.assertFalse(consumer.is_alive())
mp.kill()
self.assertEqual(self.message, bytes(sent_message, 'utf-8'))
if __name__ == '__main__':
unittest.main()
|
# ... existing code ...
consumer = threading.Thread(target=consume)
consumer.start()
# Give the receiver some time to set up, see comment below
time.sleep(0.05)
self.assertIs(self.message, None)
sent_message = 'banana'
mp.publish(sent_message)
mp.publish('QUIT')
for _ in range(10):
# Because of a race condition, if the message is sent before the
# receiver has set up, the messages are never queued or something.
# Keep resending until the thread exits.
consumer.join(0.05)
if consumer.is_alive():
mp.publish(sent_message)
mp.publish('QUIT')
consumer.join(0.05)
self.assertFalse(consumer.is_alive())
mp.kill()
self.assertEqual(self.message, bytes(sent_message, 'utf-8'))
# ... rest of the code ...
|
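The resend loop in this record works around the startup race; an alternative design, sketched here purely as an assumption and not as repository code, is a readiness handshake so the test publishes only after the consumer thread has started.
# Illustrative sketch: publish only once the consumer signals readiness.
import threading
ready = threading.Event()
received = []
def consume():
    ready.set()  # ideally set only after the subscription is actually established
    received.append('banana')  # stands in for consume_messages(EXCHANGE, save_message)
consumer = threading.Thread(target=consume)
consumer.start()
assert ready.wait(timeout=1.0)  # replaces a fixed time.sleep() before publishing
consumer.join()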
0b8a2a3a0f010538dd30ce04ca1ce943347a04a8
|
django_fixmystreet/fmsproxy/models.py
|
django_fixmystreet/fmsproxy/models.py
|
from django.db import models
import logging
logger = logging.getLogger(__name__)
class FMSProxy(models.Model):
name = models.CharField(max_length=20, unique=True)
def __unicode__(self):
return self.name
def get_assign_payload(report):
creator = report.get_creator()
payload = {
"application": report.contractor.fmsproxy.name.lower(),
"report":{
"id": report.id,
"created_at": report.created.isoformat(),
"modified_at": report.modified.isoformat(),
"category": report.display_category(),
"pdf_url": report.get_pdf_url_pro(),
"address": report.address,
"address_number": report.address_number,
"postal_code": report.postalcode,
"municipality": report.get_address_commune_name(),
"creator": {
"type": "pro" if report.is_pro() else "citizen",
"first_name": creator.first_name,
"last_name": creator.last_name,
"phone": creator.telephone,
"email": creator.email,
},
"comments": None,
},
}
comments = report.active_comments()
if comments:
payload["report"]["comments"] = []
for comment in comments:
payload["report"]["comments"].append({
"created_at": comment.created.isoformat(),
"name": comment.get_display_name(),
"text": comment.text,
})
return payload
|
from django.db import models
import logging
logger = logging.getLogger(__name__)
class FMSProxy(models.Model):
name = models.CharField(max_length=20, unique=True)
def __unicode__(self):
return self.name
def get_assign_payload(report):
creator = report.get_creator()
payload = {
"application": report.contractor.fmsproxy.name.lower(),
"report":{
"id": report.id,
"created_at": report.created.isoformat(),
"modified_at": report.modified.isoformat(),
"category": report.display_category(),
"pdf_url": report.get_pdf_url_pro(),
"address": report.address,
"address_number": report.address_number,
"postal_code": report.postalcode,
"municipality": report.get_address_commune_name(),
"creator": {
"type": "pro" if report.is_pro() else "citizen",
"first_name": creator.first_name,
"last_name": creator.last_name,
"phone": creator.telephone,
"email": creator.email,
},
"comments": [],
},
}
comments = report.active_attachments_pro()
for comment in comments:
payload["report"]["comments"].append({
"created_at": comment.created.isoformat(),
"name": comment.get_display_name(),
"text": comment.text,
})
return payload
|
Use `active_attachments_pro` instead of `active_comments`.
|
Fix: Use `active_attachments_pro` instead of `active_comments`.
|
Python
|
agpl-3.0
|
IMIO/django-fixmystreet,IMIO/django-fixmystreet,IMIO/django-fixmystreet,IMIO/django-fixmystreet
|
python
|
## Code Before:
from django.db import models
import logging
logger = logging.getLogger(__name__)
class FMSProxy(models.Model):
name = models.CharField(max_length=20, unique=True)
def __unicode__(self):
return self.name
def get_assign_payload(report):
creator = report.get_creator()
payload = {
"application": report.contractor.fmsproxy.name.lower(),
"report":{
"id": report.id,
"created_at": report.created.isoformat(),
"modified_at": report.modified.isoformat(),
"category": report.display_category(),
"pdf_url": report.get_pdf_url_pro(),
"address": report.address,
"address_number": report.address_number,
"postal_code": report.postalcode,
"municipality": report.get_address_commune_name(),
"creator": {
"type": "pro" if report.is_pro() else "citizen",
"first_name": creator.first_name,
"last_name": creator.last_name,
"phone": creator.telephone,
"email": creator.email,
},
"comments": None,
},
}
comments = report.active_comments()
if comments:
payload["report"]["comments"] = []
for comment in comments:
payload["report"]["comments"].append({
"created_at": comment.created.isoformat(),
"name": comment.get_display_name(),
"text": comment.text,
})
return payload
## Instruction:
Fix: Use `active_attachments_pro` instead of `active_comments`.
## Code After:
from django.db import models
import logging
logger = logging.getLogger(__name__)
class FMSProxy(models.Model):
name = models.CharField(max_length=20, unique=True)
def __unicode__(self):
return self.name
def get_assign_payload(report):
creator = report.get_creator()
payload = {
"application": report.contractor.fmsproxy.name.lower(),
"report":{
"id": report.id,
"created_at": report.created.isoformat(),
"modified_at": report.modified.isoformat(),
"category": report.display_category(),
"pdf_url": report.get_pdf_url_pro(),
"address": report.address,
"address_number": report.address_number,
"postal_code": report.postalcode,
"municipality": report.get_address_commune_name(),
"creator": {
"type": "pro" if report.is_pro() else "citizen",
"first_name": creator.first_name,
"last_name": creator.last_name,
"phone": creator.telephone,
"email": creator.email,
},
"comments": [],
},
}
comments = report.active_attachments_pro()
for comment in comments:
payload["report"]["comments"].append({
"created_at": comment.created.isoformat(),
"name": comment.get_display_name(),
"text": comment.text,
})
return payload
|
...
"phone": creator.telephone,
"email": creator.email,
},
"comments": [],
},
}
comments = report.active_attachments_pro()
for comment in comments:
payload["report"]["comments"].append({
"created_at": comment.created.isoformat(),
"name": comment.get_display_name(),
"text": comment.text,
})
return payload
...
|
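The switch from "comments": None to an empty list is a small API design choice: consumers of the payload can always iterate the field without a null check. A trivial illustrative sketch with invented data:
# Illustrative: an always-present list is easier to consume than an optional None.
payload = {'comments': []}
for comment in payload['comments']:  # safe even when no comments exist
    print(comment)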
eaff795bddb0e07f4ad4e4c9277c5c0f6f199380
|
salt/beacons/__init__.py
|
salt/beacons/__init__.py
|
'''
This package contains the loader modules for the salt streams system
'''
# Import salt libs
import salt.loader
class Beacon(object):
'''
This class is used to eveluate and execute on the beacon system
'''
def __init__(self, opts):
self.opts = opts
self.beacons = salt.loader.beacons(opts)
def process(self, config):
'''
Process the configured beacons
The config must be a dict and looks like this in yaml
code_block:: yaml
beacons:
inotify:
- /etc/fstab
- /var/cache/foo/*
'''
ret = []
for mod in config:
fun_str = '{0}.beacon'.format(mod)
if fun_str in self.beacons:
tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
raw = self.beacons[fun_str](config[mod])
for data in raw:
if 'tag' in data:
tag += data.pop('tag')
ret.append({'tag': tag, 'data': data})
return ret
|
'''
This package contains the loader modules for the salt streams system
'''
# Import salt libs
import salt.loader
class Beacon(object):
'''
This class is used to eveluate and execute on the beacon system
'''
def __init__(self, opts):
self.opts = opts
self.beacons = salt.loader.beacons(opts)
def process(self, config):
'''
Process the configured beacons
The config must be a dict and looks like this in yaml
code_block:: yaml
beacons:
inotify:
- /etc/fstab
- /var/cache/foo/*
'''
ret = []
for mod in config:
fun_str = '{0}.beacon'.format(mod)
if fun_str in self.beacons:
tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
raw = self.beacons[fun_str](config[mod])
for data in raw:
if 'tag' in data:
tag += data.pop('tag')
if not 'id' in data:
data['id'] = self.opts['id']
ret.append({'tag': tag, 'data': data})
return ret
|
Add id to the beacon event dataset
|
Add id to the beacon event dataset
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
python
|
## Code Before:
'''
This package contains the loader modules for the salt streams system
'''
# Import salt libs
import salt.loader
class Beacon(object):
'''
This class is used to eveluate and execute on the beacon system
'''
def __init__(self, opts):
self.opts = opts
self.beacons = salt.loader.beacons(opts)
def process(self, config):
'''
Process the configured beacons
The config must be a dict and looks like this in yaml
code_block:: yaml
beacons:
inotify:
- /etc/fstab
- /var/cache/foo/*
'''
ret = []
for mod in config:
fun_str = '{0}.beacon'.format(mod)
if fun_str in self.beacons:
tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
raw = self.beacons[fun_str](config[mod])
for data in raw:
if 'tag' in data:
tag += data.pop('tag')
ret.append({'tag': tag, 'data': data})
return ret
## Instruction:
Add id to the beacon event dataset
## Code After:
'''
This package contains the loader modules for the salt streams system
'''
# Import salt libs
import salt.loader
class Beacon(object):
'''
This class is used to eveluate and execute on the beacon system
'''
def __init__(self, opts):
self.opts = opts
self.beacons = salt.loader.beacons(opts)
def process(self, config):
'''
Process the configured beacons
The config must be a dict and looks like this in yaml
code_block:: yaml
beacons:
inotify:
- /etc/fstab
- /var/cache/foo/*
'''
ret = []
for mod in config:
fun_str = '{0}.beacon'.format(mod)
if fun_str in self.beacons:
tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
raw = self.beacons[fun_str](config[mod])
for data in raw:
if 'tag' in data:
tag += data.pop('tag')
if not 'id' in data:
data['id'] = self.opts['id']
ret.append({'tag': tag, 'data': data})
return ret
|
# ... existing code ...
for data in raw:
if 'tag' in data:
tag += data.pop('tag')
if not 'id' in data:
data['id'] = self.opts['id']
ret.append({'tag': tag, 'data': data})
return ret
# ... rest of the code ...
|
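The new branch in this record fills in the minion id only when the beacon module did not set one itself (the more idiomatic spelling of that test is 'id' not in data). A small hedged sketch of the guard in isolation, with invented payload keys:
# Illustrative: inject the minion id only if the beacon data lacks one.
data = {'change': 'modified'}  # hypothetical beacon event payload
minion_id = 'minion-1'  # plays the role of self.opts['id']
if 'id' not in data:
    data['id'] = minion_id
assert data['id'] == 'minion-1'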
7ef1717f34360ae48f640439fd6d6706ae755e90
|
functional_tests/base.py
|
functional_tests/base.py
|
from selenium import webdriver
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.core.cache import cache
class BrowserTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.PhantomJS()
self.browser.set_window_size(1024, 768)
def tearDown(self):
self.browser.quit()
cache.clear()
|
from selenium.webdriver.chrome.webdriver import WebDriver
from selenium.webdriver.chrome.options import Options
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.core.cache import cache
class BrowserTest(StaticLiveServerTestCase):
def setUp(self):
chrome_options = Options()
chrome_options.add_argument("--headless")
chrome_options.add_argument("--window-size=1920x1080")
self.browser = WebDriver(chrome_options=chrome_options)
self.browser.set_window_size(1024, 768)
def tearDown(self):
self.browser.quit()
cache.clear()
|
Use headless chrome for functional test
|
Use headless chrome for functional test
|
Python
|
mit
|
essanpupil/cashflow,essanpupil/cashflow
|
python
|
## Code Before:
from selenium import webdriver
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.core.cache import cache
class BrowserTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.PhantomJS()
self.browser.set_window_size(1024, 768)
def tearDown(self):
self.browser.quit()
cache.clear()
## Instruction:
Use headless chrome for functional test
## Code After:
from selenium.webdriver.chrome.webdriver import WebDriver
from selenium.webdriver.chrome.options import Options
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.core.cache import cache
class BrowserTest(StaticLiveServerTestCase):
def setUp(self):
chrome_options = Options()
chrome_options.add_argument("--headless")
chrome_options.add_argument("--window-size=1920x1080")
self.browser = WebDriver(chrome_options=chrome_options)
self.browser.set_window_size(1024, 768)
def tearDown(self):
self.browser.quit()
cache.clear()
|
// ... existing code ...
from selenium.webdriver.chrome.webdriver import WebDriver
from selenium.webdriver.chrome.options import Options
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.core.cache import cache
// ... modified code ...
class BrowserTest(StaticLiveServerTestCase):
def setUp(self):
chrome_options = Options()
chrome_options.add_argument("--headless")
chrome_options.add_argument("--window-size=1920x1080")
self.browser = WebDriver(chrome_options=chrome_options)
self.browser.set_window_size(1024, 768)
def tearDown(self):
// ... rest of the code ...
|
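One hedged caveat on this record: newer Selenium releases deprecate the chrome_options keyword in favour of options, and Chrome's --window-size flag is normally given as a comma-separated width,height. A version-tolerant variant might look like the sketch below; the exact version boundary is an assumption.
# Illustrative, version-tolerant variant of the headless setup above.
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.webdriver import WebDriver
opts = Options()
opts.add_argument('--headless')
opts.add_argument('--window-size=1920,1080')
try:
    browser = WebDriver(options=opts)  # newer Selenium accepts options=
except TypeError:
    browser = WebDriver(chrome_options=opts)  # older Selenium, as in the commit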
d43a08706f3072a0b97d01526ffd0de0d4a4110c
|
niworkflows/conftest.py
|
niworkflows/conftest.py
|
"""py.test configuration"""
import os
from pathlib import Path
import numpy
import pytest
from .utils.bids import collect_data
test_data_env = os.getenv('TEST_DATA_HOME',
str(Path.home() / '.cache' / 'stanford-crn'))
data_dir = Path(test_data_env) / 'BIDS-examples-1-enh-ds054'
@pytest.fixture(autouse=True)
def add_np(doctest_namespace):
doctest_namespace['np'] = numpy
doctest_namespace['os'] = os
doctest_namespace['Path'] = Path
doctest_namespace['datadir'] = data_dir
doctest_namespace['bids_collect_data'] = collect_data
@pytest.fixture
def testdata_dir():
return data_dir
|
"""py.test configuration"""
import os
from pathlib import Path
import numpy as np
import nibabel as nb
import pytest
import tempfile
from .utils.bids import collect_data
test_data_env = os.getenv('TEST_DATA_HOME',
str(Path.home() / '.cache' / 'stanford-crn'))
data_dir = Path(test_data_env) / 'BIDS-examples-1-enh-ds054'
@pytest.fixture(autouse=True)
def add_np(doctest_namespace):
doctest_namespace['np'] = np
doctest_namespace['os'] = os
doctest_namespace['Path'] = Path
doctest_namespace['datadir'] = data_dir
doctest_namespace['bids_collect_data'] = collect_data
tmpdir = tempfile.TemporaryDirectory()
nifti_fname = str(Path(tmpdir.name) / 'test.nii.gz')
nb.Nifti1Image(np.random.random((5, 5)).astype('f4'), np.eye(4)).to_filename(nifti_fname)
doctest_namespace['nifti_fname'] = nifti_fname
yield
tmpdir.cleanup()
@pytest.fixture
def testdata_dir():
return data_dir
|
Make nifti_fname available to doctests
|
DOCTEST: Make nifti_fname available to doctests
|
Python
|
apache-2.0
|
oesteban/niworkflows,oesteban/niworkflows,poldracklab/niworkflows,oesteban/niworkflows,poldracklab/niworkflows
|
python
|
## Code Before:
"""py.test configuration"""
import os
from pathlib import Path
import numpy
import pytest
from .utils.bids import collect_data
test_data_env = os.getenv('TEST_DATA_HOME',
str(Path.home() / '.cache' / 'stanford-crn'))
data_dir = Path(test_data_env) / 'BIDS-examples-1-enh-ds054'
@pytest.fixture(autouse=True)
def add_np(doctest_namespace):
doctest_namespace['np'] = numpy
doctest_namespace['os'] = os
doctest_namespace['Path'] = Path
doctest_namespace['datadir'] = data_dir
doctest_namespace['bids_collect_data'] = collect_data
@pytest.fixture
def testdata_dir():
return data_dir
## Instruction:
DOCTEST: Make nifti_fname available to doctests
## Code After:
"""py.test configuration"""
import os
from pathlib import Path
import numpy as np
import nibabel as nb
import pytest
import tempfile
from .utils.bids import collect_data
test_data_env = os.getenv('TEST_DATA_HOME',
str(Path.home() / '.cache' / 'stanford-crn'))
data_dir = Path(test_data_env) / 'BIDS-examples-1-enh-ds054'
@pytest.fixture(autouse=True)
def add_np(doctest_namespace):
doctest_namespace['np'] = np
doctest_namespace['os'] = os
doctest_namespace['Path'] = Path
doctest_namespace['datadir'] = data_dir
doctest_namespace['bids_collect_data'] = collect_data
tmpdir = tempfile.TemporaryDirectory()
nifti_fname = str(Path(tmpdir.name) / 'test.nii.gz')
nb.Nifti1Image(np.random.random((5, 5)).astype('f4'), np.eye(4)).to_filename(nifti_fname)
doctest_namespace['nifti_fname'] = nifti_fname
yield
tmpdir.cleanup()
@pytest.fixture
def testdata_dir():
return data_dir
|
...
"""py.test configuration"""
import os
from pathlib import Path
import numpy as np
import nibabel as nb
import pytest
import tempfile
from .utils.bids import collect_data
...
@pytest.fixture(autouse=True)
def add_np(doctest_namespace):
doctest_namespace['np'] = np
doctest_namespace['os'] = os
doctest_namespace['Path'] = Path
doctest_namespace['datadir'] = data_dir
doctest_namespace['bids_collect_data'] = collect_data
tmpdir = tempfile.TemporaryDirectory()
nifti_fname = str(Path(tmpdir.name) / 'test.nii.gz')
nb.Nifti1Image(np.random.random((5, 5)).astype('f4'), np.eye(4)).to_filename(nifti_fname)
doctest_namespace['nifti_fname'] = nifti_fname
yield
tmpdir.cleanup()
@pytest.fixture
def testdata_dir():
...
|
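With the fixture in this record, module doctests can use nifti_fname directly. A hedged example of what such a doctest could look like; the function itself is invented for illustration.
# Illustrative doctest that relies on the injected nifti_fname variable.
def image_shape(fname):
    """Return the array shape of a NIfTI file.
    >>> image_shape(nifti_fname)
    (5, 5)
    """
    import nibabel as nb
    return nb.load(fname).shape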
8a57504c1323b9c0309142c9591be60111b51bc8
|
org.eclipse.dawnsci.analysis.api/src/org/eclipse/dawnsci/analysis/api/EventTracker.java
|
org.eclipse.dawnsci.analysis.api/src/org/eclipse/dawnsci/analysis/api/EventTracker.java
|
/*-
* Copyright 2015 Diamond Light Source Ltd.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.eclipse.dawnsci.analysis.api;
public interface EventTracker {
/**
* This method is used to track events (log, action, exception etc) in the application.
*
* @param name
* Unique name (human readable without space) of event to be tracked
* Example: Data_Browsing_Perspective_launch
* @throws Exception
*/
public void track(String name) throws Exception;
}
|
/*-
* Copyright 2015 Diamond Light Source Ltd.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.eclipse.dawnsci.analysis.api;
public interface EventTracker {
/**
* This method is used to track events (log, action, exception etc) in the application.
*
* @param name
* Unique name (human readable without space) of event to be tracked
* Example: Data_Browsing_Perspective_launch
* @throws Exception
*/
public void track(String name) throws Exception;
/**
* This method is used to track events given an id and a label.
*
* @param id
* Unique id
* @param label
* Name
*/
public void track(String id, String label) throws Exception;
}
|
Add track method with id and label parameter
|
Add track method with id and label parameter
|
Java
|
epl-1.0
|
jamesmudd/dawnsci,colinpalmer/dawnsci,willrogers/dawnsci,DawnScience/dawnsci,PeterC-DLS/org.eclipse.dataset,jamesmudd/dawnsci,eclipse/dawnsci,Anthchirp/dawnsci,xen-0/dawnsci,Anthchirp/dawnsci,jamesmudd/dawnsci,colinpalmer/dawnsci,jonahkichwacoders/dawnsci,jonahkichwacoders/dawnsci,PeterC-DLS/org.eclipse.dataset,belkassaby/dawnsci,willrogers/dawnsci
|
java
|
## Code Before:
/*-
* Copyright 2015 Diamond Light Source Ltd.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.eclipse.dawnsci.analysis.api;
public interface EventTracker {
/**
* This method is used to track events (log, action, exception etc) in the application.
*
* @param name
* Unique name (human readable without space) of event to be tracked
* Example: Data_Browsing_Perspective_launch
* @throws Exception
*/
public void track(String name) throws Exception;
}
## Instruction:
Add track method with id and label parameter
## Code After:
/*-
* Copyright 2015 Diamond Light Source Ltd.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.eclipse.dawnsci.analysis.api;
public interface EventTracker {
/**
* This method is used to track events (log, action, exception etc) in the application.
*
* @param name
* Unique name (human readable without space) of event to be tracked
* Example: Data_Browsing_Perspective_launch
* @throws Exception
*/
public void track(String name) throws Exception;
/**
* This method is used to track events given an id and a label.
*
* @param id
* Unique id
* @param label
* Name
*/
public void track(String id, String label) throws Exception;
}
|
// ... existing code ...
*/
public void track(String name) throws Exception;
/**
* This method is used to track events given an id and a label.
*
* @param id
* Unique id
* @param label
* Name
*/
public void track(String id, String label) throws Exception;
}
// ... rest of the code ...
|
9accbde96f493ba795eef3d102a41aeecc039dce
|
grep_sal_code.py
|
grep_sal_code.py
|
import argparse
import subprocess
import sys
EXCLUSIONS = ['*.pyc', '*.log', 'venv*', 'static/*', 'site_static/*', 'datatableview/*', '*.db']
def main():
args = parse_args()
# Normally we like to build subprocess commands in lists, but it's
# a lot easier to do all of the globbing we want with shell=True,
# so we'll build up a string.
cmd = 'grep -R --colour=always '
cmd += " ".join("--exclude='{}'".format(i) for i in EXCLUSIONS)
for option in args.options or []:
cmd += ' -{}'.format(option)
cmd += " '{}'".format(r'\|'.join(args.search_terms))
cmd += ' *'
try:
results = subprocess.check_output(cmd, shell=True)
except subprocess.CalledProcessError:
# Most common error is that there are no results!
results = ''
print results.strip()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('search_terms', nargs='*')
parser.add_argument('--options', nargs='*')
return parser.parse_args()
if __name__ == "__main__":
main()
|
import argparse
import os
import subprocess
import sys
EXCLUSIONS = ['*.pyc', '*.log', 'venv*', 'static/*', 'site_static/*', 'datatableview/*', '*.db']
def main():
args = parse_args()
# Normally we like to build subprocess commands in lists, but it's
# a lot easier to do all of the globbing we want with shell=True,
# so we'll build up a string.
cmd = 'grep -R --colour=always '
cmd += " ".join("--exclude='{}'".format(i) for i in EXCLUSIONS)
options = args.options if args.options else []
for option in options:
cmd += ' -{}'.format(option)
if args.edit and 'l' not in options:
cmd += ' -l'
cmd += " '{}'".format(r'\|'.join(args.search_terms))
cmd += ' *'
try:
results = subprocess.check_output(cmd, shell=True)
except subprocess.CalledProcessError:
# Most common error is that there are no results!
results = ''
print results.strip()
if args.edit:
subprocess.check_call([os.getenv('EDITOR')] + [l.strip() for l in results.splitlines()])
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('search_terms', nargs='*')
parser.add_argument('--options', nargs='*')
msg = 'Open files with matches in {}.'.format(os.getenv('EDITOR') or '<No EDITOR set>')
parser.add_argument('--edit', action='store_true', help=msg)
return parser.parse_args()
if __name__ == "__main__":
main()
|
Add straight-to-editor feature to grep script.
|
Add straight-to-editor feature to grep script.
|
Python
|
apache-2.0
|
sheagcraig/sal,salopensource/sal,salopensource/sal,salopensource/sal,sheagcraig/sal,sheagcraig/sal,sheagcraig/sal,salopensource/sal
|
python
|
## Code Before:
import argparse
import subprocess
import sys
EXCLUSIONS = ['*.pyc', '*.log', 'venv*', 'static/*', 'site_static/*', 'datatableview/*', '*.db']
def main():
args = parse_args()
# Normally we like to build subprocess commands in lists, but it's
# a lot easier to do all of the globbing we want with shell=True,
# so we'll build up a string.
cmd = 'grep -R --colour=always '
cmd += " ".join("--exclude='{}'".format(i) for i in EXCLUSIONS)
for option in args.options or []:
cmd += ' -{}'.format(option)
cmd += " '{}'".format(r'\|'.join(args.search_terms))
cmd += ' *'
try:
results = subprocess.check_output(cmd, shell=True)
except subprocess.CalledProcessError:
# Most common error is that there are no results!
results = ''
print results.strip()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('search_terms', nargs='*')
parser.add_argument('--options', nargs='*')
return parser.parse_args()
if __name__ == "__main__":
main()
## Instruction:
Add straight-to-editor feature to grep script.
## Code After:
import argparse
import os
import subprocess
import sys
EXCLUSIONS = ['*.pyc', '*.log', 'venv*', 'static/*', 'site_static/*', 'datatableview/*', '*.db']
def main():
args = parse_args()
# Normally we like to build subprocess commands in lists, but it's
# a lot easier to do all of the globbing we want with shell=True,
# so we'll build up a string.
cmd = 'grep -R --colour=always '
cmd += " ".join("--exclude='{}'".format(i) for i in EXCLUSIONS)
options = args.options if args.options else []
for option in options:
cmd += ' -{}'.format(option)
if args.edit and 'l' not in options:
cmd += ' -l'
cmd += " '{}'".format(r'\|'.join(args.search_terms))
cmd += ' *'
try:
results = subprocess.check_output(cmd, shell=True)
except subprocess.CalledProcessError:
# Most common error is that there are no results!
results = ''
print results.strip()
if args.edit:
subprocess.check_call([os.getenv('EDITOR')] + [l.strip() for l in results.splitlines()])
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('search_terms', nargs='*')
parser.add_argument('--options', nargs='*')
msg = 'Open files with matches in {}.'.format(os.getenv('EDITOR') or '<No EDITOR set>')
parser.add_argument('--edit', action='store_true', help=msg)
return parser.parse_args()
if __name__ == "__main__":
main()
|
# ... existing code ...
import argparse
import os
import subprocess
import sys
# ... modified code ...
# so we'll build up a string.
cmd = 'grep -R --colour=always '
cmd += " ".join("--exclude='{}'".format(i) for i in EXCLUSIONS)
options = args.options if args.options else []
for option in options:
cmd += ' -{}'.format(option)
if args.edit and 'l' not in options:
cmd += ' -l'
cmd += " '{}'".format(r'\|'.join(args.search_terms))
cmd += ' *'
...
results = ''
print results.strip()
if args.edit:
subprocess.check_call([os.getenv('EDITOR')] + [l.strip() for l in results.splitlines()])
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('search_terms', nargs='*')
parser.add_argument('--options', nargs='*')
msg = 'Open files with matches in {}.'.format(os.getenv('EDITOR') or '<No EDITOR set>')
parser.add_argument('--edit', action='store_true', help=msg)
return parser.parse_args()
# ... rest of the code ...
|
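A usage note on the --edit path shown above: it assumes the EDITOR environment variable is set and that grep produced at least one match. A slightly more defensive variant of the final step, illustrative only and not part of the actual commit, could look like this:

import os
import subprocess

def open_matches_in_editor(results):
    # Skip launching when EDITOR is unset or grep returned no matches.
    editor = os.getenv('EDITOR')
    files = [line.strip() for line in results.splitlines() if line.strip()]
    if editor and files:
        subprocess.check_call([editor] + files)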
c9449516bc3bfd15873347d1233001c51939a5e6
|
pipeline/utils/backend_helper.py
|
pipeline/utils/backend_helper.py
|
from taskflow.jobs import backends as job_backends
from taskflow.persistence import backends as persistence_backends
# Default host/port of ZooKeeper service.
ZK_HOST = '104.197.150.171:2181'
# Default jobboard configuration.
JB_CONF = {
'hosts': ZK_HOST,
'board': 'zookeeper',
'path': '/taskflow/99-bottles-demo',
}
# Default persistence configuration.
PERSISTENCE_CONF = {
'connection': 'zookeeper',
'hosts': ZK_HOST,
'path': '/taskflow/persistence',
}
def default_persistence_backend():
return persistence_backends.fetch(PERSISTENCE_CONF)
def default_jobboard_backend(name):
return job_backends.fetch(name,
JB_CONF,
persistence=default_persistence_backend())
|
from taskflow.jobs import backends as job_backends
from taskflow.persistence import backends as persistence_backends
# Default host/port of ZooKeeper service.
ZK_HOST = '104.197.150.171:2181'
# Default jobboard configuration.
JB_CONF = {
'hosts': ZK_HOST,
'board': 'zookeeper',
'path': '/taskflow/dev',
}
# Default persistence configuration.
PERSISTENCE_CONF = {
'connection': 'zookeeper',
'hosts': ZK_HOST,
'path': '/taskflow/persistence',
}
def default_persistence_backend():
return persistence_backends.fetch(PERSISTENCE_CONF)
def default_jobboard_backend(name):
return job_backends.fetch(name,
JB_CONF,
persistence=default_persistence_backend())
|
Fix the bad jobboard path.
|
Fix the bad jobboard path.
Change-Id: I3281babfa835d7d4b76f7f299887959fa5342e85
|
Python
|
apache-2.0
|
ethanbao/artman,ethanbao/artman,googleapis/artman,googleapis/artman,shinfan/artman,googleapis/artman
|
python
|
## Code Before:
from taskflow.jobs import backends as job_backends
from taskflow.persistence import backends as persistence_backends
# Default host/port of ZooKeeper service.
ZK_HOST = '104.197.150.171:2181'
# Default jobboard configuration.
JB_CONF = {
'hosts': ZK_HOST,
'board': 'zookeeper',
'path': '/taskflow/99-bottles-demo',
}
# Default persistence configuration.
PERSISTENCE_CONF = {
'connection': 'zookeeper',
'hosts': ZK_HOST,
'path': '/taskflow/persistence',
}
def default_persistence_backend():
return persistence_backends.fetch(PERSISTENCE_CONF)
def default_jobboard_backend(name):
return job_backends.fetch(name,
JB_CONF,
persistence=default_persistence_backend())
## Instruction:
Fix the bad jobboard path.
Change-Id: I3281babfa835d7d4b76f7f299887959fa5342e85
## Code After:
from taskflow.jobs import backends as job_backends
from taskflow.persistence import backends as persistence_backends
# Default host/port of ZooKeeper service.
ZK_HOST = '104.197.150.171:2181'
# Default jobboard configuration.
JB_CONF = {
'hosts': ZK_HOST,
'board': 'zookeeper',
'path': '/taskflow/dev',
}
# Default persistence configuration.
PERSISTENCE_CONF = {
'connection': 'zookeeper',
'hosts': ZK_HOST,
'path': '/taskflow/persistence',
}
def default_persistence_backend():
return persistence_backends.fetch(PERSISTENCE_CONF)
def default_jobboard_backend(name):
return job_backends.fetch(name,
JB_CONF,
persistence=default_persistence_backend())
|
...
JB_CONF = {
'hosts': ZK_HOST,
'board': 'zookeeper',
'path': '/taskflow/dev',
}
# Default persistence configuration.
...
|
da51183e64875119377d3dd5ffe85c958c23fc16
|
moksha/api/widgets/flot/flot.py
|
moksha/api/widgets/flot/flot.py
|
from tw.jquery.flot import FlotWidget
from moksha.api.widgets import LiveWidget
class LiveFlotWidget(LiveWidget):
""" A live graphing widget """
topic = 'flot_demo'
params = ['id', 'data', 'options', 'height', 'width', 'onmessage']
children = [FlotWidget('flot')]
onmessage = '$.plot($("#${id}"),json[0]["data"],json[0]["options"])'
template = '<div id="${id}" style="width:${width};height:${height};" />'
height = '250px'
width = '390px'
options = {}
data = [{}]
|
from tw.jquery.flot import flot_js, excanvas_js, flot_css
from moksha.api.widgets import LiveWidget
class LiveFlotWidget(LiveWidget):
""" A live graphing widget """
topic = 'flot_demo'
params = ['id', 'data', 'options', 'height', 'width', 'onmessage']
onmessage = '$.plot($("#${id}"),json[0]["data"],json[0]["options"])'
template = '<div id="${id}" style="width:${width};height:${height};" />'
javascript = [flot_js, excanvas_js]
css = [flot_css]
height = '250px'
width = '390px'
options = {}
data = [{}]
|
Make our LiveFlotWidget not pull in jQuery
|
Make our LiveFlotWidget not pull in jQuery
|
Python
|
apache-2.0
|
lmacken/moksha,ralphbean/moksha,pombredanne/moksha,ralphbean/moksha,ralphbean/moksha,mokshaproject/moksha,pombredanne/moksha,pombredanne/moksha,lmacken/moksha,lmacken/moksha,mokshaproject/moksha,pombredanne/moksha,mokshaproject/moksha,mokshaproject/moksha
|
python
|
## Code Before:
from tw.jquery.flot import FlotWidget
from moksha.api.widgets import LiveWidget
class LiveFlotWidget(LiveWidget):
""" A live graphing widget """
topic = 'flot_demo'
params = ['id', 'data', 'options', 'height', 'width', 'onmessage']
children = [FlotWidget('flot')]
onmessage = '$.plot($("#${id}"),json[0]["data"],json[0]["options"])'
template = '<div id="${id}" style="width:${width};height:${height};" />'
height = '250px'
width = '390px'
options = {}
data = [{}]
## Instruction:
Make our LiveFlotWidget not pull in jQuery
## Code After:
from tw.jquery.flot import flot_js, excanvas_js, flot_css
from moksha.api.widgets import LiveWidget
class LiveFlotWidget(LiveWidget):
""" A live graphing widget """
topic = 'flot_demo'
params = ['id', 'data', 'options', 'height', 'width', 'onmessage']
onmessage = '$.plot($("#${id}"),json[0]["data"],json[0]["options"])'
template = '<div id="${id}" style="width:${width};height:${height};" />'
javascript = [flot_js, excanvas_js]
css = [flot_css]
height = '250px'
width = '390px'
options = {}
data = [{}]
|
# ... existing code ...
from tw.jquery.flot import flot_js, excanvas_js, flot_css
from moksha.api.widgets import LiveWidget
class LiveFlotWidget(LiveWidget):
# ... modified code ...
""" A live graphing widget """
topic = 'flot_demo'
params = ['id', 'data', 'options', 'height', 'width', 'onmessage']
onmessage = '$.plot($("#${id}"),json[0]["data"],json[0]["options"])'
template = '<div id="${id}" style="width:${width};height:${height};" />'
javascript = [flot_js, excanvas_js]
css = [flot_css]
height = '250px'
width = '390px'
options = {}
# ... rest of the code ...
|
0701e34c76a4ea55b1334c9b48c88fd346f49fa2
|
nazs/apps.py
|
nazs/apps.py
|
from django.apps import AppConfig
import os
import pkg_resources
class NAZSConfig(AppConfig):
name = 'nazs'
def ready(self):
from django.core import management
from django.conf import settings
from .sudo import set_euid
set_euid()
# Sync volatile db and set permissions
volatile_db = settings.DATABASES['volatile']['NAME']
management.call_command('syncdb',
database='volatile',
interactive=False,
verbosity=0)
os.chmod(volatile_db, 0600)
# Load all modules
for app in pkg_resources.iter_entry_points('nazs.app'):
__import__(app.module_name + '.module')
|
from django.apps import AppConfig
import pkg_resources
class NAZSConfig(AppConfig):
name = 'nazs'
def ready(self):
from .sudo import set_euid
set_euid()
# Load all modules
for app in pkg_resources.iter_entry_points('nazs.app'):
__import__(app.module_name + '.module')
|
Stop auto creation of shm database
|
Stop auto creation of shm database
|
Python
|
agpl-3.0
|
exekias/droplet,exekias/droplet,exekias/droplet
|
python
|
## Code Before:
from django.apps import AppConfig
import os
import pkg_resources
class NAZSConfig(AppConfig):
name = 'nazs'
def ready(self):
from django.core import management
from django.conf import settings
from .sudo import set_euid
set_euid()
# Sync volatile db and set permissions
volatile_db = settings.DATABASES['volatile']['NAME']
management.call_command('syncdb',
database='volatile',
interactive=False,
verbosity=0)
os.chmod(volatile_db, 0600)
# Load all modules
for app in pkg_resources.iter_entry_points('nazs.app'):
__import__(app.module_name + '.module')
## Instruction:
Stop auto creation of shm database
## Code After:
from django.apps import AppConfig
import pkg_resources
class NAZSConfig(AppConfig):
name = 'nazs'
def ready(self):
from .sudo import set_euid
set_euid()
# Load all modules
for app in pkg_resources.iter_entry_points('nazs.app'):
__import__(app.module_name + '.module')
|
...
from django.apps import AppConfig
import pkg_resources
...
name = 'nazs'
def ready(self):
from .sudo import set_euid
set_euid()
# Load all modules
for app in pkg_resources.iter_entry_points('nazs.app'):
...
|
893e821a0bdd9807f243465301e9360e909e2ad0
|
lib/log.h
|
lib/log.h
|
/* $Id$ */
#ifndef __log_h__
#define __log_h__
#include "internal.h"
#define UNSHIELD_LOG_LEVEL_LOWEST 0
#define UNSHIELD_LOG_LEVEL_ERROR 1
#define UNSHIELD_LOG_LEVEL_WARNING 2
#define UNSHIELD_LOG_LEVEL_TRACE 3
#define UNSHIELD_LOG_LEVEL_HIGHEST 4
#ifdef __cplusplus
extern "C"
{
#endif
void _unshield_log(int level, const char* file, int line, const char* format, ...);
#define unshield_trace(format, args...) \
_unshield_log(UNSHIELD_LOG_LEVEL_TRACE,__FUNCTION__, __LINE__, format, ##args)
#define unshield_warning(format, args...) \
_unshield_log(UNSHIELD_LOG_LEVEL_WARNING,__FUNCTION__, __LINE__, format, ##args)
#define unshield_warning_unless(cond, format, args...) \
if (!(cond)) \
_unshield_log(UNSHIELD_LOG_LEVEL_WARNING,__FUNCTION__, __LINE__, format, ##args)
#define unshield_error(format, args...) \
_unshield_log(UNSHIELD_LOG_LEVEL_ERROR,__FUNCTION__, __LINE__, format, ##args)
#ifdef __cplusplus
}
#endif
#endif
|
/* $Id$ */
#ifndef __log_h__
#define __log_h__
#include "internal.h"
#define UNSHIELD_LOG_LEVEL_LOWEST 0
#define UNSHIELD_LOG_LEVEL_ERROR 1
#define UNSHIELD_LOG_LEVEL_WARNING 2
#define UNSHIELD_LOG_LEVEL_TRACE 3
#define UNSHIELD_LOG_LEVEL_HIGHEST 4
#ifdef __cplusplus
extern "C"
{
#endif
void _unshield_log(int level, const char* file, int line, const char* format, ...);
#define unshield_trace(format, ...) \
_unshield_log(UNSHIELD_LOG_LEVEL_TRACE,__FUNCTION__, __LINE__, format, ##__VA_ARGS__)
#define unshield_warning(format, ...) \
_unshield_log(UNSHIELD_LOG_LEVEL_WARNING,__FUNCTION__, __LINE__, format, ##__VA_ARGS__)
#define unshield_error(format, ...) \
_unshield_log(UNSHIELD_LOG_LEVEL_ERROR,__FUNCTION__, __LINE__, format, ##__VA_ARGS__)
#ifdef __cplusplus
}
#endif
#endif
|
Use __VA_ARGS__ for better portability
|
Use __VA_ARGS__ for better portability
|
C
|
mit
|
twogood/unshield,twogood/unshield
|
c
|
## Code Before:
/* $Id$ */
#ifndef __log_h__
#define __log_h__
#include "internal.h"
#define UNSHIELD_LOG_LEVEL_LOWEST 0
#define UNSHIELD_LOG_LEVEL_ERROR 1
#define UNSHIELD_LOG_LEVEL_WARNING 2
#define UNSHIELD_LOG_LEVEL_TRACE 3
#define UNSHIELD_LOG_LEVEL_HIGHEST 4
#ifdef __cplusplus
extern "C"
{
#endif
void _unshield_log(int level, const char* file, int line, const char* format, ...);
#define unshield_trace(format, args...) \
_unshield_log(UNSHIELD_LOG_LEVEL_TRACE,__FUNCTION__, __LINE__, format, ##args)
#define unshield_warning(format, args...) \
_unshield_log(UNSHIELD_LOG_LEVEL_WARNING,__FUNCTION__, __LINE__, format, ##args)
#define unshield_warning_unless(cond, format, args...) \
if (!(cond)) \
_unshield_log(UNSHIELD_LOG_LEVEL_WARNING,__FUNCTION__, __LINE__, format, ##args)
#define unshield_error(format, args...) \
_unshield_log(UNSHIELD_LOG_LEVEL_ERROR,__FUNCTION__, __LINE__, format, ##args)
#ifdef __cplusplus
}
#endif
#endif
## Instruction:
Use __VA_ARGS__ for better portability
## Code After:
/* $Id$ */
#ifndef __log_h__
#define __log_h__
#include "internal.h"
#define UNSHIELD_LOG_LEVEL_LOWEST 0
#define UNSHIELD_LOG_LEVEL_ERROR 1
#define UNSHIELD_LOG_LEVEL_WARNING 2
#define UNSHIELD_LOG_LEVEL_TRACE 3
#define UNSHIELD_LOG_LEVEL_HIGHEST 4
#ifdef __cplusplus
extern "C"
{
#endif
void _unshield_log(int level, const char* file, int line, const char* format, ...);
#define unshield_trace(format, ...) \
_unshield_log(UNSHIELD_LOG_LEVEL_TRACE,__FUNCTION__, __LINE__, format, ##__VA_ARGS__)
#define unshield_warning(format, ...) \
_unshield_log(UNSHIELD_LOG_LEVEL_WARNING,__FUNCTION__, __LINE__, format, ##__VA_ARGS__)
#define unshield_error(format, ...) \
_unshield_log(UNSHIELD_LOG_LEVEL_ERROR,__FUNCTION__, __LINE__, format, ##__VA_ARGS__)
#ifdef __cplusplus
}
#endif
#endif
|
// ... existing code ...
void _unshield_log(int level, const char* file, int line, const char* format, ...);
#define unshield_trace(format, ...) \
_unshield_log(UNSHIELD_LOG_LEVEL_TRACE,__FUNCTION__, __LINE__, format, ##__VA_ARGS__)
#define unshield_warning(format, ...) \
_unshield_log(UNSHIELD_LOG_LEVEL_WARNING,__FUNCTION__, __LINE__, format, ##__VA_ARGS__)
#define unshield_error(format, ...) \
_unshield_log(UNSHIELD_LOG_LEVEL_ERROR,__FUNCTION__, __LINE__, format, ##__VA_ARGS__)
#ifdef __cplusplus
}
// ... rest of the code ...
|
8623aae8778307648e4a0380d84ca7dc7a63f3f2
|
oneflow/core/context_processors.py
|
oneflow/core/context_processors.py
|
from .models.nonrel import User
def mongodb_user(request):
if request.user.is_anonymous():
return {u'mongodb_user': None}
try:
mongodb_user = User.objects.get(id=request.session[u'mongodb_user_id'])
except KeyError:
mongodb_user = User.objects.get(django_user=request.user.id)
# Cache it for next time.
request.session[u'mongodb_user_id'] = mongodb_user.id
return {u'mongodb_user': mongodb_user}
|
def mongodb_user(request):
""" not the most usefull context manager in the world. """
if request.user.is_anonymous():
return {u'mongodb_user': None}
return {u'mongodb_user': request.user.mongo}
|
Simplify the context processor. Not very useful anymore, in fact.
|
Simplify the context processor. Not very useful anymore, in fact.
|
Python
|
agpl-3.0
|
1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow
|
python
|
## Code Before:
from .models.nonrel import User
def mongodb_user(request):
if request.user.is_anonymous():
return {u'mongodb_user': None}
try:
mongodb_user = User.objects.get(id=request.session[u'mongodb_user_id'])
except KeyError:
mongodb_user = User.objects.get(django_user=request.user.id)
# Cache it for next time.
request.session[u'mongodb_user_id'] = mongodb_user.id
return {u'mongodb_user': mongodb_user}
## Instruction:
Simplify the context processor. Not very useful anymore, in fact.
## Code After:
def mongodb_user(request):
""" not the most usefull context manager in the world. """
if request.user.is_anonymous():
return {u'mongodb_user': None}
return {u'mongodb_user': request.user.mongo}
|
# ... existing code ...
def mongodb_user(request):
""" not the most usefull context manager in the world. """
if request.user.is_anonymous():
return {u'mongodb_user': None}
return {u'mongodb_user': request.user.mongo}
# ... rest of the code ...
|
9f05a8917ee6fd01a334ef2e1e57062be8ef13af
|
byceps/config_defaults.py
|
byceps/config_defaults.py
|
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Avoid connection errors after database becomes temporarily
# unreachable, then becomes reachable again.
SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True}
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
Enable DBMS pool pre-pinging to avoid connection errors
|
Enable DBMS pool pre-pinging to avoid connection errors
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
python
|
## Code Before:
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
## Instruction:
Enable DBMS pool pre-pinging to avoid connection errors
## Code After:
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Avoid connection errors after database becomes temporarily
# unreachable, then becomes reachable again.
SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True}
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
...
# database connection
SQLALCHEMY_ECHO = False
# Avoid connection errors after database becomes temporarily
# unreachable, then becomes reachable again.
SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True}
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
...
|
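For context on the option above: recent Flask-SQLAlchemy versions hand SQLALCHEMY_ENGINE_OPTIONS through to SQLAlchemy's create_engine, so the setting is roughly equivalent to the plain-SQLAlchemy sketch below; the connection URL is only a placeholder.

from sqlalchemy import create_engine

# pool_pre_ping issues a cheap liveness check on every connection checkout
# and transparently replaces connections the database has dropped.
engine = create_engine('postgresql://byceps:secret@localhost/byceps',
                       pool_pre_ping=True)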
34c6947f3b887bc448fd9e373f3bd9ed955443ee
|
fund/src/main/java/cl/fatman/capital/fund/App.java
|
fund/src/main/java/cl/fatman/capital/fund/App.java
|
package cl.fatman.capital.fund;
/**
* Hello world!
*
*/
public class App
{
public static void main( String[] args )
{
System.out.println( "Hello World!" );
}
}
|
package cl.fatman.capital.fund;
import java.time.LocalDate;
public class App
{
public static void main( String[] args )
{
Controller control = Controller.getInstance();
control.setUp();
System.out.println( "Starting application that load the funds data in the database.");
LocalDate endDate = LocalDate.now();
LocalDate startDate = endDate.minusDays(4);
control.storeFundData(startDate, endDate);
control.tearDown();
System.out.println( "Finishing application that load the funds data in the database.");
}
}
|
Add the application that loads fund data in the database.
|
Add the application that loads fund data in the database.
|
Java
|
mit
|
mparra-mpz/Capital
|
java
|
## Code Before:
package cl.fatman.capital.fund;
/**
* Hello world!
*
*/
public class App
{
public static void main( String[] args )
{
System.out.println( "Hello World!" );
}
}
## Instruction:
Add the application that loads fund data in the database.
## Code After:
package cl.fatman.capital.fund;
import java.time.LocalDate;
public class App
{
public static void main( String[] args )
{
Controller control = Controller.getInstance();
control.setUp();
System.out.println( "Starting application that load the funds data in the database.");
LocalDate endDate = LocalDate.now();
LocalDate startDate = endDate.minusDays(4);
control.storeFundData(startDate, endDate);
control.tearDown();
System.out.println( "Finishing application that load the funds data in the database.");
}
}
|
# ... existing code ...
package cl.fatman.capital.fund;
import java.time.LocalDate;
public class App
{
public static void main( String[] args )
{
Controller control = Controller.getInstance();
control.setUp();
System.out.println( "Starting application that load the funds data in the database.");
LocalDate endDate = LocalDate.now();
LocalDate startDate = endDate.minusDays(4);
control.storeFundData(startDate, endDate);
control.tearDown();
System.out.println( "Finishing application that load the funds data in the database.");
}
}
# ... rest of the code ...
|
b89115165c55e51e76a533ba4eb9637897319e0a
|
oidc_provider/management/commands/creatersakey.py
|
oidc_provider/management/commands/creatersakey.py
|
from Crypto.PublicKey import RSA
from django.conf import settings
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Randomly generate a new RSA key for the OpenID server'
def handle(self, *args, **options):
try:
key = RSA.generate(1024)
file_path = settings.BASE_DIR + '/OIDC_RSA_KEY.pem'
with open(file_path, 'w') as f:
f.write(key.exportKey('PEM'))
self.stdout.write('RSA key successfully created at: ' + file_path)
except Exception as e:
self.stdout.write('Something goes wrong: ' + e.message)
|
from Crypto.PublicKey import RSA
from django.conf import settings
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Randomly generate a new RSA key for the OpenID server'
def handle(self, *args, **options):
try:
key = RSA.generate(1024)
file_path = settings.BASE_DIR + '/OIDC_RSA_KEY.pem'
with open(file_path, 'w') as f:
f.write(key.exportKey('PEM'))
self.stdout.write('RSA key successfully created at: ' + file_path)
except Exception as e:
self.stdout.write('Something goes wrong: {0}'.format(e))
|
Fix use of deprecated Exception.message in Python 3
|
Fix use of deprecated Exception.message in Python 3
|
Python
|
mit
|
torreco/django-oidc-provider,ByteInternet/django-oidc-provider,wojtek-fliposports/django-oidc-provider,wayward710/django-oidc-provider,bunnyinc/django-oidc-provider,juanifioren/django-oidc-provider,wojtek-fliposports/django-oidc-provider,ByteInternet/django-oidc-provider,torreco/django-oidc-provider,bunnyinc/django-oidc-provider,juanifioren/django-oidc-provider,wayward710/django-oidc-provider
|
python
|
## Code Before:
from Crypto.PublicKey import RSA
from django.conf import settings
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Randomly generate a new RSA key for the OpenID server'
def handle(self, *args, **options):
try:
key = RSA.generate(1024)
file_path = settings.BASE_DIR + '/OIDC_RSA_KEY.pem'
with open(file_path, 'w') as f:
f.write(key.exportKey('PEM'))
self.stdout.write('RSA key successfully created at: ' + file_path)
except Exception as e:
self.stdout.write('Something goes wrong: ' + e.message)
## Instruction:
Fix use of deprecated Exception.message in Python 3
## Code After:
from Crypto.PublicKey import RSA
from django.conf import settings
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Randomly generate a new RSA key for the OpenID server'
def handle(self, *args, **options):
try:
key = RSA.generate(1024)
file_path = settings.BASE_DIR + '/OIDC_RSA_KEY.pem'
with open(file_path, 'w') as f:
f.write(key.exportKey('PEM'))
self.stdout.write('RSA key successfully created at: ' + file_path)
except Exception as e:
self.stdout.write('Something goes wrong: {0}'.format(e))
|
# ... existing code ...
f.write(key.exportKey('PEM'))
self.stdout.write('RSA key successfully created at: ' + file_path)
except Exception as e:
self.stdout.write('Something goes wrong: {0}'.format(e))
# ... rest of the code ...
|
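The fix above works because Python 3 removed the already-deprecated Exception.message attribute, while str.format (or str(e)) renders an exception the same way on Python 2 and 3. A self-contained illustration, independent of the Django command:

try:
    raise ValueError('key generation failed')
except Exception as e:
    # e.message raises AttributeError on Python 3; both lines below are portable.
    print('Something went wrong: {0}'.format(e))
    print('Something went wrong: ' + str(e))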
cdf60bc0b07c282e75fba747c8adedd165aa0abd
|
index.py
|
index.py
|
from werkzeug.wrappers import Request, Response
from get_html import get_html, choose_lang
@Request.application
def run(request):
lang = choose_lang(request)
if request.url.startswith("https://") or request.args.get("forcenossl") == "true":
html = get_html("launch", lang)
else:
html = get_html("nossl", lang)
return Response(html, mimetype="text/html")
if __name__ == "__main__":
import CGI
CGI.app = run
CGI.run()
|
from werkzeug.wrappers import Request, Response
from get_html import get_html, choose_lang
@Request.application
def run(request):
lang = request.args.get("lang") if request.args.get("lang") else choose_lang(request)
if request.url.startswith("https://") or request.args.get("forcenossl") == "true":
html = get_html("launch", lang)
else:
html = get_html("nossl", lang)
return Response(html, mimetype="text/html")
if __name__ == "__main__":
import CGI
CGI.app = run
CGI.run()
|
Make the language changeable via a GET parameter.
|
Make the language changeable via a GET parameter.
|
Python
|
mit
|
YtvwlD/dyluna,YtvwlD/dyluna,YtvwlD/dyluna
|
python
|
## Code Before:
from werkzeug.wrappers import Request, Response
from get_html import get_html, choose_lang
@Request.application
def run(request):
lang = choose_lang(request)
if request.url.startswith("https://") or request.args.get("forcenossl") == "true":
html = get_html("launch", lang)
else:
html = get_html("nossl", lang)
return Response(html, mimetype="text/html")
if __name__ == "__main__":
import CGI
CGI.app = run
CGI.run()
## Instruction:
Make the language changeable via a GET parameter.
## Code After:
from werkzeug.wrappers import Request, Response
from get_html import get_html, choose_lang
@Request.application
def run(request):
lang = request.args.get("lang") if request.args.get("lang") else choose_lang(request)
if request.url.startswith("https://") or request.args.get("forcenossl") == "true":
html = get_html("launch", lang)
else:
html = get_html("nossl", lang)
return Response(html, mimetype="text/html")
if __name__ == "__main__":
import CGI
CGI.app = run
CGI.run()
|
# ... existing code ...
@Request.application
def run(request):
lang = request.args.get("lang") if request.args.get("lang") else choose_lang(request)
if request.url.startswith("https://") or request.args.get("forcenossl") == "true":
html = get_html("launch", lang)
else:
# ... rest of the code ...
|
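One way to exercise the new parameter is Werkzeug's built-in test client; the sketch below is only illustrative, and the 'de' language code is an arbitrary example rather than something taken from the module.

from werkzeug.test import Client
from werkzeug.wrappers import Response
from index import run

client = Client(run, Response)
# An explicit lang argument should now win over header-based detection;
# forcenossl=true selects the launch page even over plain HTTP.
response = client.get('/?lang=de&forcenossl=true')
print(response.status)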
285cd06243fdd7aacb65628f390876be3b7ca098
|
setup.py
|
setup.py
|
"""Setup for cppclean."""
from __future__ import unicode_literals
from distutils import core
with open('README') as readme:
core.setup(name='cppclean',
description='Find problems in C++ source that slow development '
'of large code bases.',
long_description=readme.read(),
packages=['cpp'],
scripts=['cppclean'])
|
"""Setup for cppclean."""
from distutils import core
with open('README') as readme:
core.setup(name='cppclean',
description='Find problems in C++ source that slow development '
'of large code bases.',
long_description=readme.read(),
packages=['cpp'],
scripts=['cppclean'])
|
Make this work on Python 2
|
Make this work on Python 2
http://bugs.python.org/issue13943
|
Python
|
apache-2.0
|
myint/cppclean,myint/cppclean,myint/cppclean,myint/cppclean
|
python
|
## Code Before:
"""Setup for cppclean."""
from __future__ import unicode_literals
from distutils import core
with open('README') as readme:
core.setup(name='cppclean',
description='Find problems in C++ source that slow development '
'of large code bases.',
long_description=readme.read(),
packages=['cpp'],
scripts=['cppclean'])
## Instruction:
Make this work on Python 2
http://bugs.python.org/issue13943
## Code After:
"""Setup for cppclean."""
from distutils import core
with open('README') as readme:
core.setup(name='cppclean',
description='Find problems in C++ source that slow development '
'of large code bases.',
long_description=readme.read(),
packages=['cpp'],
scripts=['cppclean'])
|
...
"""Setup for cppclean."""
from distutils import core
...
|
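A plausible reading of this change: with unicode_literals in effect, every literal passed to core.setup() becomes unicode on Python 2, and some distutils code paths only handle native str there; dropping the import keeps the literals as str on Python 2 and changes nothing on Python 3. Purely as an illustration of the type difference, not code from the project:

# Python 2 semantics of the two spellings:
plain = 'cppclean'          # type str (bytes), what distutils expects
future_style = u'cppclean'  # type unicode, the kind of value that can trip older distutils code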
920a0814f7ff5b35729fc828099cea93dad76a73
|
src/main/kotlin/com/github/pgutkowski/kgraphql/schema/dsl/SchemaConfigurationDSL.kt
|
src/main/kotlin/com/github/pgutkowski/kgraphql/schema/dsl/SchemaConfigurationDSL.kt
|
package com.github.pgutkowski.kgraphql.schema.dsl
import com.fasterxml.jackson.databind.DeserializationFeature
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
import com.github.pgutkowski.kgraphql.configuration.SchemaConfiguration
import kotlinx.coroutines.experimental.CommonPool
class SchemaConfigurationDSL {
var useDefaultPrettyPrinter: Boolean = false
var useCachingDocumentParser: Boolean = true
var objectMapper: ObjectMapper = jacksonObjectMapper()
var documentParserCacheMaximumSize : Long = 1000L
var acceptSingleValueAsArray : Boolean = true
var coroutineDispatcher = CommonPool
internal fun update(block : SchemaConfigurationDSL.() -> Unit) = block()
internal fun build() : SchemaConfiguration {
objectMapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, acceptSingleValueAsArray)
return SchemaConfiguration (
useCachingDocumentParser,
documentParserCacheMaximumSize,
objectMapper,
useDefaultPrettyPrinter,
coroutineDispatcher
)
}
}
|
package com.github.pgutkowski.kgraphql.schema.dsl
import com.fasterxml.jackson.databind.DeserializationFeature
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
import com.github.pgutkowski.kgraphql.configuration.SchemaConfiguration
import kotlinx.coroutines.experimental.CommonPool
import kotlinx.coroutines.experimental.CoroutineDispatcher
class SchemaConfigurationDSL {
var useDefaultPrettyPrinter: Boolean = false
var useCachingDocumentParser: Boolean = true
var objectMapper: ObjectMapper = jacksonObjectMapper()
var documentParserCacheMaximumSize : Long = 1000L
var acceptSingleValueAsArray : Boolean = true
var coroutineDispatcher: CoroutineDispatcher = CommonPool
internal fun update(block : SchemaConfigurationDSL.() -> Unit) = block()
internal fun build() : SchemaConfiguration {
objectMapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, acceptSingleValueAsArray)
return SchemaConfiguration (
useCachingDocumentParser,
documentParserCacheMaximumSize,
objectMapper,
useDefaultPrettyPrinter,
coroutineDispatcher
)
}
}
|
Make coroutineDispatcher type in configuration generic
|
Make coroutineDispatcher type in configuration generic
|
Kotlin
|
mit
|
pgutkowski/KGraphQL
|
kotlin
|
## Code Before:
package com.github.pgutkowski.kgraphql.schema.dsl
import com.fasterxml.jackson.databind.DeserializationFeature
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
import com.github.pgutkowski.kgraphql.configuration.SchemaConfiguration
import kotlinx.coroutines.experimental.CommonPool
class SchemaConfigurationDSL {
var useDefaultPrettyPrinter: Boolean = false
var useCachingDocumentParser: Boolean = true
var objectMapper: ObjectMapper = jacksonObjectMapper()
var documentParserCacheMaximumSize : Long = 1000L
var acceptSingleValueAsArray : Boolean = true
var coroutineDispatcher = CommonPool
internal fun update(block : SchemaConfigurationDSL.() -> Unit) = block()
internal fun build() : SchemaConfiguration {
objectMapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, acceptSingleValueAsArray)
return SchemaConfiguration (
useCachingDocumentParser,
documentParserCacheMaximumSize,
objectMapper,
useDefaultPrettyPrinter,
coroutineDispatcher
)
}
}
## Instruction:
Make coroutineDispatcher type in configuration generic
## Code After:
package com.github.pgutkowski.kgraphql.schema.dsl
import com.fasterxml.jackson.databind.DeserializationFeature
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
import com.github.pgutkowski.kgraphql.configuration.SchemaConfiguration
import kotlinx.coroutines.experimental.CommonPool
import kotlinx.coroutines.experimental.CoroutineDispatcher
class SchemaConfigurationDSL {
var useDefaultPrettyPrinter: Boolean = false
var useCachingDocumentParser: Boolean = true
var objectMapper: ObjectMapper = jacksonObjectMapper()
var documentParserCacheMaximumSize : Long = 1000L
var acceptSingleValueAsArray : Boolean = true
var coroutineDispatcher: CoroutineDispatcher = CommonPool
internal fun update(block : SchemaConfigurationDSL.() -> Unit) = block()
internal fun build() : SchemaConfiguration {
objectMapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, acceptSingleValueAsArray)
return SchemaConfiguration (
useCachingDocumentParser,
documentParserCacheMaximumSize,
objectMapper,
useDefaultPrettyPrinter,
coroutineDispatcher
)
}
}
|
...
import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
import com.github.pgutkowski.kgraphql.configuration.SchemaConfiguration
import kotlinx.coroutines.experimental.CommonPool
import kotlinx.coroutines.experimental.CoroutineDispatcher
class SchemaConfigurationDSL {
...
var objectMapper: ObjectMapper = jacksonObjectMapper()
var documentParserCacheMaximumSize : Long = 1000L
var acceptSingleValueAsArray : Boolean = true
var coroutineDispatcher: CoroutineDispatcher = CommonPool
internal fun update(block : SchemaConfigurationDSL.() -> Unit) = block()
...
|
5286d7b92d22b0d4e126f8d1cf41a89e8e6548d2
|
tests/src/com/vaadin/tests/components/window/EmbeddedInSubWindow.java
|
tests/src/com/vaadin/tests/components/window/EmbeddedInSubWindow.java
|
package com.vaadin.tests.components.window;
import com.vaadin.terminal.ThemeResource;
import com.vaadin.tests.components.TestBase;
import com.vaadin.ui.Embedded;
import com.vaadin.ui.Window;
public class EmbeddedInSubWindow extends TestBase {
@Override
protected String getDescription() {
return "The sub window contains a large icon and should be sized according to the icon. The icon contains a blue border of 10px at the outer edges. The layout in the sub window has margins enabled.";
}
@Override
protected Integer getTicketNumber() {
return null;
}
@Override
protected void setup() {
setTheme("tests-tickets");
Window zoom = new Window("Image Preview");
zoom.setSizeUndefined();
zoom.getContent().setSizeUndefined();
String res = "icons/EmbeddedInSubWindow-image.png";
Embedded imagePreview = new Embedded(null, new ThemeResource(res));
imagePreview.setSizeUndefined();
zoom.addComponent(imagePreview);
zoom.setModal(true);
zoom.setResizable(false);
zoom.addListener(new Window.CloseListener() {
public void windowClose(Window.CloseEvent closeEvent) {
getMainWindow().removeWindow(closeEvent.getWindow());
}
});
getMainWindow().addWindow(zoom);
}
}
|
package com.vaadin.tests.components.window;
import com.vaadin.terminal.ThemeResource;
import com.vaadin.tests.components.TestBase;
import com.vaadin.ui.Embedded;
import com.vaadin.ui.Window;
public class EmbeddedInSubWindow extends TestBase {
@Override
protected String getDescription() {
return "The sub window contains a large icon and should be sized according to the icon. The icon contains a blue border of 10px at the outer edges. The layout in the sub window has margins enabled.";
}
@Override
protected Integer getTicketNumber() {
return null;
}
@Override
protected void setup() {
setTheme("tests-tickets");
Window zoom = new Window("Image Preview");
zoom.setSizeUndefined();
zoom.getContent().setSizeUndefined();
String res = "icons/EmbeddedInSubWindow-image.png";
Embedded imagePreview = new Embedded(null, new ThemeResource(res));
imagePreview.setSizeUndefined();
zoom.addComponent(imagePreview);
zoom.setModal(true);
zoom.setResizable(false);
getMainWindow().addWindow(zoom);
}
}
|
Remove obsolete/unnecessary close listener from a test class.
|
Remove obsolete/unnecessary close listener from a test class.
svn changeset:15712/svn branch:6.5
|
Java
|
apache-2.0
|
jdahlstrom/vaadin.react,sitexa/vaadin,jdahlstrom/vaadin.react,fireflyc/vaadin,peterl1084/framework,Legioth/vaadin,bmitc/vaadin,magi42/vaadin,cbmeeks/vaadin,Flamenco/vaadin,oalles/vaadin,Darsstar/framework,peterl1084/framework,Flamenco/vaadin,oalles/vaadin,carrchang/vaadin,kironapublic/vaadin,oalles/vaadin,asashour/framework,mstahv/framework,shahrzadmn/vaadin,synes/vaadin,Darsstar/framework,cbmeeks/vaadin,cbmeeks/vaadin,shahrzadmn/vaadin,mstahv/framework,mstahv/framework,travisfw/vaadin,travisfw/vaadin,fireflyc/vaadin,Darsstar/framework,magi42/vaadin,Peppe/vaadin,Legioth/vaadin,Darsstar/framework,cbmeeks/vaadin,asashour/framework,carrchang/vaadin,travisfw/vaadin,synes/vaadin,Peppe/vaadin,shahrzadmn/vaadin,Legioth/vaadin,sitexa/vaadin,peterl1084/framework,sitexa/vaadin,travisfw/vaadin,asashour/framework,synes/vaadin,mittop/vaadin,Flamenco/vaadin,udayinfy/vaadin,Scarlethue/vaadin,peterl1084/framework,Peppe/vaadin,kironapublic/vaadin,Darsstar/framework,fireflyc/vaadin,kironapublic/vaadin,bmitc/vaadin,carrchang/vaadin,asashour/framework,jdahlstrom/vaadin.react,Peppe/vaadin,Scarlethue/vaadin,Legioth/vaadin,fireflyc/vaadin,Flamenco/vaadin,bmitc/vaadin,Scarlethue/vaadin,udayinfy/vaadin,asashour/framework,carrchang/vaadin,mittop/vaadin,sitexa/vaadin,sitexa/vaadin,synes/vaadin,peterl1084/framework,udayinfy/vaadin,magi42/vaadin,travisfw/vaadin,kironapublic/vaadin,Peppe/vaadin,shahrzadmn/vaadin,udayinfy/vaadin,mittop/vaadin,magi42/vaadin,mstahv/framework,Scarlethue/vaadin,jdahlstrom/vaadin.react,Legioth/vaadin,magi42/vaadin,bmitc/vaadin,oalles/vaadin,mstahv/framework,udayinfy/vaadin,shahrzadmn/vaadin,synes/vaadin,kironapublic/vaadin,fireflyc/vaadin,jdahlstrom/vaadin.react,Scarlethue/vaadin,mittop/vaadin,oalles/vaadin
|
java
|
## Code Before:
package com.vaadin.tests.components.window;
import com.vaadin.terminal.ThemeResource;
import com.vaadin.tests.components.TestBase;
import com.vaadin.ui.Embedded;
import com.vaadin.ui.Window;
public class EmbeddedInSubWindow extends TestBase {
@Override
protected String getDescription() {
return "The sub window contains a large icon and should be sized according to the icon. The icon contains a blue border of 10px at the outer edges. The layout in the sub window has margins enabled.";
}
@Override
protected Integer getTicketNumber() {
return null;
}
@Override
protected void setup() {
setTheme("tests-tickets");
Window zoom = new Window("Image Preview");
zoom.setSizeUndefined();
zoom.getContent().setSizeUndefined();
String res = "icons/EmbeddedInSubWindow-image.png";
Embedded imagePreview = new Embedded(null, new ThemeResource(res));
imagePreview.setSizeUndefined();
zoom.addComponent(imagePreview);
zoom.setModal(true);
zoom.setResizable(false);
zoom.addListener(new Window.CloseListener() {
public void windowClose(Window.CloseEvent closeEvent) {
getMainWindow().removeWindow(closeEvent.getWindow());
}
});
getMainWindow().addWindow(zoom);
}
}
## Instruction:
Remove obsolete/unnecessary close listener from a test class.
svn changeset:15712/svn branch:6.5
## Code After:
package com.vaadin.tests.components.window;
import com.vaadin.terminal.ThemeResource;
import com.vaadin.tests.components.TestBase;
import com.vaadin.ui.Embedded;
import com.vaadin.ui.Window;
public class EmbeddedInSubWindow extends TestBase {
@Override
protected String getDescription() {
return "The sub window contains a large icon and should be sized according to the icon. The icon contains a blue border of 10px at the outer edges. The layout in the sub window has margins enabled.";
}
@Override
protected Integer getTicketNumber() {
return null;
}
@Override
protected void setup() {
setTheme("tests-tickets");
Window zoom = new Window("Image Preview");
zoom.setSizeUndefined();
zoom.getContent().setSizeUndefined();
String res = "icons/EmbeddedInSubWindow-image.png";
Embedded imagePreview = new Embedded(null, new ThemeResource(res));
imagePreview.setSizeUndefined();
zoom.addComponent(imagePreview);
zoom.setModal(true);
zoom.setResizable(false);
getMainWindow().addWindow(zoom);
}
}
|
# ... existing code ...
zoom.setModal(true);
zoom.setResizable(false);
getMainWindow().addWindow(zoom);
}
# ... rest of the code ...
|
220b5e4a6b17e4c79898ede502c7ce3563a8ed0d
|
wikipedia/src/main/java/org/wikipedia/settings/PreferenceMultiLine.java
|
wikipedia/src/main/java/org/wikipedia/settings/PreferenceMultiLine.java
|
package org.wikipedia.settings;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.preference.Preference;
import android.util.AttributeSet;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import org.wikipedia.R;
public class PreferenceMultiLine extends Preference {
public PreferenceMultiLine(Context ctx, AttributeSet attrs, int defStyle) {
super(ctx, attrs, defStyle);
}
public PreferenceMultiLine(Context ctx, AttributeSet attrs) {
super(ctx, attrs);
}
public PreferenceMultiLine(Context ctx) {
super(ctx);
}
@Override
protected void onBindView(View view) {
super.onBindView(view);
TextView textView = (TextView) view.findViewById(android.R.id.title);
if (textView != null) {
textView.setSingleLine(false);
}
// Intercept the click listener for this preference, and if the preference has an intent,
// launch the intent ourselves, so that we can catch the exception if the intent fails.
this.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference preference) {
if (preference.getIntent() != null) {
try {
getContext().startActivity(preference.getIntent());
} catch (ActivityNotFoundException e) {
Toast.makeText(getContext(), getContext().getString(R.string.error_browser_not_found), Toast.LENGTH_LONG).show();
}
return true;
}
return false;
}
});
}
}
|
package org.wikipedia.settings;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.preference.Preference;
import android.util.AttributeSet;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import org.wikipedia.R;
public class PreferenceMultiLine extends Preference {
public PreferenceMultiLine(Context ctx, AttributeSet attrs, int defStyle) {
super(ctx, attrs, defStyle);
}
public PreferenceMultiLine(Context ctx, AttributeSet attrs) {
super(ctx, attrs);
}
public PreferenceMultiLine(Context ctx) {
super(ctx);
}
@Override
protected void onBindView(View view) {
super.onBindView(view);
TextView textView = (TextView) view.findViewById(android.R.id.title);
if (textView != null) {
textView.setSingleLine(false);
}
// Intercept the click listener for this preference, and if the preference has an intent,
// launch the intent ourselves, so that we can catch the exception if the intent fails.
// (but only do this if the preference doesn't already have a click listener)
if (this.getOnPreferenceClickListener() == null) {
this.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference preference) {
if (preference.getIntent() != null) {
try {
getContext().startActivity(preference.getIntent());
} catch (ActivityNotFoundException e) {
Toast.makeText(getContext(), getContext().getString(R.string.error_browser_not_found), Toast.LENGTH_LONG).show();
}
return true;
}
return false;
}
});
}
}
}
|
Fix "logout" preference after crash fix.
|
Fix "logout" preference after crash fix.
(became no longer clickable)
Now giving an onClickListener to a preference only if it doesn't already
have a listener.
Change-Id: I93b25da9485477737f877abae34188d0243bebd4
|
Java
|
apache-2.0
|
SAGROUP2/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,dbrant/apps-android-wikipedia,carloshwa/apps-android-wikipedia,wikimedia/apps-android-wikipedia,carloshwa/apps-android-wikipedia,dbrant/apps-android-wikipedia,Wikinaut/wikipedia-app,anirudh24seven/apps-android-wikipedia,parvez3019/apps-android-wikipedia,Wikinaut/wikipedia-app,anirudh24seven/apps-android-wikipedia,dbrant/apps-android-wikipedia,reproio/apps-android-wikipedia,reproio/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,wikimedia/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,parvez3019/apps-android-wikipedia,reproio/apps-android-wikipedia,reproio/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,parvez3019/apps-android-wikipedia,reproio/apps-android-wikipedia,carloshwa/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,Wikinaut/wikipedia-app,SAGROUP2/apps-android-wikipedia,carloshwa/apps-android-wikipedia,Wikinaut/wikipedia-app,Wikinaut/wikipedia-app,SAGROUP2/apps-android-wikipedia,wikimedia/apps-android-wikipedia,dbrant/apps-android-wikipedia,wikimedia/apps-android-wikipedia,carloshwa/apps-android-wikipedia,parvez3019/apps-android-wikipedia,dbrant/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,parvez3019/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android
|
java
|
## Code Before:
package org.wikipedia.settings;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.preference.Preference;
import android.util.AttributeSet;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import org.wikipedia.R;
public class PreferenceMultiLine extends Preference {
public PreferenceMultiLine(Context ctx, AttributeSet attrs, int defStyle) {
super(ctx, attrs, defStyle);
}
public PreferenceMultiLine(Context ctx, AttributeSet attrs) {
super(ctx, attrs);
}
public PreferenceMultiLine(Context ctx) {
super(ctx);
}
@Override
protected void onBindView(View view) {
super.onBindView(view);
TextView textView = (TextView) view.findViewById(android.R.id.title);
if (textView != null) {
textView.setSingleLine(false);
}
// Intercept the click listener for this preference, and if the preference has an intent,
// launch the intent ourselves, so that we can catch the exception if the intent fails.
this.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference preference) {
if (preference.getIntent() != null) {
try {
getContext().startActivity(preference.getIntent());
} catch (ActivityNotFoundException e) {
Toast.makeText(getContext(), getContext().getString(R.string.error_browser_not_found), Toast.LENGTH_LONG).show();
}
return true;
}
return false;
}
});
}
}
## Instruction:
Fix "logout" preference after crash fix.
(became no longer clickable)
Now giving an onClickListener to a preference only if it doesn't already
have a listener.
Change-Id: I93b25da9485477737f877abae34188d0243bebd4
## Code After:
package org.wikipedia.settings;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.preference.Preference;
import android.util.AttributeSet;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import org.wikipedia.R;
public class PreferenceMultiLine extends Preference {
public PreferenceMultiLine(Context ctx, AttributeSet attrs, int defStyle) {
super(ctx, attrs, defStyle);
}
public PreferenceMultiLine(Context ctx, AttributeSet attrs) {
super(ctx, attrs);
}
public PreferenceMultiLine(Context ctx) {
super(ctx);
}
@Override
protected void onBindView(View view) {
super.onBindView(view);
TextView textView = (TextView) view.findViewById(android.R.id.title);
if (textView != null) {
textView.setSingleLine(false);
}
// Intercept the click listener for this preference, and if the preference has an intent,
// launch the intent ourselves, so that we can catch the exception if the intent fails.
// (but only do this if the preference doesn't already have a click listener)
if (this.getOnPreferenceClickListener() == null) {
this.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference preference) {
if (preference.getIntent() != null) {
try {
getContext().startActivity(preference.getIntent());
} catch (ActivityNotFoundException e) {
Toast.makeText(getContext(), getContext().getString(R.string.error_browser_not_found), Toast.LENGTH_LONG).show();
}
return true;
}
return false;
}
});
}
}
}
|
# ... existing code ...
}
// Intercept the click listener for this preference, and if the preference has an intent,
// launch the intent ourselves, so that we can catch the exception if the intent fails.
// (but only do this if the preference doesn't already have a click listener)
if (this.getOnPreferenceClickListener() == null) {
this.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference preference) {
if (preference.getIntent() != null) {
try {
getContext().startActivity(preference.getIntent());
} catch (ActivityNotFoundException e) {
Toast.makeText(getContext(), getContext().getString(R.string.error_browser_not_found), Toast.LENGTH_LONG).show();
}
return true;
}
return false;
}
});
}
}
}
# ... rest of the code ...
|
ca74738e9241230fd0cc843aa9b76f67494d02eb
|
python/intermediate/create_inter_python_data.py
|
python/intermediate/create_inter_python_data.py
|
"""Create the data for the Software Carpentry Intermediate Python lectures"""
import numpy as np
import pandas as pd
np.random.seed(26)
years = np.arange(1960, 2011)
temps = np.random.uniform(70, 90, len(years))
rainfalls = np.random.uniform(100, 300, len(years))
noise = 2 * np.random.randn(len(years))
mosquitos = 0.5 * temps + 0.7 * rainfalls + noise
data = zip(years, temps, rainfalls, mosquitos)
df = pd.DataFrame(data, columns=['year', 'temperature', 'rainfall','mosquitos'])
df.to_csv('mosquito_data_A2.csv', index=False, float_format='%.0f')
df_short = df[-10:]
df_short.to_csv('mosquito_data_A1.csv', index=False, float_format='%.0f')
|
"""Create the data for the Software Carpentry Intermediate Python lectures"""
import numpy as np
import pandas as pd
np.random.seed(26)
datasets = {'A1': [0, 0.5, 0.7, 10],
'A2': [0, 0.5, 0.7, 50],
'A3': [0, 0.5, 0.3, 50],
'B1': [3, 0.7, 0.2, 50],
'B2': [3, 0.7, 0.7, 50]}
def make_data(intercept, tempslope, rainfallslope, numyears):
years = np.arange(2010 - numyears, 2011)
temps = np.random.uniform(70, 90, len(years))
rainfalls = np.random.uniform(100, 300, len(years))
noise = 2 * np.random.randn(len(years))
mosquitos = intercept + tempslope * temps + rainfallslope * rainfalls + noise
return zip(years, temps, rainfalls, mosquitos)
def export_data(data, filename):
df = pd.DataFrame(data, columns=['year', 'temperature', 'rainfall','mosquitos'])
df.to_csv(filename, index=False, float_format='%.0f')
for site in datasets:
data = make_data(*datasets[site])
if site == 'A1':
#create a shorter dataset for first example
data = data[-10:]
export_data(data, '%s_mosquito_data.csv' % site)
|
Allow creation of multiple example data files for Inter Python
|
Allow creation of multiple example data files for Inter Python
Generalizes the script for creating data files to allow for the
easy generation of larger numbers of data files.
|
Python
|
bsd-2-clause
|
selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest
|
python
|
## Code Before:
"""Create the data for the Software Carpentry Intermediate Python lectures"""
import numpy as np
import pandas as pd
np.random.seed(26)
years = np.arange(1960, 2011)
temps = np.random.uniform(70, 90, len(years))
rainfalls = np.random.uniform(100, 300, len(years))
noise = 2 * np.random.randn(len(years))
mosquitos = 0.5 * temps + 0.7 * rainfalls + noise
data = zip(years, temps, rainfalls, mosquitos)
df = pd.DataFrame(data, columns=['year', 'temperature', 'rainfall','mosquitos'])
df.to_csv('mosquito_data_A2.csv', index=False, float_format='%.0f')
df_short = df[-10:]
df_short.to_csv('mosquito_data_A1.csv', index=False, float_format='%.0f')
## Instruction:
Allow creation of multiple example data files for Inter Python
Generalizes the script for creating data files to allow for the
easy generation of larger numbers of data files.
## Code After:
"""Create the data for the Software Carpentry Intermediate Python lectures"""
import numpy as np
import pandas as pd
np.random.seed(26)
datasets = {'A1': [0, 0.5, 0.7, 10],
'A2': [0, 0.5, 0.7, 50],
'A3': [0, 0.5, 0.3, 50],
'B1': [3, 0.7, 0.2, 50],
'B2': [3, 0.7, 0.7, 50]}
def make_data(intercept, tempslope, rainfallslope, numyears):
years = np.arange(2010 - numyears, 2011)
temps = np.random.uniform(70, 90, len(years))
rainfalls = np.random.uniform(100, 300, len(years))
noise = 2 * np.random.randn(len(years))
mosquitos = intercept + tempslope * temps + rainfallslope * rainfalls + noise
return zip(years, temps, rainfalls, mosquitos)
def export_data(data, filename):
df = pd.DataFrame(data, columns=['year', 'temperature', 'rainfall','mosquitos'])
df.to_csv(filename, index=False, float_format='%.0f')
for site in datasets:
data = make_data(*datasets[site])
if site == 'A1':
#create a shorter dataset for first example
data = data[-10:]
export_data(data, '%s_mosquito_data.csv' % site)
|
// ... existing code ...
import pandas as pd
np.random.seed(26)
datasets = {'A1': [0, 0.5, 0.7, 10],
'A2': [0, 0.5, 0.7, 50],
'A3': [0, 0.5, 0.3, 50],
'B1': [3, 0.7, 0.2, 50],
'B2': [3, 0.7, 0.7, 50]}
def make_data(intercept, tempslope, rainfallslope, numyears):
years = np.arange(2010 - numyears, 2011)
temps = np.random.uniform(70, 90, len(years))
rainfalls = np.random.uniform(100, 300, len(years))
noise = 2 * np.random.randn(len(years))
mosquitos = intercept + tempslope * temps + rainfallslope * rainfalls + noise
return zip(years, temps, rainfalls, mosquitos)
def export_data(data, filename):
df = pd.DataFrame(data, columns=['year', 'temperature', 'rainfall','mosquitos'])
df.to_csv(filename, index=False, float_format='%.0f')
for site in datasets:
data = make_data(*datasets[site])
if site == 'A1':
#create a shorter dataset for first example
data = data[-10:]
export_data(data, '%s_mosquito_data.csv' % site)
// ... rest of the code ...
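Note on running the generalized script in this record: the slice data[-10:] relies on zip() returning a list, which is Python 2 behaviour; under Python 3 zip() yields an iterator that cannot be sliced. A minimal, self-contained sketch of the pitfall and the usual fix (the variable names below are illustrative, not taken from the record):

# Python 3 sketch: materialize the zip before slicing, as the script would need to.
years = list(range(2001, 2011))
temps = [75.0] * len(years)
rows = zip(years, temps)
# rows[-3:] here would raise TypeError: 'zip' object is not subscriptable
rows = list(rows)
print(rows[-3:])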
|
b06a1d4b43b77e6e4ad4e6566460bf7eb24ab7de
|
wicket-orientdb/src/main/java/ru/ydn/wicket/wicketorientdb/utils/OClassChoiceRenderer.java
|
wicket-orientdb/src/main/java/ru/ydn/wicket/wicketorientdb/utils/OClassChoiceRenderer.java
|
package ru.ydn.wicket.wicketorientdb.utils;
import java.util.List;
import org.apache.wicket.markup.html.form.IChoiceRenderer;
import org.apache.wicket.model.IModel;
import ru.ydn.wicket.wicketorientdb.OrientDbWebSession;
import ru.ydn.wicket.wicketorientdb.model.OClassNamingModel;
import com.orientechnologies.orient.core.metadata.schema.OClass;
/**
* {@link IChoiceRenderer} for {@link OClass}es
*/
public class OClassChoiceRenderer implements IChoiceRenderer<OClass> {
public static final OClassChoiceRenderer INSTANCE = new OClassChoiceRenderer();
private static final long serialVersionUID = 1L;
@Override
public Object getDisplayValue(OClass object) {
return new OClassNamingModel(object).getObject();
}
@Override
public String getIdValue(OClass object, int index) {
return object.getName();
}
@Override
public OClass getObject(String id,
IModel<? extends List<? extends OClass>> choicesModel) {
OClass ret = OrientDbWebSession.get().getDatabase().getMetadata().getSchema().getClass(id);
List<? extends OClass> choices = choicesModel.getObject();
return choices!=null && choices.contains(ret) ? ret : null;
}
}
|
package ru.ydn.wicket.wicketorientdb.utils;
import java.util.List;
import org.apache.wicket.markup.html.form.IChoiceRenderer;
import org.apache.wicket.model.IModel;
import ru.ydn.wicket.wicketorientdb.OrientDbWebSession;
import ru.ydn.wicket.wicketorientdb.model.OClassNamingModel;
import com.orientechnologies.orient.core.metadata.schema.OClass;
/**
* {@link IChoiceRenderer} for {@link OClass}es
*/
public class OClassChoiceRenderer implements IChoiceRenderer<OClass> {
public static final OClassChoiceRenderer INSTANCE = new OClassChoiceRenderer(true);
public static final OClassChoiceRenderer INSTANCE_NO_LOCALIZATION = new OClassChoiceRenderer(false);
private static final long serialVersionUID = 1L;
private boolean localize;
public OClassChoiceRenderer(boolean localize) {
this.localize = localize;
}
@Override
public Object getDisplayValue(OClass object) {
return localize?new OClassNamingModel(object).getObject():object.getName();
}
@Override
public String getIdValue(OClass object, int index) {
return object.getName();
}
@Override
public OClass getObject(String id,
IModel<? extends List<? extends OClass>> choicesModel) {
OClass ret = OrientDbWebSession.get().getDatabase().getMetadata().getSchema().getClass(id);
List<? extends OClass> choices = choicesModel.getObject();
return choices!=null && choices.contains(ret) ? ret : null;
}
}
|
Allow to avoid translation of classname
|
Allow to avoid translation of classname
|
Java
|
apache-2.0
|
OrienteerDW/wicket-orientdb,OrienteerDW/wicket-orientdb,OrienteerBAP/wicket-orientdb,OrienteerBAP/wicket-orientdb
|
java
|
## Code Before:
package ru.ydn.wicket.wicketorientdb.utils;
import java.util.List;
import org.apache.wicket.markup.html.form.IChoiceRenderer;
import org.apache.wicket.model.IModel;
import ru.ydn.wicket.wicketorientdb.OrientDbWebSession;
import ru.ydn.wicket.wicketorientdb.model.OClassNamingModel;
import com.orientechnologies.orient.core.metadata.schema.OClass;
/**
* {@link IChoiceRenderer} for {@link OClass}es
*/
public class OClassChoiceRenderer implements IChoiceRenderer<OClass> {
public static final OClassChoiceRenderer INSTANCE = new OClassChoiceRenderer();
private static final long serialVersionUID = 1L;
@Override
public Object getDisplayValue(OClass object) {
return new OClassNamingModel(object).getObject();
}
@Override
public String getIdValue(OClass object, int index) {
return object.getName();
}
@Override
public OClass getObject(String id,
IModel<? extends List<? extends OClass>> choicesModel) {
OClass ret = OrientDbWebSession.get().getDatabase().getMetadata().getSchema().getClass(id);
List<? extends OClass> choices = choicesModel.getObject();
return choices!=null && choices.contains(ret) ? ret : null;
}
}
## Instruction:
Allow to avoid translation of classname
## Code After:
package ru.ydn.wicket.wicketorientdb.utils;
import java.util.List;
import org.apache.wicket.markup.html.form.IChoiceRenderer;
import org.apache.wicket.model.IModel;
import ru.ydn.wicket.wicketorientdb.OrientDbWebSession;
import ru.ydn.wicket.wicketorientdb.model.OClassNamingModel;
import com.orientechnologies.orient.core.metadata.schema.OClass;
/**
* {@link IChoiceRenderer} for {@link OClass}es
*/
public class OClassChoiceRenderer implements IChoiceRenderer<OClass> {
public static final OClassChoiceRenderer INSTANCE = new OClassChoiceRenderer(true);
public static final OClassChoiceRenderer INSTANCE_NO_LOCALIZATION = new OClassChoiceRenderer(false);
private static final long serialVersionUID = 1L;
private boolean localize;
public OClassChoiceRenderer(boolean localize) {
this.localize = localize;
}
@Override
public Object getDisplayValue(OClass object) {
return localize?new OClassNamingModel(object).getObject():object.getName();
}
@Override
public String getIdValue(OClass object, int index) {
return object.getName();
}
@Override
public OClass getObject(String id,
IModel<? extends List<? extends OClass>> choicesModel) {
OClass ret = OrientDbWebSession.get().getDatabase().getMetadata().getSchema().getClass(id);
List<? extends OClass> choices = choicesModel.getObject();
return choices!=null && choices.contains(ret) ? ret : null;
}
}
|
# ... existing code ...
*/
public class OClassChoiceRenderer implements IChoiceRenderer<OClass> {
public static final OClassChoiceRenderer INSTANCE = new OClassChoiceRenderer(true);
public static final OClassChoiceRenderer INSTANCE_NO_LOCALIZATION = new OClassChoiceRenderer(false);
private static final long serialVersionUID = 1L;
private boolean localize;
public OClassChoiceRenderer(boolean localize) {
this.localize = localize;
}
@Override
public Object getDisplayValue(OClass object) {
return localize?new OClassNamingModel(object).getObject():object.getName();
}
@Override
# ... rest of the code ...
|
adcb7af597c77d85eb9234d91e2c0bd8575630e1
|
fcm_django/api/__init__.py
|
fcm_django/api/__init__.py
|
from django.conf import settings
if "tastypie" in settings.INSTALLED_APPS:
# Tastypie resources are importable from the api package level (backwards compatibility)
from .tastypie import APNSDeviceResource, GCMDeviceResource, WNSDeviceResource, APNSDeviceAuthenticatedResource, \
GCMDeviceAuthenticatedResource, WNSDeviceAuthenticatedResource
__all__ = [
"APNSDeviceResource",
"GCMDeviceResource",
"WNSDeviceResource",
"APNSDeviceAuthenticatedResource",
"GCMDeviceAuthenticatedResource",
"WNSDeviceAuthenticatedResource",
]
|
from django.conf import settings
if "tastypie" in settings.INSTALLED_APPS:
# Tastypie resources are importable from the api package level (backwards compatibility)
from .tastypie import APNSDeviceAuthenticatedResource, FCMDeviceResource
__all__ = [
"APNSDeviceAuthenticatedResource",
"FCMDeviceResource",
]
|
Remove references to old resources
|
Remove references to old resources
|
Python
|
mit
|
xtrinch/fcm-django
|
python
|
## Code Before:
from django.conf import settings
if "tastypie" in settings.INSTALLED_APPS:
# Tastypie resources are importable from the api package level (backwards compatibility)
from .tastypie import APNSDeviceResource, GCMDeviceResource, WNSDeviceResource, APNSDeviceAuthenticatedResource, \
GCMDeviceAuthenticatedResource, WNSDeviceAuthenticatedResource
__all__ = [
"APNSDeviceResource",
"GCMDeviceResource",
"WNSDeviceResource",
"APNSDeviceAuthenticatedResource",
"GCMDeviceAuthenticatedResource",
"WNSDeviceAuthenticatedResource",
]
## Instruction:
Remove references to old resources
## Code After:
from django.conf import settings
if "tastypie" in settings.INSTALLED_APPS:
# Tastypie resources are importable from the api package level (backwards compatibility)
from .tastypie import APNSDeviceAuthenticatedResource, FCMDeviceResource
__all__ = [
"APNSDeviceAuthenticatedResource",
"FCMDeviceResource",
]
|
# ... existing code ...
if "tastypie" in settings.INSTALLED_APPS:
# Tastypie resources are importable from the api package level (backwards compatibility)
from .tastypie import APNSDeviceAuthenticatedResource, FCMDeviceResource
__all__ = [
"APNSDeviceAuthenticatedResource",
"FCMDeviceResource",
]
# ... rest of the code ...
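For context on what the trimmed __all__ controls, a small hedged sketch of a star-import against this package; it assumes a configured Django project with tastypie in INSTALLED_APPS, so the conditional import actually runs, and the resource names come from the record above:

# Assumes tastypie is installed, so fcm_django.api performs the conditional import.
from fcm_django.api import *   # wildcard import honours __all__

print(FCMDeviceResource, APNSDeviceAuthenticatedResource)   # both re-exported
# GCMDeviceResource is no longer imported or listed in __all__, so referencing
# it here would raise NameError.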
|
a099eab75245005527e03fb5278a49a6d565c8f9
|
wagtailstartproject/project_template/tests/test_selenium/test_pages.py
|
wagtailstartproject/project_template/tests/test_selenium/test_pages.py
|
from wagtail.wagtailcore.models import Page
from .base import SeleniumTestCase
class PagesTest(SeleniumTestCase):
fixtures = ['basic_site.json']
def test_wagtail_pages(self):
"""Check if all Wagtail pages can be retrieved"""
pages = Page.objects.live()
for page in pages:
url = page.relative_url(page.get_site())
if url is not None:
self.get(url)
self.assert_status_code('200')
|
from wagtail.wagtailcore.models import Page
from .base import SeleniumTestCase
class PagesTest(SeleniumTestCase):
def test_wagtail_pages(self):
"""Check if all Wagtail pages can be retrieved"""
pages = Page.objects.live()
for page in pages:
url = page.relative_url(page.get_site())
if url is not None:
self.get(url)
self.assert_status_code('200')
|
Remove unnecessary fixtures attribute, already set by base class.
|
Remove unnecessary fixtures attribute, already set by base class.
|
Python
|
mit
|
leukeleu/wagtail-startproject,leukeleu/wagtail-startproject
|
python
|
## Code Before:
from wagtail.wagtailcore.models import Page
from .base import SeleniumTestCase
class PagesTest(SeleniumTestCase):
fixtures = ['basic_site.json']
def test_wagtail_pages(self):
"""Check if all Wagtail pages can be retrieved"""
pages = Page.objects.live()
for page in pages:
url = page.relative_url(page.get_site())
if url is not None:
self.get(url)
self.assert_status_code('200')
## Instruction:
Remove unnecessary fixtures attribute, already set by base class.
## Code After:
from wagtail.wagtailcore.models import Page
from .base import SeleniumTestCase
class PagesTest(SeleniumTestCase):
def test_wagtail_pages(self):
"""Check if all Wagtail pages can be retrieved"""
pages = Page.objects.live()
for page in pages:
url = page.relative_url(page.get_site())
if url is not None:
self.get(url)
self.assert_status_code('200')
|
...
class PagesTest(SeleniumTestCase):
def test_wagtail_pages(self):
"""Check if all Wagtail pages can be retrieved"""
...
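The change relies on the base class already carrying the fixture list. A hedged sketch of why that works: test cases read fixtures as an ordinary class attribute, so a value set on SeleniumTestCase is inherited by subclasses. The base-class body below is an assumption about tests/test_selenium/base.py, not taken from the record:

from django.test import LiveServerTestCase

class SeleniumTestCase(LiveServerTestCase):
    fixtures = ['basic_site.json']        # assumed to be declared once in the base class

class PagesTest(SeleniumTestCase):
    pass                                  # inherits the same fixtures list

assert PagesTest.fixtures == ['basic_site.json']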
|
a7bb3eebdfa71ad87301fb84d54fd6de0249ec29
|
bin/initialize_data.py
|
bin/initialize_data.py
|
from twitterbot.twitter_bot import get_redis
def add_data(redis, key, data):
for item in data:
redis.sadd(key, item.encode('utf-8'))
redis = get_redis('127.0.0.1:6379')
redis.delete('adjectives', 'sentences')
adjectives = ('smart', 'helpful', 'kind', 'hard-working', 'meticulous', 'diligent')
add_data(redis, 'adjectives', adjectives)
sentences = ('I really appreciate how {} you are.',
'I am super inspired by how {} you are.',
'I am so impressed by how {} you are.')
add_data(redis, 'sentences', sentences)
|
from twitterbot.twitter_bot import get_redis
def add_data(redis, key, data):
for item in data:
redis.sadd(key, item.encode('utf-8'))
redis = get_redis()
redis.delete('adjectives', 'sentences')
adjectives = ('smart', 'helpful', 'kind', 'hard-working', 'meticulous', 'diligent')
add_data(redis, 'adjectives', adjectives)
sentences = ('I really appreciate how {} you are.',
'I am super inspired by how {} you are.',
'I am so impressed by how {} you are.')
add_data(redis, 'sentences', sentences)
|
Use redis configuration when initializing data
|
Use redis configuration when initializing data
|
Python
|
mit
|
jessamynsmith/twitterbot,jessamynsmith/heartbot,jessamynsmith/twitterbot,jessamynsmith/heartbot
|
python
|
## Code Before:
from twitterbot.twitter_bot import get_redis
def add_data(redis, key, data):
for item in data:
redis.sadd(key, item.encode('utf-8'))
redis = get_redis('127.0.0.1:6379')
redis.delete('adjectives', 'sentences')
adjectives = ('smart', 'helpful', 'kind', 'hard-working', 'meticulous', 'diligent')
add_data(redis, 'adjectives', adjectives)
sentences = ('I really appreciate how {} you are.',
'I am super inspired by how {} you are.',
'I am so impressed by how {} you are.')
add_data(redis, 'sentences', sentences)
## Instruction:
Use redis configuration when initializing data
## Code After:
from twitterbot.twitter_bot import get_redis
def add_data(redis, key, data):
for item in data:
redis.sadd(key, item.encode('utf-8'))
redis = get_redis()
redis.delete('adjectives', 'sentences')
adjectives = ('smart', 'helpful', 'kind', 'hard-working', 'meticulous', 'diligent')
add_data(redis, 'adjectives', adjectives)
sentences = ('I really appreciate how {} you are.',
'I am super inspired by how {} you are.',
'I am so impressed by how {} you are.')
add_data(redis, 'sentences', sentences)
|
// ... existing code ...
for item in data:
redis.sadd(key, item.encode('utf-8'))
redis = get_redis()
redis.delete('adjectives', 'sentences')
// ... rest of the code ...
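A quick way to confirm the seeded sets, sketched with the plain redis-py client rather than the project's get_redis() helper; the host and port are assumptions, not read from the project's configuration:

import redis

r = redis.StrictRedis(host='127.0.0.1', port=6379)
print(r.scard('adjectives'))      # expected: 6
print(sorted(member.decode('utf-8') for member in r.smembers('sentences')))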
|
d3bd245fd9dd935a74a685fa823cb1f24bb9bc12
|
alura/java-bd/loja-virtual/TestaInsercao.java
|
alura/java-bd/loja-virtual/TestaInsercao.java
|
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
public class TestaInsercao {
public static void main(String[] args) throws SQLException {
Connection connection = Database.getConnection();
Statement statement = connection.createStatement();
boolean resultado = statement.execute("insert into Produto (nome, descricao) values('Notebook', 'Notebook i5'", Statement.RETURN_GENERATE_KEYS);
System.out.println(resultado);
ResultSet resultSet = statement.getGeneratedKeys();
while(resultSet.next()) {
String id = resultSet.getString("id");
System.out.println(id + " gerado");
}
statement.close();
connection.close();
}
}
|
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
public class TestaInsercao {
public static void main(String[] args) throws SQLException {
String nome = "Notebook";
String descricao = " Notebook i5";
Connection connection = Database.getConnection();
String sql = "insert into Produto (nome, descricao) values(?, ?)";
PreparedStatement statement = connection.prepareStatement(sql,
Statement.RETURN_GENERATE_KEYS);
statement.setString(1, nome);
statement.setString(2, descricao);
boolean resultado = statement.execute();
System.out.println(resultado);
ResultSet resultSet = statement.getGeneratedKeys();
while(resultSet.next()) {
String id = resultSet.getString("id");
System.out.println(id + " gerado");
}
resultSet.close();
statement.close();
connection.close();
}
}
|
Insert new files, Alura, JDBC e banco de dados em Java, Aula 3
|
Insert new files, Alura, JDBC e banco de dados em Java, Aula 3
|
Java
|
mit
|
fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs
|
java
|
## Code Before:
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
public class TestaInsercao {
public static void main(String[] args) throws SQLException {
Connection connection = Database.getConnection();
Statement statement = connection.createStatement();
boolean resultado = statement.execute("insert into Produto (nome, descricao) values('Notebook', 'Notebook i5'", Statement.RETURN_GENERATE_KEYS);
System.out.println(resultado);
ResultSet resultSet = statement.getGeneratedKeys();
while(resultSet.next()) {
String id = resultSet.getString("id");
System.out.println(id + " gerado");
}
statement.close();
connection.close();
}
}
## Instruction:
Insert new files, Alura, JDBC e banco de dados em Java, Aula 3
## Code After:
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
public class TestaInsercao {
public static void main(String[] args) throws SQLException {
String nome = "Notebook";
String descricao = " Notebook i5";
Connection connection = Database.getConnection();
String sql = "insert into Produto (nome, descricao) values(?, ?)";
PreparedStatement statement = connection.prepareStatement(sql,
Statement.RETURN_GENERATE_KEYS);
statement.setString(1, nome);
statement.setString(2, descricao);
boolean resultado = statement.execute();
System.out.println(resultado);
ResultSet resultSet = statement.getGeneratedKeys();
while(resultSet.next()) {
String id = resultSet.getString("id");
System.out.println(id + " gerado");
}
resultSet.close();
statement.close();
connection.close();
}
}
|
// ... existing code ...
public class TestaInsercao {
public static void main(String[] args) throws SQLException {
String nome = "Notebook";
String descricao = " Notebook i5";
Connection connection = Database.getConnection();
String sql = "insert into Produto (nome, descricao) values(?, ?)";
PreparedStatement statement = connection.prepareStatement(sql,
Statement.RETURN_GENERATE_KEYS);
statement.setString(1, nome);
statement.setString(2, descricao);
boolean resultado = statement.execute();
System.out.println(resultado);
ResultSet resultSet = statement.getGeneratedKeys();
// ... modified code ...
System.out.println(id + " gerado");
}
resultSet.close();
statement.close();
connection.close();
}
// ... rest of the code ...
|
36d3c2f81ea39968bc58bab172e6bf035147ae3c
|
mpld3/test_plots/test_logscale.py
|
mpld3/test_plots/test_logscale.py
|
"""Plot to test logscale"""
import matplotlib.pyplot as plt
import numpy as np
import mpld3
def create_plot():
fig = plt.figure()
fig.subplots_adjust(hspace=0.4, wspace=0.4)
ax1 = fig.add_subplot(2, 2, 1)
ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log')
ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log')
ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3)
x = np.linspace(1, 1e2)
y = x ** 2
for ax in [ax1, ax2, ax3, ax4]:
ax.plot(x, y)
return fig
def test_logscale():
fig = create_plot()
html = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot())
|
import matplotlib.pyplot as plt
import numpy as np
import mpld3
def create_plot():
fig = plt.figure()
fig.subplots_adjust(hspace=0.4, wspace=0.4)
ax1 = fig.add_subplot(2, 2, 1)
ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log')
ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log')
ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3)
x = np.linspace(1, 1e2)
y = x ** 2
for ax in [ax1, ax2, ax3, ax4]:
ax.plot(x, y)
return fig
def test_logscale():
fig = create_plot()
html = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot())
|
Add TODO to broken test
|
Add TODO to broken test
|
Python
|
bsd-3-clause
|
mpld3/mpld3,jakevdp/mpld3,jakevdp/mpld3,mpld3/mpld3
|
python
|
## Code Before:
"""Plot to test logscale"""
import matplotlib.pyplot as plt
import numpy as np
import mpld3
def create_plot():
fig = plt.figure()
fig.subplots_adjust(hspace=0.4, wspace=0.4)
ax1 = fig.add_subplot(2, 2, 1)
ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log')
ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log')
ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3)
x = np.linspace(1, 1e2)
y = x ** 2
for ax in [ax1, ax2, ax3, ax4]:
ax.plot(x, y)
return fig
def test_logscale():
fig = create_plot()
html = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot())
## Instruction:
Add TODO to broken test
## Code After:
import matplotlib.pyplot as plt
import numpy as np
import mpld3
def create_plot():
fig = plt.figure()
fig.subplots_adjust(hspace=0.4, wspace=0.4)
ax1 = fig.add_subplot(2, 2, 1)
ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log')
ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log')
ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3)
x = np.linspace(1, 1e2)
y = x ** 2
for ax in [ax1, ax2, ax3, ax4]:
ax.plot(x, y)
return fig
def test_logscale():
fig = create_plot()
html = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot())
|
// ... existing code ...
import matplotlib.pyplot as plt
import numpy as np
import mpld3
// ... rest of the code ...
|
d18ff30bbddde5049ffbe23bce19288c3c47e41b
|
posts/views.py
|
posts/views.py
|
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from .models import Post
class PostListView(ListView):
model = Post
context_object_name = 'posts'
class PostDetailView(DetailView):
model = Post
context_object_name = 'post'
|
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from .models import Post
class PostListView(ListView):
model = Post
context_object_name = 'posts'
def get_queryset(self):
"""
Order posts by the day they were added, from newest, to oldest.
"""
queryset = super(PostListView, self).get_queryset()
return queryset.order_by('-added_at')
class PostDetailView(DetailView):
model = Post
context_object_name = 'post'
|
Order posts from newest to oldest
|
posts: Order posts from newest to oldest
|
Python
|
mit
|
rtrembecky/roots,tbabej/roots,rtrembecky/roots,tbabej/roots,matus-stehlik/roots,matus-stehlik/roots,matus-stehlik/glowing-batman,matus-stehlik/roots,matus-stehlik/glowing-batman,rtrembecky/roots,tbabej/roots
|
python
|
## Code Before:
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from .models import Post
class PostListView(ListView):
model = Post
context_object_name = 'posts'
class PostDetailView(DetailView):
model = Post
context_object_name = 'post'
## Instruction:
posts: Order posts from newest to oldest
## Code After:
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from .models import Post
class PostListView(ListView):
model = Post
context_object_name = 'posts'
def get_queryset(self):
"""
Order posts by the day they were added, from newest, to oldest.
"""
queryset = super(PostListView, self).get_queryset()
return queryset.order_by('-added_at')
class PostDetailView(DetailView):
model = Post
context_object_name = 'post'
|
...
model = Post
context_object_name = 'posts'
def get_queryset(self):
"""
Order posts by the day they were added, from newest, to oldest.
"""
queryset = super(PostListView, self).get_queryset()
return queryset.order_by('-added_at')
class PostDetailView(DetailView):
model = Post
...
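An equivalent way to get the same newest-first listing is to declare the ordering on the model rather than overriding get_queryset(). A trimmed sketch; the real Post model has more fields than shown, only the added_at field name is taken from the record and auto_now_add is illustrative:

from django.db import models

class Post(models.Model):
    added_at = models.DateTimeField(auto_now_add=True)   # illustrative field definition

    class Meta:
        ordering = ['-added_at']   # every default queryset comes back newest first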
|
e93789084c03b2a566835006d6d5adaee3d4bbe6
|
silk/globals.py
|
silk/globals.py
|
__all__ = []
try:
from silk.webdoc import css, html, node
__all__.extend(('css', 'html', 'node'))
except ImportError:
pass
try:
from silk.webdb import (
AuthenticationError, BoolColumn, Column, DB, DataColumn,
DateTimeColumn, FloatColumn, IntColumn, RecordError, ReferenceColumn,
RowidColumn, SQLSyntaxError, StrColumn, Table, UnknownDriver, connect
)
__all__.extend((
'AuthenticationError', 'BoolColumn', 'Column', 'DB', 'DataColumn',
'DateTimeColumn', 'FloatColumn', 'IntColumn', 'RecordError',
'ReferenceColumn', 'RowidColumn', 'SQLSyntaxError', 'StrColumn',
'Table', 'UnknownDriver', 'connect'
))
except ImportError:
pass
try:
from silk.webreq import (
B64Document, BaseRouter, Document, FormData, HTTP, Header, HeaderList,
PathRouter, Query, Redirect, Response, TextView, URI
)
__all__.extend((
'B64Document', 'BaseRouter', 'Document', 'FormData', 'HTTP', 'Header',
'HeaderList', 'PathRouter', 'Query', 'Redirect', 'Response',
'TextView', 'URI'
))
except ImportError:
pass
|
__all__ = []
try:
from silk.webdoc import css, html, node
__all__ += ['css', 'html', 'node']
except ImportError:
pass
try:
from silk.webdb import (
AuthenticationError, BoolColumn, Column, DB, DataColumn,
DateTimeColumn, FloatColumn, IntColumn, RecordError, ReferenceColumn,
RowidColumn, SQLSyntaxError, StrColumn, Table, UnknownDriver, connect
)
__all__ += [
'AuthenticationError', 'BoolColumn', 'Column', 'DB', 'DataColumn',
'DateTimeColumn', 'FloatColumn', 'IntColumn', 'RecordError',
'ReferenceColumn', 'RowidColumn', 'SQLSyntaxError', 'StrColumn',
'Table', 'UnknownDriver', 'connect'
]
except ImportError:
pass
try:
from silk.webreq import (
B64Document, BaseRouter, Document, FormData, HTTP, Header, HeaderList,
PathRouter, Query, Redirect, Response, TextView, URI
)
__all__ += [
'B64Document', 'BaseRouter', 'Document', 'FormData', 'HTTP', 'Header',
'HeaderList', 'PathRouter', 'Query', 'Redirect', 'Response',
'TextView', 'URI'
]
except ImportError:
pass
|
Use += to modify __all__, to appease flake8
|
Use += to modify __all__, to appease flake8
|
Python
|
bsd-3-clause
|
orbnauticus/silk
|
python
|
## Code Before:
__all__ = []
try:
from silk.webdoc import css, html, node
__all__.extend(('css', 'html', 'node'))
except ImportError:
pass
try:
from silk.webdb import (
AuthenticationError, BoolColumn, Column, DB, DataColumn,
DateTimeColumn, FloatColumn, IntColumn, RecordError, ReferenceColumn,
RowidColumn, SQLSyntaxError, StrColumn, Table, UnknownDriver, connect
)
__all__.extend((
'AuthenticationError', 'BoolColumn', 'Column', 'DB', 'DataColumn',
'DateTimeColumn', 'FloatColumn', 'IntColumn', 'RecordError',
'ReferenceColumn', 'RowidColumn', 'SQLSyntaxError', 'StrColumn',
'Table', 'UnknownDriver', 'connect'
))
except ImportError:
pass
try:
from silk.webreq import (
B64Document, BaseRouter, Document, FormData, HTTP, Header, HeaderList,
PathRouter, Query, Redirect, Response, TextView, URI
)
__all__.extend((
'B64Document', 'BaseRouter', 'Document', 'FormData', 'HTTP', 'Header',
'HeaderList', 'PathRouter', 'Query', 'Redirect', 'Response',
'TextView', 'URI'
))
except ImportError:
pass
## Instruction:
Use += to modify __all__, to appease flake8
## Code After:
__all__ = []
try:
from silk.webdoc import css, html, node
__all__ += ['css', 'html', 'node']
except ImportError:
pass
try:
from silk.webdb import (
AuthenticationError, BoolColumn, Column, DB, DataColumn,
DateTimeColumn, FloatColumn, IntColumn, RecordError, ReferenceColumn,
RowidColumn, SQLSyntaxError, StrColumn, Table, UnknownDriver, connect
)
__all__ += [
'AuthenticationError', 'BoolColumn', 'Column', 'DB', 'DataColumn',
'DateTimeColumn', 'FloatColumn', 'IntColumn', 'RecordError',
'ReferenceColumn', 'RowidColumn', 'SQLSyntaxError', 'StrColumn',
'Table', 'UnknownDriver', 'connect'
]
except ImportError:
pass
try:
from silk.webreq import (
B64Document, BaseRouter, Document, FormData, HTTP, Header, HeaderList,
PathRouter, Query, Redirect, Response, TextView, URI
)
__all__ += [
'B64Document', 'BaseRouter', 'Document', 'FormData', 'HTTP', 'Header',
'HeaderList', 'PathRouter', 'Query', 'Redirect', 'Response',
'TextView', 'URI'
]
except ImportError:
pass
|
// ... existing code ...
try:
from silk.webdoc import css, html, node
__all__ += ['css', 'html', 'node']
except ImportError:
pass
// ... modified code ...
DateTimeColumn, FloatColumn, IntColumn, RecordError, ReferenceColumn,
RowidColumn, SQLSyntaxError, StrColumn, Table, UnknownDriver, connect
)
__all__ += [
'AuthenticationError', 'BoolColumn', 'Column', 'DB', 'DataColumn',
'DateTimeColumn', 'FloatColumn', 'IntColumn', 'RecordError',
'ReferenceColumn', 'RowidColumn', 'SQLSyntaxError', 'StrColumn',
'Table', 'UnknownDriver', 'connect'
]
except ImportError:
pass
...
PathRouter, Query, Redirect, Response, TextView, URI
)
__all__ += [
'B64Document', 'BaseRouter', 'Document', 'FormData', 'HTTP', 'Header',
'HeaderList', 'PathRouter', 'Query', 'Redirect', 'Response',
'TextView', 'URI'
]
except ImportError:
pass
// ... rest of the code ...
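The reason flake8 objects to the extend() form: pyflakes, which flake8 wraps, only treats names as re-exported when __all__ is built with plain or augmented assignment, so imports referenced solely inside an extend() call look unused (typically reported as F401). A small illustrative sketch of the two styles in one module; the warning code is indicative, not guaranteed:

# pyflakes flags the first import as unused because it does not follow extend(),
# while the second import counts as used thanks to the augmented assignment.
from silk.webdoc import css
from silk.webdoc import node

__all__ = []
__all__.extend(('css',))   # 'css' is invisible to pyflakes
__all__ += ['node']        # 'node' is recognised as exported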
|
debf82416f3eaeeb520724aa2100392342c74e51
|
core/src/main/java/com/hp/autonomy/searchcomponents/core/search/DocumentsService.java
|
core/src/main/java/com/hp/autonomy/searchcomponents/core/search/DocumentsService.java
|
/*
* Copyright 2015 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.searchcomponents.core.search;
import com.hp.autonomy.types.requests.Documents;
import java.io.Serializable;
import java.util.List;
import java.util.Set;
public interface DocumentsService<S extends Serializable, D extends SearchResult, E extends Exception> {
String HIGHLIGHT_START_TAG = "<Find-IOD-QueryText-Placeholder>";
String HIGHLIGHT_END_TAG = "</Find-IOD-QueryText-Placeholder>";
Documents<D> queryTextIndex(final SearchRequest<S> searchRequest) throws E;
Documents<D> queryTextIndexForPromotions(final SearchRequest<S> searchRequest) throws E;
List<D> findSimilar(Set<S> indexes, String reference) throws E;
}
|
/*
* Copyright 2015 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.searchcomponents.core.search;
import com.hp.autonomy.types.requests.Documents;
import java.io.Serializable;
import java.util.List;
import java.util.Set;
public interface DocumentsService<S extends Serializable, D extends SearchResult, E extends Exception> {
String HIGHLIGHT_START_TAG = "<HavenSearch-QueryText-Placeholder>";
String HIGHLIGHT_END_TAG = "</HavenSearch-QueryText-Placeholder>";
Documents<D> queryTextIndex(final SearchRequest<S> searchRequest) throws E;
Documents<D> queryTextIndexForPromotions(final SearchRequest<S> searchRequest) throws E;
List<D> findSimilar(Set<S> indexes, String reference) throws E;
}
|
Change custom tag name [rev. matthew.gordon]
|
Change custom tag name [rev. matthew.gordon]
|
Java
|
mit
|
hpe-idol/haven-search-components,hpautonomy/haven-search-components,hpautonomy/haven-search-components,hpe-idol/haven-search-components
|
java
|
## Code Before:
/*
* Copyright 2015 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.searchcomponents.core.search;
import com.hp.autonomy.types.requests.Documents;
import java.io.Serializable;
import java.util.List;
import java.util.Set;
public interface DocumentsService<S extends Serializable, D extends SearchResult, E extends Exception> {
String HIGHLIGHT_START_TAG = "<Find-IOD-QueryText-Placeholder>";
String HIGHLIGHT_END_TAG = "</Find-IOD-QueryText-Placeholder>";
Documents<D> queryTextIndex(final SearchRequest<S> searchRequest) throws E;
Documents<D> queryTextIndexForPromotions(final SearchRequest<S> searchRequest) throws E;
List<D> findSimilar(Set<S> indexes, String reference) throws E;
}
## Instruction:
Change custom tag name [rev. matthew.gordon]
## Code After:
/*
* Copyright 2015 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.searchcomponents.core.search;
import com.hp.autonomy.types.requests.Documents;
import java.io.Serializable;
import java.util.List;
import java.util.Set;
public interface DocumentsService<S extends Serializable, D extends SearchResult, E extends Exception> {
String HIGHLIGHT_START_TAG = "<HavenSearch-QueryText-Placeholder>";
String HIGHLIGHT_END_TAG = "</HavenSearch-QueryText-Placeholder>";
Documents<D> queryTextIndex(final SearchRequest<S> searchRequest) throws E;
Documents<D> queryTextIndexForPromotions(final SearchRequest<S> searchRequest) throws E;
List<D> findSimilar(Set<S> indexes, String reference) throws E;
}
|
# ... existing code ...
import java.util.Set;
public interface DocumentsService<S extends Serializable, D extends SearchResult, E extends Exception> {
String HIGHLIGHT_START_TAG = "<HavenSearch-QueryText-Placeholder>";
String HIGHLIGHT_END_TAG = "</HavenSearch-QueryText-Placeholder>";
Documents<D> queryTextIndex(final SearchRequest<S> searchRequest) throws E;
# ... rest of the code ...
|
db769b79cf99203a93c441bcfe98c993aa0ca3b5
|
Sources/Cinput/input.c
|
Sources/Cinput/input.c
|
inline int input_event_get_sec(struct input_event event) {
return event.input_event_sec;
}
inline int input_event_get_usec(struct input_event event) {
return event.input_event_usec;
}
|
inline int input_event_get_sec(struct input_event event) {
#ifdef input_event_sec
return event.input_event_sec;
#else
return event.time.tv_sec
#endif
}
inline int input_event_get_usec(struct input_event event) {
#ifdef input_event_usec
return event.input_event_usec;
#else
return event.time.tv_usec
#endif
}
|
Fix compilation on older linux kernels
|
Fix compilation on older linux kernels
|
C
|
apache-2.0
|
sersoft-gmbh/DeviceInput,sersoft-gmbh/DeviceInput
|
c
|
## Code Before:
inline int input_event_get_sec(struct input_event event) {
return event.input_event_sec;
}
inline int input_event_get_usec(struct input_event event) {
return event.input_event_usec;
}
## Instruction:
Fix compilation on older linux kernels
## Code After:
inline int input_event_get_sec(struct input_event event) {
#ifdef input_event_sec
return event.input_event_sec;
#else
return event.time.tv_sec
#endif
}
inline int input_event_get_usec(struct input_event event) {
#ifdef input_event_usec
return event.input_event_usec;
#else
return event.time.tv_usec
#endif
}
|
// ... existing code ...
inline int input_event_get_sec(struct input_event event) {
#ifdef input_event_sec
return event.input_event_sec;
#else
return event.time.tv_sec
#endif
}
inline int input_event_get_usec(struct input_event event) {
#ifdef input_event_usec
return event.input_event_usec;
#else
return event.time.tv_usec
#endif
}
// ... rest of the code ...
|
29f847ae3dae83f4ab24b8a9f1ecffc74d49d68c
|
xchainer/array_node.h
|
xchainer/array_node.h
|
namespace xchainer {
class OpNode;
class ArrayNode {
public:
ArrayNode() = default;
const std::shared_ptr<OpNode>& next_node() { return next_node_; }
std::shared_ptr<const OpNode> next_node() const { return next_node_; }
void set_next_node(std::shared_ptr<OpNode> next_node) { next_node_ = std::move(next_node); }
const nonstd::optional<Array>& grad() const { return grad_; }
void set_grad(Array grad) { grad_.emplace(std::move(grad)); };
void ClearGrad() { grad_ = nonstd::nullopt; }
private:
std::shared_ptr<OpNode> next_node_;
nonstd::optional<Array> grad_;
};
} // xchainer
|
namespace xchainer {
class OpNode;
class ArrayNode {
public:
ArrayNode() = default;
const std::shared_ptr<OpNode>& next_node() { return next_node_; }
std::shared_ptr<const OpNode> next_node() const { return next_node_; }
void set_next_node(std::shared_ptr<OpNode> next_node) { next_node_ = std::move(next_node); }
const nonstd::optional<Array>& grad() const { return grad_; }
void set_grad(Array grad) { grad_.emplace(std::move(grad)); };
void ClearGrad() { grad_.reset(); }
private:
std::shared_ptr<OpNode> next_node_;
nonstd::optional<Array> grad_;
};
} // xchainer
|
Use reset() to make optional empty
|
Use reset() to make optional empty
|
C
|
mit
|
ktnyt/chainer,wkentaro/chainer,wkentaro/chainer,wkentaro/chainer,okuta/chainer,hvy/chainer,niboshi/chainer,niboshi/chainer,keisuke-umezawa/chainer,hvy/chainer,keisuke-umezawa/chainer,tkerola/chainer,jnishi/chainer,chainer/chainer,keisuke-umezawa/chainer,ktnyt/chainer,okuta/chainer,jnishi/chainer,chainer/chainer,jnishi/chainer,pfnet/chainer,wkentaro/chainer,okuta/chainer,ktnyt/chainer,hvy/chainer,keisuke-umezawa/chainer,jnishi/chainer,ktnyt/chainer,niboshi/chainer,okuta/chainer,chainer/chainer,hvy/chainer,chainer/chainer,niboshi/chainer
|
c
|
## Code Before:
namespace xchainer {
class OpNode;
class ArrayNode {
public:
ArrayNode() = default;
const std::shared_ptr<OpNode>& next_node() { return next_node_; }
std::shared_ptr<const OpNode> next_node() const { return next_node_; }
void set_next_node(std::shared_ptr<OpNode> next_node) { next_node_ = std::move(next_node); }
const nonstd::optional<Array>& grad() const { return grad_; }
void set_grad(Array grad) { grad_.emplace(std::move(grad)); };
void ClearGrad() { grad_ = nonstd::nullopt; }
private:
std::shared_ptr<OpNode> next_node_;
nonstd::optional<Array> grad_;
};
} // xchainer
## Instruction:
Use reset() to make optional empty
## Code After:
namespace xchainer {
class OpNode;
class ArrayNode {
public:
ArrayNode() = default;
const std::shared_ptr<OpNode>& next_node() { return next_node_; }
std::shared_ptr<const OpNode> next_node() const { return next_node_; }
void set_next_node(std::shared_ptr<OpNode> next_node) { next_node_ = std::move(next_node); }
const nonstd::optional<Array>& grad() const { return grad_; }
void set_grad(Array grad) { grad_.emplace(std::move(grad)); };
void ClearGrad() { grad_.reset(); }
private:
std::shared_ptr<OpNode> next_node_;
nonstd::optional<Array> grad_;
};
} // xchainer
|
...
void set_grad(Array grad) { grad_.emplace(std::move(grad)); };
void ClearGrad() { grad_.reset(); }
private:
std::shared_ptr<OpNode> next_node_;
...
|
787355ea64fb4a8967718cb1331b7cd1fa0d02e4
|
test/cases/vla/argument_eval.c
|
test/cases/vla/argument_eval.c
|
// RUN: %ocheck 0 %s
// RUN: %ocheck 0 %s -fstack-protector-all
extern void abort(void);
as, bs, fs;
static int a(){ as++; return 2; }
static int b(){ bs++; return 2; }
static int f(int p[a()][b()])
{
fs++;
return p[0][0] + p[0][1] + p[1][0] + p[1][1];
}
static void assert(_Bool b)
{
if(!b)
abort();
}
int main()
{
int ar[a()][b()];
assert(as == 1);
assert(bs == 1);
assert(fs == 0);
ar[0][0] = 5;
ar[0][1] = 4;
ar[1][0] = 3;
ar[1][1] = 2;
assert(as == 1);
assert(bs == 1);
assert(fs == 0);
assert(f(ar) == 14);
assert(as == 2);
assert(bs == 2);
assert(fs == 1);
return 0;
}
|
// RUN: %ocheck 0 %s
// RUN: %ocheck 0 %s -fstack-protector-all
extern void abort(void);
as, bs, fs;
static int a(){ as++; return 2; }
static int b(){ bs++; return 2; }
static int f(int p[a()][b()])
{
fs++;
return p[0][0] // 5
+ p[0][1] // 4
+ p[1][0] // 3
+ p[1][1] // 2
+ sizeof(p) // sizeof(T (*)[...]) = 8
+ sizeof(p[0]) // 2 * sizeof(int) = 8
+ sizeof(p[1][2]); // sizeof(int) = 4
}
static void assert(_Bool b)
{
if(!b)
abort();
}
int main()
{
int ar[a()][b()];
assert(as == 1);
assert(bs == 1);
assert(fs == 0);
ar[0][0] = 5;
ar[0][1] = 4;
ar[1][0] = 3;
ar[1][1] = 2;
assert(as == 1);
assert(bs == 1);
assert(fs == 0);
assert(f(ar) == 34);
assert(as == 2);
assert(bs == 2);
assert(fs == 1);
return 0;
}
|
Improve checks for vla-argument test
|
Improve checks for vla-argument test
|
C
|
mit
|
bobrippling/ucc-c-compiler,bobrippling/ucc-c-compiler,bobrippling/ucc-c-compiler
|
c
|
## Code Before:
// RUN: %ocheck 0 %s
// RUN: %ocheck 0 %s -fstack-protector-all
extern void abort(void);
as, bs, fs;
static int a(){ as++; return 2; }
static int b(){ bs++; return 2; }
static int f(int p[a()][b()])
{
fs++;
return p[0][0] + p[0][1] + p[1][0] + p[1][1];
}
static void assert(_Bool b)
{
if(!b)
abort();
}
int main()
{
int ar[a()][b()];
assert(as == 1);
assert(bs == 1);
assert(fs == 0);
ar[0][0] = 5;
ar[0][1] = 4;
ar[1][0] = 3;
ar[1][1] = 2;
assert(as == 1);
assert(bs == 1);
assert(fs == 0);
assert(f(ar) == 14);
assert(as == 2);
assert(bs == 2);
assert(fs == 1);
return 0;
}
## Instruction:
Improve checks for vla-argument test
## Code After:
// RUN: %ocheck 0 %s
// RUN: %ocheck 0 %s -fstack-protector-all
extern void abort(void);
as, bs, fs;
static int a(){ as++; return 2; }
static int b(){ bs++; return 2; }
static int f(int p[a()][b()])
{
fs++;
return p[0][0] // 5
+ p[0][1] // 4
+ p[1][0] // 3
+ p[1][1] // 2
+ sizeof(p) // sizeof(T (*)[...]) = 8
+ sizeof(p[0]) // 2 * sizeof(int) = 8
+ sizeof(p[1][2]); // sizeof(int) = 4
}
static void assert(_Bool b)
{
if(!b)
abort();
}
int main()
{
int ar[a()][b()];
assert(as == 1);
assert(bs == 1);
assert(fs == 0);
ar[0][0] = 5;
ar[0][1] = 4;
ar[1][0] = 3;
ar[1][1] = 2;
assert(as == 1);
assert(bs == 1);
assert(fs == 0);
assert(f(ar) == 34);
assert(as == 2);
assert(bs == 2);
assert(fs == 1);
return 0;
}
|
// ... existing code ...
static int f(int p[a()][b()])
{
fs++;
return p[0][0] // 5
+ p[0][1] // 4
+ p[1][0] // 3
+ p[1][1] // 2
+ sizeof(p) // sizeof(T (*)[...]) = 8
+ sizeof(p[0]) // 2 * sizeof(int) = 8
+ sizeof(p[1][2]); // sizeof(int) = 4
}
static void assert(_Bool b)
// ... modified code ...
assert(bs == 1);
assert(fs == 0);
assert(f(ar) == 34);
assert(as == 2);
assert(bs == 2);
// ... rest of the code ...
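The expected value of 34 can be checked by hand under the common LP64 assumptions the test implicitly relies on (8-byte pointers, 4-byte int): the element sum is 5 + 4 + 3 + 2 = 14; sizeof(p) is the size of a pointer to a VLA row, 8; sizeof(p[0]) is 2 * sizeof(int) = 8; sizeof(p[1][2]) is sizeof(int) = 4; and 14 + 8 + 8 + 4 = 34.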
|
c0238a985fecdec80c23e68beacee319befeedb2
|
app/src/test/kotlin/com/felipecosta/microservice/app/notes/frontcontroller/NotesFrontCommandTest.kt
|
app/src/test/kotlin/com/felipecosta/microservice/app/notes/frontcontroller/NotesFrontCommandTest.kt
|
package com.felipecosta.microservice.app.notes.frontcontroller
import com.felipecosta.microservice.server.Request
import com.felipecosta.microservice.server.Response
import com.felipecosta.microservice.server.renderer.Renderer
import io.mockk.every
import io.mockk.mockk
import org.junit.Assert.assertEquals
import org.junit.Test
class NotesFrontCommandTest {
private val renderer = mockk<Renderer>()
private val request = mockk<Request>()
private val notesFrontCommand = NotesFrontCommand()
@Test
fun givenOutputObjectWhenProcessThenVerifyResponse() {
val expectedBody = "Awesome output"
every {
renderer.render(Output("My First Website", "My Interesting Content"), "views/notes.html")
} answers {
expectedBody
}
notesFrontCommand.apply {
init(request, renderer)
notesFrontCommand.process()
}
assertEquals(Response(expectedBody, 200), notesFrontCommand.response)
}
}
|
package com.felipecosta.microservice.app.notes.frontcontroller
import com.felipecosta.microservice.server.Request
import com.felipecosta.microservice.server.Response
import com.felipecosta.microservice.server.renderer.Renderer
import io.mockk.every
import io.mockk.mockk
import org.junit.jupiter.api.Assertions.assertEquals
import org.junit.jupiter.api.Test
class NotesFrontCommandTest {
private val renderer = mockk<Renderer>()
private val request = mockk<Request>()
private val notesFrontCommand = NotesFrontCommand()
@Test
fun givenOutputObjectWhenProcessThenVerifyResponse() {
val expectedBody = "Awesome output"
every {
renderer.render(Output("My First Website", "My Interesting Content"), "views/notes.html")
} answers {
expectedBody
}
notesFrontCommand.apply {
init(request, renderer)
notesFrontCommand.process()
}
assertEquals(Response(expectedBody, 200), notesFrontCommand.response)
}
}
|
Migrate the mockk test sample to JUnit 5
|
Migrate the mockk test sample to JUnit 5
|
Kotlin
|
mit
|
fcostaa/kotlin-microservice,fcostaa/kotlin-microservice,fcostaa/kotlin-microservice
|
kotlin
|
## Code Before:
package com.felipecosta.microservice.app.notes.frontcontroller
import com.felipecosta.microservice.server.Request
import com.felipecosta.microservice.server.Response
import com.felipecosta.microservice.server.renderer.Renderer
import io.mockk.every
import io.mockk.mockk
import org.junit.Assert.assertEquals
import org.junit.Test
class NotesFrontCommandTest {
private val renderer = mockk<Renderer>()
private val request = mockk<Request>()
private val notesFrontCommand = NotesFrontCommand()
@Test
fun givenOutputObjectWhenProcessThenVerifyResponse() {
val expectedBody = "Awesome output"
every {
renderer.render(Output("My First Website", "My Interesting Content"), "views/notes.html")
} answers {
expectedBody
}
notesFrontCommand.apply {
init(request, renderer)
notesFrontCommand.process()
}
assertEquals(Response(expectedBody, 200), notesFrontCommand.response)
}
}
## Instruction:
Migrate the mockk test sample to JUnit 5
## Code After:
package com.felipecosta.microservice.app.notes.frontcontroller
import com.felipecosta.microservice.server.Request
import com.felipecosta.microservice.server.Response
import com.felipecosta.microservice.server.renderer.Renderer
import io.mockk.every
import io.mockk.mockk
import org.junit.jupiter.api.Assertions.assertEquals
import org.junit.jupiter.api.Test
class NotesFrontCommandTest {
private val renderer = mockk<Renderer>()
private val request = mockk<Request>()
private val notesFrontCommand = NotesFrontCommand()
@Test
fun givenOutputObjectWhenProcessThenVerifyResponse() {
val expectedBody = "Awesome output"
every {
renderer.render(Output("My First Website", "My Interesting Content"), "views/notes.html")
} answers {
expectedBody
}
notesFrontCommand.apply {
init(request, renderer)
notesFrontCommand.process()
}
assertEquals(Response(expectedBody, 200), notesFrontCommand.response)
}
}
|
# ... existing code ...
import com.felipecosta.microservice.server.renderer.Renderer
import io.mockk.every
import io.mockk.mockk
import org.junit.jupiter.api.Assertions.assertEquals
import org.junit.jupiter.api.Test
class NotesFrontCommandTest {
# ... rest of the code ...
|
b65658b70647af23db734c89bbd575fbcfc5cb04
|
source/pygmy-app/src/com/dev/pygmy/GameBoardInterfaceActivity.java
|
source/pygmy-app/src/com/dev/pygmy/GameBoardInterfaceActivity.java
|
package com.dev.pygmy;
import android.app.Activity;
import android.os.Bundle;
public class GameBoardInterfaceActivity extends Activity {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(new GameBoardView(getApplicationContext()));;
}
}
|
package com.dev.pygmy;
import android.annotation.SuppressLint;
import android.app.ActionBar.LayoutParams;
import android.app.Activity;
import android.os.Bundle;
import android.view.ViewGroup;
import android.widget.FrameLayout;
@SuppressLint("ResourceAsColor")
public class GameBoardInterfaceActivity extends Activity {
static final String TAG = "Pygmy";
private GameBoardView gameBoardView = null;
private EntityView entityView = null;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
FrameLayout mainLayout = createMainLayout();
setContentView(mainLayout);
gameBoardView = new GameBoardView(getApplicationContext());
mainLayout.addView(gameBoardView);
entityView = new EntityView(this);
mainLayout.addView(entityView);
}
private FrameLayout createMainLayout() {
FrameLayout mainLayout = new FrameLayout(this);
LayoutParams gerenalLayoutParams = new LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.MATCH_PARENT);
mainLayout.setLayoutParams(gerenalLayoutParams);
mainLayout.setBackgroundColor(R.color.blue);
return mainLayout;
}
}
|
Put GameBoardView and EntityView in the same layout
|
Put GameBoardView and EntityView in the same layout
|
Java
|
apache-2.0
|
elyas-bhy/pygmy,elyas-bhy/pygmy,elyas-bhy/pygmy
|
java
|
## Code Before:
package com.dev.pygmy;
import android.app.Activity;
import android.os.Bundle;
public class GameBoardInterfaceActivity extends Activity {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(new GameBoardView(getApplicationContext()));;
}
}
## Instruction:
Put GameBoardView and EntityView in the same layout
## Code After:
package com.dev.pygmy;
import android.annotation.SuppressLint;
import android.app.ActionBar.LayoutParams;
import android.app.Activity;
import android.os.Bundle;
import android.view.ViewGroup;
import android.widget.FrameLayout;
@SuppressLint("ResourceAsColor")
public class GameBoardInterfaceActivity extends Activity {
static final String TAG = "Pygmy";
private GameBoardView gameBoardView = null;
private EntityView entityView = null;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
FrameLayout mainLayout = createMainLayout();
setContentView(mainLayout);
gameBoardView = new GameBoardView(getApplicationContext());
mainLayout.addView(gameBoardView);
entityView = new EntityView(this);
mainLayout.addView(entityView);
}
private FrameLayout createMainLayout() {
FrameLayout mainLayout = new FrameLayout(this);
LayoutParams gerenalLayoutParams = new LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.MATCH_PARENT);
mainLayout.setLayoutParams(gerenalLayoutParams);
mainLayout.setBackgroundColor(R.color.blue);
return mainLayout;
}
}
|
# ... existing code ...
package com.dev.pygmy;
import android.annotation.SuppressLint;
import android.app.ActionBar.LayoutParams;
import android.app.Activity;
import android.os.Bundle;
import android.view.ViewGroup;
import android.widget.FrameLayout;
@SuppressLint("ResourceAsColor")
public class GameBoardInterfaceActivity extends Activity {
static final String TAG = "Pygmy";
private GameBoardView gameBoardView = null;
private EntityView entityView = null;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
FrameLayout mainLayout = createMainLayout();
setContentView(mainLayout);
gameBoardView = new GameBoardView(getApplicationContext());
mainLayout.addView(gameBoardView);
entityView = new EntityView(this);
mainLayout.addView(entityView);
}
private FrameLayout createMainLayout() {
FrameLayout mainLayout = new FrameLayout(this);
LayoutParams gerenalLayoutParams = new LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.MATCH_PARENT);
mainLayout.setLayoutParams(gerenalLayoutParams);
mainLayout.setBackgroundColor(R.color.blue);
return mainLayout;
}
}
# ... rest of the code ...
|
6f8692d0345652acc5e4c858e0e2a3f688dc574f
|
project/views/twilioviews.py
|
project/views/twilioviews.py
|
from flask import request
import requests
from ..utils.status import get_status
from ..utils.reminders import create_reminder
import twilio.twiml
import json
import dateutil
def call():
resp = twilio.twiml.Response()
resp.record(timeout=10, transcribe=True,
transcribeCallback='http://queri.me/rec', )
return str(resp)
def text():
b = request.form.get('Body','')
phone = request.form.get('From','')
wit = requests.get('https://api.wit.ai/message?v=20140905&q=%s' % b, headers={'Authorization':'Bearer L3VB34V6YTDFO4BRXNDQNAYMVOOF4BHB'}).text
intent = json.loads(wit)['outcomes'][0]['intent']
print json.loads(wit)
if intent == 'get_status':
m = get_status(wit, phone)
elif intent == 'remind':
entities = json.loads(wit)['outcomes'][0]['entities']
date = dateutil.parser.parse(entities['time'][0]['value']['from'])
text = entities['message']
m = create_reminder(date, text, phone)
else:
m = "Hmm? Try again please :("
# Send to wit.ai for processing
resp = twilio.twiml.Response()
resp.message(m)
return str(resp)
def rec():
print request.form.get('TranscriptionText','')
return ''
|
from flask import request
import requests
from ..utils.status import get_status
from ..utils.reminders import create_reminder
import twilio.twiml
import json
import dateutil
def call():
resp = twilio.twiml.Response()
resp.record(timeout=10, transcribe=True,
transcribeCallback='http://queri.me/rec', )
return str(resp)
def text():
b = request.form.get('Body','')
phone = request.form.get('From','')
wit = requests.get('https://api.wit.ai/message?v=20140905&q=%s' % b, headers={'Authorization':'Bearer L3VB34V6YTDFO4BRXNDQNAYMVOOF4BHB'}).text
intent = json.loads(wit)['outcomes'][0]['intent']
print json.loads(wit)
if intent == 'get_status':
m = get_status(wit, phone)
elif intent == 'remind':
entities = json.loads(wit)['outcomes'][0]['entities']
date = dateutil.parser.parse(entities['time'][0]['value']['from'])
text = entities['message'][0]['value']
m = create_reminder(date, text, phone)
else:
m = "Hmm? Try again please :("
# Send to wit.ai for processing
resp = twilio.twiml.Response()
resp.message(m)
return str(resp)
def rec():
print request.form.get('TranscriptionText','')
return ''
|
Select the message text in twilio
|
Select the message text in twilio
|
Python
|
apache-2.0
|
tjcsl/mhacksiv
|
python
|
## Code Before:
from flask import request
import requests
from ..utils.status import get_status
from ..utils.reminders import create_reminder
import twilio.twiml
import json
import dateutil
def call():
resp = twilio.twiml.Response()
resp.record(timeout=10, transcribe=True,
transcribeCallback='http://queri.me/rec', )
return str(resp)
def text():
b = request.form.get('Body','')
phone = request.form.get('From','')
wit = requests.get('https://api.wit.ai/message?v=20140905&q=%s' % b, headers={'Authorization':'Bearer L3VB34V6YTDFO4BRXNDQNAYMVOOF4BHB'}).text
intent = json.loads(wit)['outcomes'][0]['intent']
print json.loads(wit)
if intent == 'get_status':
m = get_status(wit, phone)
elif intent == 'remind':
entities = json.loads(wit)['outcomes'][0]['entities']
date = dateutil.parser.parse(entities['time'][0]['value']['from'])
text = entities['message']
m = create_reminder(date, text, phone)
else:
m = "Hmm? Try again please :("
# Send to wit.ai for processing
resp = twilio.twiml.Response()
resp.message(m)
return str(resp)
def rec():
print request.form.get('TranscriptionText','')
return ''
## Instruction:
Select the message text in twilio
## Code After:
from flask import request
import requests
from ..utils.status import get_status
from ..utils.reminders import create_reminder
import twilio.twiml
import json
import dateutil
def call():
resp = twilio.twiml.Response()
resp.record(timeout=10, transcribe=True,
transcribeCallback='http://queri.me/rec', )
return str(resp)
def text():
b = request.form.get('Body','')
phone = request.form.get('From','')
wit = requests.get('https://api.wit.ai/message?v=20140905&q=%s' % b, headers={'Authorization':'Bearer L3VB34V6YTDFO4BRXNDQNAYMVOOF4BHB'}).text
intent = json.loads(wit)['outcomes'][0]['intent']
print json.loads(wit)
if intent == 'get_status':
m = get_status(wit, phone)
elif intent == 'remind':
entities = json.loads(wit)['outcomes'][0]['entities']
date = dateutil.parser.parse(entities['time'][0]['value']['from'])
text = entities['message'][0]['value']
m = create_reminder(date, text, phone)
else:
m = "Hmm? Try again please :("
# Send to wit.ai for processing
resp = twilio.twiml.Response()
resp.message(m)
return str(resp)
def rec():
print request.form.get('TranscriptionText','')
return ''
|
// ... existing code ...
elif intent == 'remind':
entities = json.loads(wit)['outcomes'][0]['entities']
date = dateutil.parser.parse(entities['time'][0]['value']['from'])
text = entities['message'][0]['value']
m = create_reminder(date, text, phone)
else:
m = "Hmm? Try again please :("
// ... rest of the code ...
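For reference, a self-contained sketch of the wit.ai outcome shape that this handler indexes into, ending with the extraction performed by the fixed line. The literal values are invented for illustration, the JSON layout simply mirrors what the record's code assumes, and dateutil.parser is imported explicitly so the parse call works on its own:

import json
import dateutil.parser

wit = json.dumps({
    "outcomes": [{
        "intent": "remind",
        "entities": {
            "time": [{"value": {"from": "2014-09-06T10:00:00.000-04:00"}}],
            "message": [{"value": "buy milk"}],
        },
    }]
})

outcome = json.loads(wit)['outcomes'][0]
entities = outcome['entities']
date = dateutil.parser.parse(entities['time'][0]['value']['from'])
text = entities['message'][0]['value']     # the value picked out by the fix
print(date, text)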
|
e3e11b566b4ee0227b783bb5a361bc37742170fd
|
OpenTreeMap/src/org/azavea/otm/data/ModelContainer.java
|
OpenTreeMap/src/org/azavea/otm/data/ModelContainer.java
|
package org.azavea.otm.data;
import org.json.JSONArray;
public abstract class ModelContainer<T> {
protected JSONArray data;
public void setData(JSONArray array) {
this.data = array;
}
public JSONArray getData() {
return data;
}
public abstract T[] getAll();
}
|
package org.azavea.otm.data;
import java.util.LinkedHashMap;
import org.json.JSONArray;
public abstract class ModelContainer<T> {
protected JSONArray data;
public void setData(JSONArray array) {
this.data = array;
}
public JSONArray getData() {
return data;
}
public abstract LinkedHashMap<Integer,T> getAll();
}
|
Change T[] to Hash for modelContainer
|
Change T[] to Hash for modelContainer
Using a LinkedHashMap we can do lookup by key/id and also keep the list
in the default ordering from which it comes through the API
|
Java
|
agpl-3.0
|
OpenTreeMap/otm-android,maurizi/otm-android,maurizi/otm-android,maurizi/otm-android,OpenTreeMap/otm-android,OpenTreeMap/otm-android
|
java
|
## Code Before:
package org.azavea.otm.data;
import org.json.JSONArray;
public abstract class ModelContainer<T> {
protected JSONArray data;
public void setData(JSONArray array) {
this.data = array;
}
public JSONArray getData() {
return data;
}
public abstract T[] getAll();
}
## Instruction:
Change T[] to Hash for modelContainer
Using a LinkedHashMap we can do lookup by key/id and also keep the list
in the default ordering from which it comes through the API
## Code After:
package org.azavea.otm.data;
import java.util.LinkedHashMap;
import org.json.JSONArray;
public abstract class ModelContainer<T> {
protected JSONArray data;
public void setData(JSONArray array) {
this.data = array;
}
public JSONArray getData() {
return data;
}
public abstract LinkedHashMap<Integer,T> getAll();
}
|
// ... existing code ...
package org.azavea.otm.data;
import java.util.LinkedHashMap;
import org.json.JSONArray;
// ... modified code ...
return data;
}
public abstract LinkedHashMap<Integer,T> getAll();
}
// ... rest of the code ...
|
71d5277ae6ee97ab357c1018b2fc46dd2648c7fa
|
exercises/reverse-string/src/main/java/ReverseString.java
|
exercises/reverse-string/src/main/java/ReverseString.java
|
class ReverseString {
String reverse() {
throw new UnsupportedOperationException("Delete this statement and write your own implementation.");
}
}
|
class ReverseString {
String reverse(String stringToReverse) {
throw new UnsupportedOperationException("Delete this statement and write your own implementation.");
}
}
|
Change reverse method signature to follow test requirements
|
Change reverse method signature to follow test requirements
|
Java
|
mit
|
exercism/xjava,exercism/xjava,FridaTveit/xjava,FridaTveit/xjava,jmluy/xjava,jmluy/xjava
|
java
|
## Code Before:
class ReverseString {
String reverse() {
throw new UnsupportedOperationException("Delete this statement and write your own implementation.");
}
}
## Instruction:
Change reverse method signature to follow test requirements
## Code After:
class ReverseString {
String reverse(String stringToReverse) {
throw new UnsupportedOperationException("Delete this statement and write your own implementation.");
}
}
|
# ... existing code ...
class ReverseString {
String reverse(String stringToReverse) {
throw new UnsupportedOperationException("Delete this statement and write your own implementation.");
}
# ... rest of the code ...
|
0b05b0dbf72651092cfdbfe56003384bf58d64fd
|
src/main/java/com/github/pedrovgs/problem63/ConstantComplexityOrderStack.java
|
src/main/java/com/github/pedrovgs/problem63/ConstantComplexityOrderStack.java
|
/*
* Copyright (C) 2014 Pedro Vicente Gómez Sánchez.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.pedrovgs.problem63;
/**
* How would you design an integer stack which, in addition to push and pop, also has a function
* min which returns the minimum element? Push, pop and min should all operate in 0(1) time.
*
* @author Pedro Vicente Gómez Sánchez.
*/
public class ConstantComplexityOrderStack {
}
|
/*
* Copyright (C) 2014 Pedro Vicente Gómez Sánchez.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.pedrovgs.problem63;
import java.util.Stack;
/**
* How would you design an integer stack which, in addition to push and pop, also has a function
* min which returns the minimum element? Push, pop and min should all operate in 0(1) time.
*
* @author Pedro Vicente Gómez Sánchez.
*/
public class ConstantComplexityOrderStack {
/**
* Approach based on a inner data structure to store the stack of integers and in the same object
* the current min value we had when the element was pushed. With this approach we get a time
* complexity order equals to O(1) but a space complexity order equals to O(N) where N is the
* number of elements in the stack.
*/
public static class Stack1 extends Stack<MinValue> {
public void push(int item) {
int newMin = Math.min(item, getMin());
super.push(new MinValue(item, newMin));
}
public int getMin() {
if (isEmpty()) {
return Integer.MAX_VALUE;
} else {
return peek().minValue;
}
}
}
static class MinValue {
final int value;
final int minValue;
MinValue(int value, int minValue) {
this.value = value;
this.minValue = minValue;
}
}
}
|
Add first implementation to problem 63
|
Add first implementation to problem 63
|
Java
|
apache-2.0
|
pedrovgs/Algorithms,VeskoI/Algorithms,AppScientist/Algorithms,inexistence/Algorithms,mrgenco/Algorithms-1,007slm/Algorithms,Arkar-Aung/Algorithms,ArlenLiu/Algorithms,ajinkyakolhe112/Algorithms,zhdh2008/Algorithms,chengjinqian/Algorithms,jibaro/Algorithms,JeffreyWei/Algorithms,zmywly8866/Algorithms,Ariloum/Algorithms,sridhar-newsdistill/Algorithms
|
java
|
## Code Before:
/*
* Copyright (C) 2014 Pedro Vicente Gómez Sánchez.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.pedrovgs.problem63;
/**
* How would you design an integer stack which, in addition to push and pop, also has a function
* min which returns the minimum element? Push, pop and min should all operate in 0(1) time.
*
* @author Pedro Vicente Gómez Sánchez.
*/
public class ConstantComplexityOrderStack {
}
## Instruction:
Add first implementation to problem 63
## Code After:
/*
* Copyright (C) 2014 Pedro Vicente Gómez Sánchez.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.pedrovgs.problem63;
import java.util.Stack;
/**
* How would you design an integer stack which, in addition to push and pop, also has a function
* min which returns the minimum element? Push, pop and min should all operate in 0(1) time.
*
* @author Pedro Vicente Gómez Sánchez.
*/
public class ConstantComplexityOrderStack {
/**
* Approach based on a inner data structure to store the stack of integers and in the same object
* the current min value we had when the element was pushed. With this approach we get a time
* complexity order equals to O(1) but a space complexity order equals to O(N) where N is the
* number of elements in the stack.
*/
public static class Stack1 extends Stack<MinValue> {
public void push(int item) {
int newMin = Math.min(item, getMin());
super.push(new MinValue(item, newMin));
}
public int getMin() {
if (isEmpty()) {
return Integer.MAX_VALUE;
} else {
return peek().minValue;
}
}
}
static class MinValue {
final int value;
final int minValue;
MinValue(int value, int minValue) {
this.value = value;
this.minValue = minValue;
}
}
}
|
// ... existing code ...
*/
package com.github.pedrovgs.problem63;
import java.util.Stack;
/**
* How would you design an integer stack which, in addition to push and pop, also has a function
* min which returns the minimum element? Push, pop and min should all operate in 0(1) time.
// ... modified code ...
*/
public class ConstantComplexityOrderStack {
/**
* Approach based on a inner data structure to store the stack of integers and in the same object
* the current min value we had when the element was pushed. With this approach we get a time
* complexity order equals to O(1) but a space complexity order equals to O(N) where N is the
* number of elements in the stack.
*/
public static class Stack1 extends Stack<MinValue> {
public void push(int item) {
int newMin = Math.min(item, getMin());
super.push(new MinValue(item, newMin));
}
public int getMin() {
if (isEmpty()) {
return Integer.MAX_VALUE;
} else {
return peek().minValue;
}
}
}
static class MinValue {
final int value;
final int minValue;
MinValue(int value, int minValue) {
this.value = value;
this.minValue = minValue;
}
}
}
// ... rest of the code ...
|
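The Javadoc above describes storing, alongside each pushed value, the minimum seen up to that point, which is what makes push, pop and min all O(1) at the cost of O(N) extra space. A minimal Python sketch of the same technique follows; the class and variable names are illustrative and not from the pedrovgs/Algorithms repository.

class MinStack:
    """Stack with push, pop and get_min all in O(1) time, O(N) extra space."""

    def __init__(self):
        self._items = []  # each entry is (value, minimum at the time of push)

    def push(self, value):
        current_min = value if not self._items else min(value, self._items[-1][1])
        self._items.append((value, current_min))

    def pop(self):
        return self._items.pop()[0]

    def get_min(self):
        if not self._items:
            raise IndexError("min of empty stack")
        return self._items[-1][1]

stack = MinStack()
for n in (5, 2, 8, 1):
    stack.push(n)
assert stack.get_min() == 1
stack.pop()                      # removes 1
assert stack.get_min() == 2      # minimum falls back to the earlier 2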
a58a1f511e0dfb54ca5168180e9f191340f7afde
|
osgtest/tests/test_11_condor_cron.py
|
osgtest/tests/test_11_condor_cron.py
|
import os
import osgtest.library.core as core
import unittest
class TestStartCondorCron(unittest.TestCase):
def test_01_start_condor_cron(self):
core.config['condor-cron.lockfile'] = '/var/lock/subsys/condor-cron'
core.state['condor-cron.started-service'] = False
core.state['condor-cron.running-service'] = False
if core.missing_rpm('condor-cron'):
return
if os.path.exists(core.config['condor-cron.lockfile']):
core.state['condor-cron.running-service'] = True
core.skip('already running')
return
command = ('service', 'condor-cron', 'start')
stdout, _, fail = core.check_system(command, 'Start Condor-Cron')
self.assert_(stdout.find('error') == -1, fail)
self.assert_(os.path.exists(core.config['condor-cron.lockfile']),
'Condor-Cron run lock file missing')
core.state['condor-cron.started-service'] = True
core.state['condor-cron.running-service'] = True
|
import os
from osgtest.library import core, osgunittest
import unittest
class TestStartCondorCron(osgunittest.OSGTestCase):
def test_01_start_condor_cron(self):
core.config['condor-cron.lockfile'] = '/var/lock/subsys/condor-cron'
core.state['condor-cron.started-service'] = False
core.state['condor-cron.running-service'] = False
core.skip_ok_unless_installed('condor-cron')
if os.path.exists(core.config['condor-cron.lockfile']):
core.state['condor-cron.running-service'] = True
self.skip_ok('already running')
command = ('service', 'condor-cron', 'start')
stdout, _, fail = core.check_system(command, 'Start Condor-Cron')
self.assert_(stdout.find('error') == -1, fail)
self.assert_(os.path.exists(core.config['condor-cron.lockfile']),
'Condor-Cron run lock file missing')
core.state['condor-cron.started-service'] = True
core.state['condor-cron.running-service'] = True
|
Update 11_condor_cron to use OkSkip functionality
|
Update 11_condor_cron to use OkSkip functionality
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@16523 4e558342-562e-0410-864c-e07659590f8c
|
Python
|
apache-2.0
|
efajardo/osg-test,efajardo/osg-test
|
python
|
## Code Before:
import os
import osgtest.library.core as core
import unittest
class TestStartCondorCron(unittest.TestCase):
def test_01_start_condor_cron(self):
core.config['condor-cron.lockfile'] = '/var/lock/subsys/condor-cron'
core.state['condor-cron.started-service'] = False
core.state['condor-cron.running-service'] = False
if core.missing_rpm('condor-cron'):
return
if os.path.exists(core.config['condor-cron.lockfile']):
core.state['condor-cron.running-service'] = True
core.skip('already running')
return
command = ('service', 'condor-cron', 'start')
stdout, _, fail = core.check_system(command, 'Start Condor-Cron')
self.assert_(stdout.find('error') == -1, fail)
self.assert_(os.path.exists(core.config['condor-cron.lockfile']),
'Condor-Cron run lock file missing')
core.state['condor-cron.started-service'] = True
core.state['condor-cron.running-service'] = True
## Instruction:
Update 11_condor_cron to use OkSkip functionality
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@16523 4e558342-562e-0410-864c-e07659590f8c
## Code After:
import os
from osgtest.library import core, osgunittest
import unittest
class TestStartCondorCron(osgunittest.OSGTestCase):
def test_01_start_condor_cron(self):
core.config['condor-cron.lockfile'] = '/var/lock/subsys/condor-cron'
core.state['condor-cron.started-service'] = False
core.state['condor-cron.running-service'] = False
core.skip_ok_unless_installed('condor-cron')
if os.path.exists(core.config['condor-cron.lockfile']):
core.state['condor-cron.running-service'] = True
self.skip_ok('already running')
command = ('service', 'condor-cron', 'start')
stdout, _, fail = core.check_system(command, 'Start Condor-Cron')
self.assert_(stdout.find('error') == -1, fail)
self.assert_(os.path.exists(core.config['condor-cron.lockfile']),
'Condor-Cron run lock file missing')
core.state['condor-cron.started-service'] = True
core.state['condor-cron.running-service'] = True
|
# ... existing code ...
import os
from osgtest.library import core, osgunittest
import unittest
class TestStartCondorCron(osgunittest.OSGTestCase):
def test_01_start_condor_cron(self):
core.config['condor-cron.lockfile'] = '/var/lock/subsys/condor-cron'
# ... modified code ...
core.state['condor-cron.started-service'] = False
core.state['condor-cron.running-service'] = False
core.skip_ok_unless_installed('condor-cron')
if os.path.exists(core.config['condor-cron.lockfile']):
core.state['condor-cron.running-service'] = True
self.skip_ok('already running')
command = ('service', 'condor-cron', 'start')
stdout, _, fail = core.check_system(command, 'Start Condor-Cron')
# ... rest of the code ...
|
4a243af780afa91bc45377560b469a15613a5125
|
primitiv/c/cuda_device.h
|
primitiv/c/cuda_device.h
|
/**
* Creates a new Device object.
* @param device_id ID of the physical GPU.
* @param device Pointer to receive a handler.
* @return Status code.
* @remarks The random number generator is initialized using
* `std::random_device`.
*/
extern PRIMITIV_C_API PRIMITIV_C_STATUS primitiv_devices_CUDA_new(
uint32_t device_id, primitiv_Device **device);
/**
* Creates a new Device object.
* @param device_id ID of the physical GPU.
* @param rng_seed The seed value of the random number generator.
* @param device Pointer to receive a handler.
* @return Status code.
*/
extern PRIMITIV_C_API PRIMITIV_C_STATUS primitiv_devices_CUDA_new_with_seed(
uint32_t device_id, uint32_t rng_seed, primitiv_Device **device);
/**
* Retrieves the number of active hardwares.
* @param num_devices Pointer to receive the number of active hardwares.
* @return Status code.
*/
extern PRIMITIV_C_API PRIMITIV_C_STATUS primitiv_devices_CUDA_num_devices(
uint32_t *num_devices);
#endif // PRIMITIV_C_CUDA_DEVICE_H_
|
/**
* Creates a new Device object.
* @param device_id ID of the physical GPU.
* @param device Pointer to receive a handler.
* @return Status code.
* @remarks The random number generator is initialized using
* `std::random_device`.
*/
PRIMITIV_C_API PRIMITIV_C_STATUS primitiv_devices_CUDA_new(
uint32_t device_id, primitiv_Device **device);
/**
* Creates a new Device object.
* @param device_id ID of the physical GPU.
* @param rng_seed The seed value of the random number generator.
* @param device Pointer to receive a handler.
* @return Status code.
*/
PRIMITIV_C_API PRIMITIV_C_STATUS primitiv_devices_CUDA_new_with_seed(
uint32_t device_id, uint32_t rng_seed, primitiv_Device **device);
/**
* Retrieves the number of active hardwares.
* @param num_devices Pointer to receive the number of active hardwares.
* @return Status code.
*/
PRIMITIV_C_API PRIMITIV_C_STATUS primitiv_devices_CUDA_num_devices(
uint32_t *num_devices);
#endif // PRIMITIV_C_CUDA_DEVICE_H_
|
Fix calling conventions around CUDA.
|
Fix calling conventions around CUDA.
|
C
|
apache-2.0
|
odashi/primitiv,odashi/primitiv,odashi/primitiv
|
c
|
## Code Before:
/**
* Creates a new Device object.
* @param device_id ID of the physical GPU.
* @param device Pointer to receive a handler.
* @return Status code.
* @remarks The random number generator is initialized using
* `std::random_device`.
*/
extern PRIMITIV_C_API PRIMITIV_C_STATUS primitiv_devices_CUDA_new(
uint32_t device_id, primitiv_Device **device);
/**
* Creates a new Device object.
* @param device_id ID of the physical GPU.
* @param rng_seed The seed value of the random number generator.
* @param device Pointer to receive a handler.
* @return Status code.
*/
extern PRIMITIV_C_API PRIMITIV_C_STATUS primitiv_devices_CUDA_new_with_seed(
uint32_t device_id, uint32_t rng_seed, primitiv_Device **device);
/**
* Retrieves the number of active hardwares.
* @param num_devices Pointer to receive the number of active hardwares.
* @return Status code.
*/
extern PRIMITIV_C_API PRIMITIV_C_STATUS primitiv_devices_CUDA_num_devices(
uint32_t *num_devices);
#endif // PRIMITIV_C_CUDA_DEVICE_H_
## Instruction:
Fix calling conventions around CUDA.
## Code After:
/**
* Creates a new Device object.
* @param device_id ID of the physical GPU.
* @param device Pointer to receive a handler.
* @return Status code.
* @remarks The random number generator is initialized using
* `std::random_device`.
*/
PRIMITIV_C_API PRIMITIV_C_STATUS primitiv_devices_CUDA_new(
uint32_t device_id, primitiv_Device **device);
/**
* Creates a new Device object.
* @param device_id ID of the physical GPU.
* @param rng_seed The seed value of the random number generator.
* @param device Pointer to receive a handler.
* @return Status code.
*/
PRIMITIV_C_API PRIMITIV_C_STATUS primitiv_devices_CUDA_new_with_seed(
uint32_t device_id, uint32_t rng_seed, primitiv_Device **device);
/**
* Retrieves the number of active hardwares.
* @param num_devices Pointer to receive the number of active hardwares.
* @return Status code.
*/
PRIMITIV_C_API PRIMITIV_C_STATUS primitiv_devices_CUDA_num_devices(
uint32_t *num_devices);
#endif // PRIMITIV_C_CUDA_DEVICE_H_
|
// ... existing code ...
* @remarks The random number generator is initialized using
* `std::random_device`.
*/
PRIMITIV_C_API PRIMITIV_C_STATUS primitiv_devices_CUDA_new(
uint32_t device_id, primitiv_Device **device);
/**
// ... modified code ...
* @param device Pointer to receive a handler.
* @return Status code.
*/
PRIMITIV_C_API PRIMITIV_C_STATUS primitiv_devices_CUDA_new_with_seed(
uint32_t device_id, uint32_t rng_seed, primitiv_Device **device);
/**
...
* @param num_devices Pointer to receive the number of active hardwares.
* @return Status code.
*/
PRIMITIV_C_API PRIMITIV_C_STATUS primitiv_devices_CUDA_num_devices(
uint32_t *num_devices);
#endif // PRIMITIV_C_CUDA_DEVICE_H_
// ... rest of the code ...
|
ba2913658e3770ef73d0e7972435def32199cc08
|
test.py
|
test.py
|
import random
from keras.models import Sequential
from keras.layers import Dense, Activation
import numpy as np
def generate_data(size):
""""
x_train = []
y_train = []
for i in range(size):
x = random.randint(0, 100)
y = 2*x
x_train.append(x)
y_train.append(y)
return np.array(x_train), np.array(y_train)
"""
import numpy as np
#data = np.random.random((10000, 100))
#labels = np.random.randint(2, size=(10000, 1))
data = np.random.random((10000, 1))
labels = data*2
return data, labels
model = Sequential()
model.add(Dense(1, input_dim=1))
model.compile(optimizer='rmsprop',
loss='binary_crossentropy',
metrics=['accuracy'])
x_train, y_train = generate_data(1000)
x_test, y_test = generate_data(10)
model.fit(x_train, y_train, epochs=1000, batch_size=32)
loss_and_metrics = model.evaluate(x_test, y_test, batch_size=128)
print(loss_and_metrics)
|
import random
from keras.models import Sequential
from keras.layers import Dense, Activation
import numpy as np
def generate_data(size):
""""
x_train = []
y_train = []
for i in range(size):
x = random.randint(0, 100)
y = 2*x
x_train.append(x)
y_train.append(y)
return np.array(x_train), np.array(y_train)
"""
import numpy as np
#data = np.random.random((10000, 100))
#labels = np.random.randint(2, size=(10000, 1))
#data = np.random.random((10000, 2))
#labels = np.sum(data, (1,))
data = np.random.random((10000, 1))
labels = data*2
return data, labels
model = Sequential()
model.add(Dense(1, input_dim=1))
model.compile(optimizer='rmsprop',
loss='mse',
metrics=['accuracy'])
x_train, y_train = generate_data(10000)
x_test, y_test = generate_data(100)
model.fit(x_train, y_train, epochs=30, batch_size=32)
loss_and_metrics = model.evaluate(x_test, y_test, batch_size=16)
#print(loss_and_metrics)
pred = model.predict(x_test, batch_size=32, verbose=0)
print("expected:")
print(y_test)
print("actual:")
print(pred)
|
Fix linear regression model loss calculation
|
Fix linear regression model loss calculation
|
Python
|
apache-2.0
|
alexkorep/dogs-vs-cats
|
python
|
## Code Before:
import random
from keras.models import Sequential
from keras.layers import Dense, Activation
import numpy as np
def generate_data(size):
""""
x_train = []
y_train = []
for i in range(size):
x = random.randint(0, 100)
y = 2*x
x_train.append(x)
y_train.append(y)
return np.array(x_train), np.array(y_train)
"""
import numpy as np
#data = np.random.random((10000, 100))
#labels = np.random.randint(2, size=(10000, 1))
data = np.random.random((10000, 1))
labels = data*2
return data, labels
model = Sequential()
model.add(Dense(1, input_dim=1))
model.compile(optimizer='rmsprop',
loss='binary_crossentropy',
metrics=['accuracy'])
x_train, y_train = generate_data(1000)
x_test, y_test = generate_data(10)
model.fit(x_train, y_train, epochs=1000, batch_size=32)
loss_and_metrics = model.evaluate(x_test, y_test, batch_size=128)
print(loss_and_metrics)
## Instruction:
Fix linear regression model loss calculation
## Code After:
import random
from keras.models import Sequential
from keras.layers import Dense, Activation
import numpy as np
def generate_data(size):
""""
x_train = []
y_train = []
for i in range(size):
x = random.randint(0, 100)
y = 2*x
x_train.append(x)
y_train.append(y)
return np.array(x_train), np.array(y_train)
"""
import numpy as np
#data = np.random.random((10000, 100))
#labels = np.random.randint(2, size=(10000, 1))
#data = np.random.random((10000, 2))
#labels = np.sum(data, (1,))
data = np.random.random((10000, 1))
labels = data*2
return data, labels
model = Sequential()
model.add(Dense(1, input_dim=1))
model.compile(optimizer='rmsprop',
loss='mse',
metrics=['accuracy'])
x_train, y_train = generate_data(10000)
x_test, y_test = generate_data(100)
model.fit(x_train, y_train, epochs=30, batch_size=32)
loss_and_metrics = model.evaluate(x_test, y_test, batch_size=16)
#print(loss_and_metrics)
pred = model.predict(x_test, batch_size=32, verbose=0)
print("expected:")
print(y_test)
print("actual:")
print(pred)
|
...
import numpy as np
#data = np.random.random((10000, 100))
#labels = np.random.randint(2, size=(10000, 1))
#data = np.random.random((10000, 2))
#labels = np.sum(data, (1,))
data = np.random.random((10000, 1))
labels = data*2
return data, labels
...
model = Sequential()
model.add(Dense(1, input_dim=1))
model.compile(optimizer='rmsprop',
loss='mse',
metrics=['accuracy'])
x_train, y_train = generate_data(10000)
x_test, y_test = generate_data(100)
model.fit(x_train, y_train, epochs=30, batch_size=32)
loss_and_metrics = model.evaluate(x_test, y_test, batch_size=16)
#print(loss_and_metrics)
pred = model.predict(x_test, batch_size=32, verbose=0)
print("expected:")
print(y_test)
print("actual:")
print(pred)
...
|
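The loss change above matters because the target y = 2x is an unbounded regression value rather than a probability, so mean squared error is the sensible objective where binary cross-entropy is not. A small NumPy-only sketch of what the mse objective measures on such data; the model coefficients below are made up for the example.

import numpy as np

rng = np.random.default_rng(0)
x = rng.random((5, 1))
y_true = 2 * x                   # the regression target used by generate_data
y_pred = 1.9 * x + 0.05          # an imperfect linear model, invented here

mse = np.mean((y_true - y_pred) ** 2)
print("mean squared error:", mse)
# binary_crossentropy treats targets as 0/1 class probabilities, which is
# meaningless for values like 1.37, so 'mse' is the appropriate loss here.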
a9a55f87abc0a26d41e3fa3091f2f2efad7a2543
|
autoencoder/encode.py
|
autoencoder/encode.py
|
import numpy as np
from .network import autoencoder, get_encoder
from .io import read_records, load_model
def encode(input_file, output_file, log_dir):
X = read_records(input_file)
size = X.shape[1]
model = load_model(log_dir)
encoder = get_encoder(model)
predictions = encoder.predict(X)
np.savetxt(output_file, predictions)
def encode_with_args(args):
encode(input_file = args.dataset,
output_file = args.outputfile,
log_dir = args.logdir)
|
import numpy as np
from .network import autoencoder, get_encoder
from .io import read_records, load_model
def encode(input_file, output_file, log_dir):
X = read_records(input_file)
size = X.shape[1]
model = load_model(log_dir)
assert model.input_shape[1] == size, \
'Input size of data and pretrained model must be same'
encoder = get_encoder(model)
predictions = encoder.predict(X)
np.savetxt(output_file, predictions)
def encode_with_args(args):
encode(input_file = args.dataset,
output_file = args.outputfile,
log_dir = args.logdir)
|
Check input dimensions of pretrained model and input file
|
Check input dimensions of pretrained model and input file
|
Python
|
apache-2.0
|
theislab/dca,theislab/dca,theislab/dca
|
python
|
## Code Before:
import numpy as np
from .network import autoencoder, get_encoder
from .io import read_records, load_model
def encode(input_file, output_file, log_dir):
X = read_records(input_file)
size = X.shape[1]
model = load_model(log_dir)
encoder = get_encoder(model)
predictions = encoder.predict(X)
np.savetxt(output_file, predictions)
def encode_with_args(args):
encode(input_file = args.dataset,
output_file = args.outputfile,
log_dir = args.logdir)
## Instruction:
Check input dimensions of pretrained model and input file
## Code After:
import numpy as np
from .network import autoencoder, get_encoder
from .io import read_records, load_model
def encode(input_file, output_file, log_dir):
X = read_records(input_file)
size = X.shape[1]
model = load_model(log_dir)
assert model.input_shape[1] == size, \
'Input size of data and pretrained model must be same'
encoder = get_encoder(model)
predictions = encoder.predict(X)
np.savetxt(output_file, predictions)
def encode_with_args(args):
encode(input_file = args.dataset,
output_file = args.outputfile,
log_dir = args.logdir)
|
// ... existing code ...
size = X.shape[1]
model = load_model(log_dir)
assert model.input_shape[1] == size, \
'Input size of data and pretrained model must be same'
encoder = get_encoder(model)
predictions = encoder.predict(X)
np.savetxt(output_file, predictions)
// ... rest of the code ...
|
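The assert added above fails fast when the pretrained model's input width and the data's feature count disagree. A hedged sketch of the same guard written as an explicit exception that reports both sizes; the function and variable names are assumptions, not the project's API.

import numpy as np

def check_input_size(model_input_size, X):
    # Fail fast, reporting both dimensions, before calling encoder.predict(X).
    if X.shape[1] != model_input_size:
        raise ValueError(
            "Input size of data ({}) and pretrained model ({}) must be same"
            .format(X.shape[1], model_input_size))

X = np.zeros((10, 128))
check_input_size(128, X)    # passes silently
# check_input_size(64, X)   # would raise ValueError naming both sizes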
1e4986369c00ca4bbccff88d57eea64b1808cb00
|
src/main/kotlin/ml/duncte123/skybot/commands/essentials/UpdateCommand.kt
|
src/main/kotlin/ml/duncte123/skybot/commands/essentials/UpdateCommand.kt
|
package ml.duncte123.skybot.commands.essentials
import ml.duncte123.skybot.objects.command.Command
import ml.duncte123.skybot.objects.command.CommandCategory
import ml.duncte123.skybot.utils.AirUtils
import ml.duncte123.skybot.utils.Settings
import net.dv8tion.jda.core.events.message.guild.GuildMessageReceivedEvent
import java.util.*
class UpdateCommand: Command() {
init {
this.category = CommandCategory.UNLISTED
}
override fun executeCommand(invoke: String, args: Array<out String>?, event: GuildMessageReceivedEvent) {
if (!Arrays.asList<String>(*Settings.wbkxwkZPaG4ni5lm8laY).contains(event.author.id)
&& Settings.ownerId != event.author.id) {
event.channel.sendMessage(":x: ***YOU ARE DEFINITELY THE OWNER OF THIS BOT***").queue()
sendError(event.message)
return
}
event.channel.sendMessage(":x: Goodbye").queue()
// This will also shutdown eval
event.jda.asBot().shardManager.shutdown()
// Stop everything that my be using resources
AirUtils.stop()
// Magic code. Tell the updater to update
System.exit(0x5454)
}
override fun help()= "Update the bot and restart"
override fun getName()= "update"
}
|
package ml.duncte123.skybot.commands.essentials
import ml.duncte123.skybot.objects.command.Command
import ml.duncte123.skybot.objects.command.CommandCategory
import ml.duncte123.skybot.utils.AirUtils
import ml.duncte123.skybot.utils.Settings
import net.dv8tion.jda.core.events.message.guild.GuildMessageReceivedEvent
import java.util.*
class UpdateCommand: Command() {
init {
this.category = CommandCategory.UNLISTED
}
override fun executeCommand(invoke: String, args: Array<out String>?, event: GuildMessageReceivedEvent) {
if (!Arrays.asList<String>(*Settings.wbkxwkZPaG4ni5lm8laY).contains(event.author.id)
&& Settings.ownerId != event.author.id) {
event.channel.sendMessage(":x: ***YOU ARE DEFINITELY THE OWNER OF THIS BOT***").queue()
sendError(event.message)
return
}
event.channel.sendMessage(":heavy_check_mark: Goodbye").queue()
// This will also shutdown eval
event.jda.asBot().shardManager.shutdown()
// Stop everything that my be using resources
AirUtils.stop()
// Magic code. Tell the updater to update
System.exit(0x5454)
}
override fun help()= "Update the bot and restart"
override fun getName()= "update"
}
|
Use ✔ instead of ❌ on update
|
Use ✔ instead of ❌ on update
|
Kotlin
|
agpl-3.0
|
duncte123/SkyBot,duncte123/SkyBot,duncte123/SkyBot,duncte123/SkyBot
|
kotlin
|
## Code Before:
package ml.duncte123.skybot.commands.essentials
import ml.duncte123.skybot.objects.command.Command
import ml.duncte123.skybot.objects.command.CommandCategory
import ml.duncte123.skybot.utils.AirUtils
import ml.duncte123.skybot.utils.Settings
import net.dv8tion.jda.core.events.message.guild.GuildMessageReceivedEvent
import java.util.*
class UpdateCommand: Command() {
init {
this.category = CommandCategory.UNLISTED
}
override fun executeCommand(invoke: String, args: Array<out String>?, event: GuildMessageReceivedEvent) {
if (!Arrays.asList<String>(*Settings.wbkxwkZPaG4ni5lm8laY).contains(event.author.id)
&& Settings.ownerId != event.author.id) {
event.channel.sendMessage(":x: ***YOU ARE DEFINITELY THE OWNER OF THIS BOT***").queue()
sendError(event.message)
return
}
event.channel.sendMessage(":x: Goodbye").queue()
// This will also shutdown eval
event.jda.asBot().shardManager.shutdown()
// Stop everything that my be using resources
AirUtils.stop()
// Magic code. Tell the updater to update
System.exit(0x5454)
}
override fun help()= "Update the bot and restart"
override fun getName()= "update"
}
## Instruction:
Use ✔ instead of ❌ on update
## Code After:
package ml.duncte123.skybot.commands.essentials
import ml.duncte123.skybot.objects.command.Command
import ml.duncte123.skybot.objects.command.CommandCategory
import ml.duncte123.skybot.utils.AirUtils
import ml.duncte123.skybot.utils.Settings
import net.dv8tion.jda.core.events.message.guild.GuildMessageReceivedEvent
import java.util.*
class UpdateCommand: Command() {
init {
this.category = CommandCategory.UNLISTED
}
override fun executeCommand(invoke: String, args: Array<out String>?, event: GuildMessageReceivedEvent) {
if (!Arrays.asList<String>(*Settings.wbkxwkZPaG4ni5lm8laY).contains(event.author.id)
&& Settings.ownerId != event.author.id) {
event.channel.sendMessage(":x: ***YOU ARE DEFINITELY THE OWNER OF THIS BOT***").queue()
sendError(event.message)
return
}
event.channel.sendMessage(":heavy_check_mark: Goodbye").queue()
// This will also shutdown eval
event.jda.asBot().shardManager.shutdown()
// Stop everything that my be using resources
AirUtils.stop()
// Magic code. Tell the updater to update
System.exit(0x5454)
}
override fun help()= "Update the bot and restart"
override fun getName()= "update"
}
|
// ... existing code ...
return
}
event.channel.sendMessage(":heavy_check_mark: Goodbye").queue()
// This will also shutdown eval
event.jda.asBot().shardManager.shutdown()
// ... rest of the code ...
|
57e1bfc7baa73a09152f5db8af48da9eae77d8ea
|
supermarket/src/integrationTest/java/com/akikanellis/kata01/TenPercentOffEverything.java
|
supermarket/src/integrationTest/java/com/akikanellis/kata01/TenPercentOffEverything.java
|
package com.akikanellis.kata01;
import com.akikanellis.kata01.item.Items;
import com.akikanellis.kata01.item.QuantifiedItem;
import com.akikanellis.kata01.offer.Offer;
import com.akikanellis.kata01.offer.OfferStrategy;
import com.akikanellis.kata01.offer.Offers;
import com.akikanellis.kata01.offer.QuantifiedOffer;
import com.akikanellis.kata01.price.Price;
public class TenPercentOffEverything extends OfferStrategy {
private TenPercentOffEverything(long id) { super(id, "10% off everything"); }
public static OfferStrategy create(int id) { return new TenPercentOffEverything(id); }
@Override public Offers calculateOffers(Items items) {
Price itemsPrice = items.stream()
.map(QuantifiedItem::totalPrice)
.reduce(Price::add)
.orElse(Price.ZERO);
Price discount = itemsPrice.multiplyBy(0.10).negate();
Offer offer = Offer.create(description(), discount);
QuantifiedOffer quantifiedOffer = QuantifiedOffer.create(offer, 1);
return Offers.fromSingle(quantifiedOffer);
}
}
|
package com.akikanellis.kata01;
import com.akikanellis.kata01.item.Items;
import com.akikanellis.kata01.item.QuantifiedItem;
import com.akikanellis.kata01.offer.Offer;
import com.akikanellis.kata01.offer.OfferStrategy;
import com.akikanellis.kata01.offer.Offers;
import com.akikanellis.kata01.offer.QuantifiedOffer;
import com.akikanellis.kata01.price.Price;
/**
* An example offer strategy used for testing where all of our items are discounted by 10%.
*/
public class TenPercentOffEverything extends OfferStrategy {
private static final double TEN_PERCENT = 0.10;
private TenPercentOffEverything(long id) { super(id, "10% off everything"); }
public static OfferStrategy create(int id) { return new TenPercentOffEverything(id); }
@Override public Offers calculateOffers(Items items) {
Price itemsTotalPrice = calculateItemsTotalPrice(items);
Price discount = itemsTotalPrice.multiplyBy(TEN_PERCENT).negate();
Offer offer = Offer.create(description(), discount);
QuantifiedOffer quantifiedOffer = QuantifiedOffer.create(offer, 1);
return Offers.fromSingle(quantifiedOffer);
}
private Price calculateItemsTotalPrice(Items items) {
return items.stream()
.map(QuantifiedItem::totalPrice)
.reduce(Price::add)
.orElse(Price.ZERO);
}
}
|
Refactor ten percent off everything strategy
|
Refactor ten percent off everything strategy
|
Java
|
mit
|
AkiKanellis/kata01-supermarket-pricing,AkiKanellis/kata01-supermarket-pricing
|
java
|
## Code Before:
package com.akikanellis.kata01;
import com.akikanellis.kata01.item.Items;
import com.akikanellis.kata01.item.QuantifiedItem;
import com.akikanellis.kata01.offer.Offer;
import com.akikanellis.kata01.offer.OfferStrategy;
import com.akikanellis.kata01.offer.Offers;
import com.akikanellis.kata01.offer.QuantifiedOffer;
import com.akikanellis.kata01.price.Price;
public class TenPercentOffEverything extends OfferStrategy {
private TenPercentOffEverything(long id) { super(id, "10% off everything"); }
public static OfferStrategy create(int id) { return new TenPercentOffEverything(id); }
@Override public Offers calculateOffers(Items items) {
Price itemsPrice = items.stream()
.map(QuantifiedItem::totalPrice)
.reduce(Price::add)
.orElse(Price.ZERO);
Price discount = itemsPrice.multiplyBy(0.10).negate();
Offer offer = Offer.create(description(), discount);
QuantifiedOffer quantifiedOffer = QuantifiedOffer.create(offer, 1);
return Offers.fromSingle(quantifiedOffer);
}
}
## Instruction:
Refactor ten percent off everything strategy
## Code After:
package com.akikanellis.kata01;
import com.akikanellis.kata01.item.Items;
import com.akikanellis.kata01.item.QuantifiedItem;
import com.akikanellis.kata01.offer.Offer;
import com.akikanellis.kata01.offer.OfferStrategy;
import com.akikanellis.kata01.offer.Offers;
import com.akikanellis.kata01.offer.QuantifiedOffer;
import com.akikanellis.kata01.price.Price;
/**
* An example offer strategy used for testing where all of our items are discounted by 10%.
*/
public class TenPercentOffEverything extends OfferStrategy {
private static final double TEN_PERCENT = 0.10;
private TenPercentOffEverything(long id) { super(id, "10% off everything"); }
public static OfferStrategy create(int id) { return new TenPercentOffEverything(id); }
@Override public Offers calculateOffers(Items items) {
Price itemsTotalPrice = calculateItemsTotalPrice(items);
Price discount = itemsTotalPrice.multiplyBy(TEN_PERCENT).negate();
Offer offer = Offer.create(description(), discount);
QuantifiedOffer quantifiedOffer = QuantifiedOffer.create(offer, 1);
return Offers.fromSingle(quantifiedOffer);
}
private Price calculateItemsTotalPrice(Items items) {
return items.stream()
.map(QuantifiedItem::totalPrice)
.reduce(Price::add)
.orElse(Price.ZERO);
}
}
|
// ... existing code ...
import com.akikanellis.kata01.offer.QuantifiedOffer;
import com.akikanellis.kata01.price.Price;
/**
* An example offer strategy used for testing where all of our items are discounted by 10%.
*/
public class TenPercentOffEverything extends OfferStrategy {
private static final double TEN_PERCENT = 0.10;
private TenPercentOffEverything(long id) { super(id, "10% off everything"); }
// ... modified code ...
public static OfferStrategy create(int id) { return new TenPercentOffEverything(id); }
@Override public Offers calculateOffers(Items items) {
Price itemsTotalPrice = calculateItemsTotalPrice(items);
Price discount = itemsTotalPrice.multiplyBy(TEN_PERCENT).negate();
Offer offer = Offer.create(description(), discount);
QuantifiedOffer quantifiedOffer = QuantifiedOffer.create(offer, 1);
return Offers.fromSingle(quantifiedOffer);
}
private Price calculateItemsTotalPrice(Items items) {
return items.stream()
.map(QuantifiedItem::totalPrice)
.reduce(Price::add)
.orElse(Price.ZERO);
}
}
// ... rest of the code ...
|
a6491e62201e070665020e8e123d1cd65fc2cca6
|
Examples/THINGS/submit_all_THINGS.py
|
Examples/THINGS/submit_all_THINGS.py
|
import os
'''
Submits a job for every sample defined in the info dict
'''
script_path = "/lustre/home/ekoch/code_repos/BaSiCs/Examples/THINGS/"
submit_file = os.path.join(script_path, "submit_THINGS.pbs")
# Load in the info dict for the names
execfile(os.path.join(script_path, "info_THINGS.py"))
datapath = "/lustre/home/ekoch/THINGS/"
for name in galaxy_props:
galaxy_path = os.path.join(datapath, name)
# Now submit it!
os.system("qsub -v INP={1} {0}".format(submit_file, galaxy_path))
|
import os
from datetime import datetime
'''
Submits a job for every sample defined in the info dict
'''
def timestring():
return datetime.now().strftime("%Y%m%d%H%M%S%f")
script_path = "/lustre/home/ekoch/code_repos/BaSiCs/Examples/THINGS/"
submit_file = os.path.join(script_path, "submit_THINGS.pbs")
# Load in the info dict for the names
execfile(os.path.join(script_path, "info_THINGS.py"))
datapath = "/lustre/home/ekoch/THINGS/"
for name in galaxy_props:
galaxy_path = os.path.join(datapath, name)
now_time = timestring()
error_file = \
os.path.join(galaxy_path, "{0}_bubbles_{1}.err".format(name, now_time))
output_file = \
os.path.join(galaxy_path, "{0}_bubbles_{1}.out".format(name, now_time))
# Now submit it!
os.system("qsub -e {2} -o {3} -v INP={1} {0}".format(submit_file,
galaxy_path,
error_file,
output_file))
|
Write the error and output files with the galaxy name and in the right folder
|
Write the error and output files with the galaxy name and in the right folder
|
Python
|
mit
|
e-koch/BaSiCs
|
python
|
## Code Before:
import os
'''
Submits a job for every sample defined in the info dict
'''
script_path = "/lustre/home/ekoch/code_repos/BaSiCs/Examples/THINGS/"
submit_file = os.path.join(script_path, "submit_THINGS.pbs")
# Load in the info dict for the names
execfile(os.path.join(script_path, "info_THINGS.py"))
datapath = "/lustre/home/ekoch/THINGS/"
for name in galaxy_props:
galaxy_path = os.path.join(datapath, name)
# Now submit it!
os.system("qsub -v INP={1} {0}".format(submit_file, galaxy_path))
## Instruction:
Write the error and output files with the galaxy name and in the right folder
## Code After:
import os
from datetime import datetime
'''
Submits a job for every sample defined in the info dict
'''
def timestring():
return datetime.now().strftime("%Y%m%d%H%M%S%f")
script_path = "/lustre/home/ekoch/code_repos/BaSiCs/Examples/THINGS/"
submit_file = os.path.join(script_path, "submit_THINGS.pbs")
# Load in the info dict for the names
execfile(os.path.join(script_path, "info_THINGS.py"))
datapath = "/lustre/home/ekoch/THINGS/"
for name in galaxy_props:
galaxy_path = os.path.join(datapath, name)
now_time = timestring()
error_file = \
os.path.join(galaxy_path, "{0}_bubbles_{1}.err".format(name, now_time))
output_file = \
os.path.join(galaxy_path, "{0}_bubbles_{1}.out".format(name, now_time))
# Now submit it!
os.system("qsub -e {2} -o {3} -v INP={1} {0}".format(submit_file,
galaxy_path,
error_file,
output_file))
|
# ... existing code ...
import os
from datetime import datetime
'''
Submits a job for every sample defined in the info dict
'''
def timestring():
return datetime.now().strftime("%Y%m%d%H%M%S%f")
script_path = "/lustre/home/ekoch/code_repos/BaSiCs/Examples/THINGS/"
# ... modified code ...
for name in galaxy_props:
galaxy_path = os.path.join(datapath, name)
now_time = timestring()
error_file = \
os.path.join(galaxy_path, "{0}_bubbles_{1}.err".format(name, now_time))
output_file = \
os.path.join(galaxy_path, "{0}_bubbles_{1}.out".format(name, now_time))
# Now submit it!
os.system("qsub -e {2} -o {3} -v INP={1} {0}".format(submit_file,
galaxy_path,
error_file,
output_file))
# ... rest of the code ...
|
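The updated script builds per-galaxy, timestamped .err/.out paths and hands them to qsub with -e/-o so each job's logs land beside its data. A compact sketch of that construction; the galaxy name below is invented for illustration, while the directory layout follows the script's datapath.

import os
from datetime import datetime

datapath = "/lustre/home/ekoch/THINGS/"

def log_paths(galaxy_path, name):
    stamp = datetime.now().strftime("%Y%m%d%H%M%S%f")
    error_file = os.path.join(galaxy_path, "{0}_bubbles_{1}.err".format(name, stamp))
    output_file = os.path.join(galaxy_path, "{0}_bubbles_{1}.out".format(name, stamp))
    return error_file, output_file

name = "EXAMPLE_GALAXY"                      # hypothetical galaxy name
galaxy_path = os.path.join(datapath, name)
err, out = log_paths(galaxy_path, name)
command = "qsub -e {0} -o {1} -v INP={2} submit_THINGS.pbs".format(err, out, galaxy_path)
print(command)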
29cc59bc478c4c6bc936141d19a3386468ff8f07
|
tests/test_general_attributes.py
|
tests/test_general_attributes.py
|
from jawa.attribute import get_attribute_classes
def test_mandatory_attributes():
for parser_class in get_attribute_classes().values():
assert hasattr(parser_class, 'ADDED_IN'), (
'Attribute parser missing mandatory ADDED_IN property'
)
assert hasattr(parser_class, 'MINIMUM_CLASS_VERSION'), (
'Attribute parser missing mandatory MINIMUM_CLASS_VERSION '
'property'
)
|
from jawa.attribute import get_attribute_classes
def test_mandatory_attributes():
required_properities = ['ADDED_IN', 'MINIMUM_CLASS_VERSION']
for name, class_ in get_attribute_classes().items():
for p in required_properities:
assert hasattr(class_, p), (
'{name} parser missing mandatory {p} property'.format(
name=name,
p=p
)
)
def test_attribute_naming():
for name, class_ in get_attribute_classes().items():
if hasattr(class_, 'ATTRIBUTE_NAME'):
continue
assert class_.__name__.endswith('Attribute'), (
'{name} parser does not follow naming convention and does'
' not explicity set it.'.format(name=name)
)
|
Add a simple test for Attribute class naming conventions.
|
Add a simple test for Attribute class naming conventions.
|
Python
|
mit
|
TkTech/Jawa,TkTech/Jawa
|
python
|
## Code Before:
from jawa.attribute import get_attribute_classes
def test_mandatory_attributes():
for parser_class in get_attribute_classes().values():
assert hasattr(parser_class, 'ADDED_IN'), (
'Attribute parser missing mandatory ADDED_IN property'
)
assert hasattr(parser_class, 'MINIMUM_CLASS_VERSION'), (
'Attribute parser missing mandatory MINIMUM_CLASS_VERSION '
'property'
)
## Instruction:
Add a simple test for Attribute class naming conventions.
## Code After:
from jawa.attribute import get_attribute_classes
def test_mandatory_attributes():
required_properities = ['ADDED_IN', 'MINIMUM_CLASS_VERSION']
for name, class_ in get_attribute_classes().items():
for p in required_properities:
assert hasattr(class_, p), (
'{name} parser missing mandatory {p} property'.format(
name=name,
p=p
)
)
def test_attribute_naming():
for name, class_ in get_attribute_classes().items():
if hasattr(class_, 'ATTRIBUTE_NAME'):
continue
assert class_.__name__.endswith('Attribute'), (
'{name} parser does not follow naming convention and does'
' not explicity set it.'.format(name=name)
)
|
# ... existing code ...
def test_mandatory_attributes():
required_properities = ['ADDED_IN', 'MINIMUM_CLASS_VERSION']
for name, class_ in get_attribute_classes().items():
for p in required_properities:
assert hasattr(class_, p), (
'{name} parser missing mandatory {p} property'.format(
name=name,
p=p
)
)
def test_attribute_naming():
for name, class_ in get_attribute_classes().items():
if hasattr(class_, 'ATTRIBUTE_NAME'):
continue
assert class_.__name__.endswith('Attribute'), (
'{name} parser does not follow naming convention and does'
' not explicity set it.'.format(name=name)
)
# ... rest of the code ...
|