| commit (stringlengths 40-40) | old_file (stringlengths 4-234) | new_file (stringlengths 4-234) | old_contents (stringlengths 10-3.01k) | new_contents (stringlengths 19-3.38k) | subject (stringlengths 16-736) | message (stringlengths 17-2.63k) | lang (stringclasses, 4 values) | license (stringclasses, 13 values) | repos (stringlengths 5-82.6k) | config (stringclasses, 4 values) | content (stringlengths 134-4.41k) | fuzzy_diff (stringlengths 29-3.44k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
8dcda9a9dd5c7f106d5544bb185fa348157495fb
|
blimp_boards/accounts/serializers.py
|
blimp_boards/accounts/serializers.py
|
from rest_framework import serializers
from ..utils.fields import DomainNameField
from .fields import SignupDomainsField
from .models import Account
class ValidateSignupDomainsSerializer(serializers.Serializer):
"""
Serializer that handles signup domains validation endpoint.
"""
signup_domains = SignupDomainsField()
class AccountSerializer(serializers.ModelSerializer):
"""
Serializer for Accounts.
"""
class Meta:
model = Account
fields = ('id', 'name', 'slug', 'disqus_shortname', 'logo_color',
'date_created', 'date_modified')
class CheckSignupDomainSerializer(serializers.Serializer):
"""
Serializer to get account that has signup domain setup.
"""
signup_domain = DomainNameField()
def validate_signup_domain(self, attrs, source):
signup_domain = attrs[source]
data = {}
try:
account = Account.objects.get(
allow_signup=True, email_domains__domain_name=signup_domain)
data = AccountSerializer(account).data
except Account.DoesNotExist:
pass
return data
|
from rest_framework import serializers
from ..utils.fields import DomainNameField
from .fields import SignupDomainsField
from .models import Account
class ValidateSignupDomainsSerializer(serializers.Serializer):
"""
Serializer that handles signup domains validation endpoint.
"""
signup_domains = SignupDomainsField()
class AccountSerializer(serializers.ModelSerializer):
"""
Serializer for Accounts.
"""
class Meta:
model = Account
read_only_fields = ('logo_color', )
fields = ('id', 'name', 'slug', 'disqus_shortname', 'logo_color',
'date_created', 'date_modified')
class CheckSignupDomainSerializer(serializers.Serializer):
"""
Serializer to get account that has signup domain setup.
"""
signup_domain = DomainNameField()
def validate_signup_domain(self, attrs, source):
signup_domain = attrs[source]
data = {}
try:
account = Account.objects.get(
allow_signup=True, email_domains__domain_name=signup_domain)
data = AccountSerializer(account).data
except Account.DoesNotExist:
pass
return data
|
Set Account logo_color to be read only
|
Set Account logo_color to be read only
|
Python
|
agpl-3.0
|
jessamynsmith/boards-backend,jessamynsmith/boards-backend,GetBlimp/boards-backend
|
python
|
## Code Before:
from rest_framework import serializers
from ..utils.fields import DomainNameField
from .fields import SignupDomainsField
from .models import Account
class ValidateSignupDomainsSerializer(serializers.Serializer):
"""
Serializer that handles signup domains validation endpoint.
"""
signup_domains = SignupDomainsField()
class AccountSerializer(serializers.ModelSerializer):
"""
Serializer for Accounts.
"""
class Meta:
model = Account
fields = ('id', 'name', 'slug', 'disqus_shortname', 'logo_color',
'date_created', 'date_modified')
class CheckSignupDomainSerializer(serializers.Serializer):
"""
Serializer to get account that has signup domain setup.
"""
signup_domain = DomainNameField()
def validate_signup_domain(self, attrs, source):
signup_domain = attrs[source]
data = {}
try:
account = Account.objects.get(
allow_signup=True, email_domains__domain_name=signup_domain)
data = AccountSerializer(account).data
except Account.DoesNotExist:
pass
return data
## Instruction:
Set Account logo_color to be read only
## Code After:
from rest_framework import serializers
from ..utils.fields import DomainNameField
from .fields import SignupDomainsField
from .models import Account
class ValidateSignupDomainsSerializer(serializers.Serializer):
"""
Serializer that handles signup domains validation endpoint.
"""
signup_domains = SignupDomainsField()
class AccountSerializer(serializers.ModelSerializer):
"""
Serializer for Accounts.
"""
class Meta:
model = Account
read_only_fields = ('logo_color', )
fields = ('id', 'name', 'slug', 'disqus_shortname', 'logo_color',
'date_created', 'date_modified')
class CheckSignupDomainSerializer(serializers.Serializer):
"""
Serializer to get account that has signup domain setup.
"""
signup_domain = DomainNameField()
def validate_signup_domain(self, attrs, source):
signup_domain = attrs[source]
data = {}
try:
account = Account.objects.get(
allow_signup=True, email_domains__domain_name=signup_domain)
data = AccountSerializer(account).data
except Account.DoesNotExist:
pass
return data
|
# ... existing code ...
"""
class Meta:
model = Account
read_only_fields = ('logo_color', )
fields = ('id', 'name', 'slug', 'disqus_shortname', 'logo_color',
'date_created', 'date_modified')
# ... rest of the code ...
|
e0ddd80ea2d23f9b5fc32dd8a5ea13f9cb30da49
|
app/packages/__init__.py
|
app/packages/__init__.py
|
from flask import Blueprint
packages = Blueprint('packages', __name__)
from . import views, models
from utils import github_data
def post_get_single(result=None, **kw):
result.update(result.pop("get_json"))
result.update(github_data(result['name'], result['author'], result['url']))
# runs for search request
def post_get_many(result=None, search_params=None, **kw):
for item in result["objects"]:
item.update(item.pop("get_json"))
def api_creator(apimanager):
apimanager.create_api(models.Package, primary_key='name', methods=['GET'],
include_methods=['get_json'],
include_columns=[],
postprocessors={
'GET_SINGLE': [post_get_single],
'GET_MANY': [post_get_many]
})
|
from flask import Blueprint
packages = Blueprint('packages', __name__)
from . import views, models
from utils import github_data
def post_get_single(result=None, **kw):
result.update(result.pop("get_json"))
result.update(github_data(result['name'], result['author'], result['url']))
# runs for search request
def post_get_many(result=None, search_params=None, **kw):
for item in result["objects"]:
item.update(item.pop("get_json"))
def search_filter(search_params=None, **kw):
if (search_params is None) or search_params.get("name") is None:
return
def filter_string(name):
filter = []
filter.append(dict(name='name',
val='%' + name + '%',
op='like'
)
)
filter.append(dict(name="keywords__name",
val=name,
op="any"
))
return filter
search_params['filters'] = []
args = search_params['name'].split()
for item in args:
search_params['filters'].extend(filter_string(item))
search_params['disjunction'] = True
def api_creator(apimanager):
apimanager.create_api(models.Package, primary_key='name', methods=['GET'],
include_methods=['get_json'],
include_columns=[],
postprocessors={
'GET_SINGLE': [post_get_single],
'GET_MANY': [post_get_many]
})
apimanager.create_api(models.Package, primary_key='name',
collection_name='search',
methods=['GET'],
include_methods=['get_json'],
include_columns=[],
preprocessors={
'GET_MANY': [search_filter]
})
|
Add api for package search based on name and keywords
|
Add api for package search based on name and keywords
|
Python
|
bsd-2-clause
|
NikhilKalige/atom-website,NikhilKalige/atom-website,NikhilKalige/atom-website
|
python
|
## Code Before:
from flask import Blueprint
packages = Blueprint('packages', __name__)
from . import views, models
from utils import github_data
def post_get_single(result=None, **kw):
result.update(result.pop("get_json"))
result.update(github_data(result['name'], result['author'], result['url']))
# runs for search request
def post_get_many(result=None, search_params=None, **kw):
for item in result["objects"]:
item.update(item.pop("get_json"))
def api_creator(apimanager):
apimanager.create_api(models.Package, primary_key='name', methods=['GET'],
include_methods=['get_json'],
include_columns=[],
postprocessors={
'GET_SINGLE': [post_get_single],
'GET_MANY': [post_get_many]
})
## Instruction:
Add api for package search based on name and keywords
## Code After:
from flask import Blueprint
packages = Blueprint('packages', __name__)
from . import views, models
from utils import github_data
def post_get_single(result=None, **kw):
result.update(result.pop("get_json"))
result.update(github_data(result['name'], result['author'], result['url']))
# runs for search request
def post_get_many(result=None, search_params=None, **kw):
for item in result["objects"]:
item.update(item.pop("get_json"))
def search_filter(search_params=None, **kw):
if (search_params is None) or search_params.get("name") is None:
return
def filter_string(name):
filter = []
filter.append(dict(name='name',
val='%' + name + '%',
op='like'
)
)
filter.append(dict(name="keywords__name",
val=name,
op="any"
))
return filter
search_params['filters'] = []
args = search_params['name'].split()
for item in args:
search_params['filters'].extend(filter_string(item))
search_params['disjunction'] = True
def api_creator(apimanager):
apimanager.create_api(models.Package, primary_key='name', methods=['GET'],
include_methods=['get_json'],
include_columns=[],
postprocessors={
'GET_SINGLE': [post_get_single],
'GET_MANY': [post_get_many]
})
apimanager.create_api(models.Package, primary_key='name',
collection_name='search',
methods=['GET'],
include_methods=['get_json'],
include_columns=[],
preprocessors={
'GET_MANY': [search_filter]
})
|
// ... existing code ...
item.update(item.pop("get_json"))
def search_filter(search_params=None, **kw):
if (search_params is None) or search_params.get("name") is None:
return
def filter_string(name):
filter = []
filter.append(dict(name='name',
val='%' + name + '%',
op='like'
)
)
filter.append(dict(name="keywords__name",
val=name,
op="any"
))
return filter
search_params['filters'] = []
args = search_params['name'].split()
for item in args:
search_params['filters'].extend(filter_string(item))
search_params['disjunction'] = True
def api_creator(apimanager):
apimanager.create_api(models.Package, primary_key='name', methods=['GET'],
include_methods=['get_json'],
// ... modified code ...
'GET_SINGLE': [post_get_single],
'GET_MANY': [post_get_many]
})
apimanager.create_api(models.Package, primary_key='name',
collection_name='search',
methods=['GET'],
include_methods=['get_json'],
include_columns=[],
preprocessors={
'GET_MANY': [search_filter]
})
// ... rest of the code ...
|
a6b6e01c38de1c69db431143611c20e308c6c665
|
FetchTopActivity/app/src/main/java/com/chanryma/fetchtopactivity/ActivityB.java
|
FetchTopActivity/app/src/main/java/com/chanryma/fetchtopactivity/ActivityB.java
|
package com.chanryma.fetchtopactivity;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
public class ActivityB extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_b);
}
public void onButtonClick(View view) {
Intent intent = new Intent(MyApplication.getInstance().getApplicationContext(), ActivityC.class);
MyApplication.getInstance().getCurrentActivity().startActivity(intent);
}
}
|
package com.chanryma.fetchtopactivity;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
public class ActivityB extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_b);
}
public void onButtonClick(View view) {
// Intent intent = new Intent(MyApplication.getInstance().getApplicationContext(), ActivityC.class);
// MyApplication.getInstance().getCurrentActivity().startActivity(intent);
Intent intent = new Intent(ActivityB.this, ActivityC.class);
startActivity(intent);
}
}
|
Change the way to start ActivityC
|
Change the way to start ActivityC
|
Java
|
mit
|
Chanryma/Demo,Chanryma/Demo
|
java
|
## Code Before:
package com.chanryma.fetchtopactivity;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
public class ActivityB extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_b);
}
public void onButtonClick(View view) {
Intent intent = new Intent(MyApplication.getInstance().getApplicationContext(), ActivityC.class);
MyApplication.getInstance().getCurrentActivity().startActivity(intent);
}
}
## Instruction:
Change the way to start ActivityC
## Code After:
package com.chanryma.fetchtopactivity;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
public class ActivityB extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_b);
}
public void onButtonClick(View view) {
// Intent intent = new Intent(MyApplication.getInstance().getApplicationContext(), ActivityC.class);
// MyApplication.getInstance().getCurrentActivity().startActivity(intent);
Intent intent = new Intent(ActivityB.this, ActivityC.class);
startActivity(intent);
}
}
|
...
}
public void onButtonClick(View view) {
// Intent intent = new Intent(MyApplication.getInstance().getApplicationContext(), ActivityC.class);
// MyApplication.getInstance().getCurrentActivity().startActivity(intent);
Intent intent = new Intent(ActivityB.this, ActivityC.class);
startActivity(intent);
}
}
...
|
d0c6e7b834f404d2d8a6dd6650b5e1eca302eff0
|
test/wssec/SOAPUtil.java
|
test/wssec/SOAPUtil.java
|
package wssec;
import org.apache.xml.security.c14n.Canonicalizer;
import org.w3c.dom.Document;
import javax.xml.soap.MessageFactory;
import javax.xml.soap.SOAPMessage;
import javax.xml.transform.dom.DOMSource;
import java.io.ByteArrayInputStream;
public class SOAPUtil {
/**
* Convert a DOM Document into a soap message.
* <p/>
*
* @param doc
* @return
* @throws Exception
*/
public static SOAPMessage toSOAPMessage(Document doc) throws Exception {
Canonicalizer c14n =
Canonicalizer.getInstance(Canonicalizer.ALGO_ID_C14N_WITH_COMMENTS);
byte[] canonicalMessage = c14n.canonicalizeSubtree(doc);
ByteArrayInputStream in = new ByteArrayInputStream(canonicalMessage);
MessageFactory factory = MessageFactory.newInstance();
return factory.createMessage(null, in);
}
/**
* Update soap message.
* <p/>
*
* @param doc
* @param message
* @return
* @throws Exception
*/
public static SOAPMessage updateSOAPMessage(Document doc,
SOAPMessage message)
throws Exception {
DOMSource domSource = new DOMSource(doc);
message.getSOAPPart().setContent(domSource);
return message;
}
}
|
package wssec;
import org.apache.xml.security.c14n.Canonicalizer;
import org.w3c.dom.Document;
import org.apache.axis.Message;
import javax.xml.soap.MessageFactory;
import javax.xml.soap.SOAPMessage;
import javax.xml.transform.dom.DOMSource;
import java.io.ByteArrayInputStream;
public class SOAPUtil {
/**
* Convert a DOM Document into a soap message.
* <p/>
*
* @param doc
* @return
* @throws Exception
*/
public static SOAPMessage toSOAPMessage(Document doc) throws Exception {
Canonicalizer c14n =
Canonicalizer.getInstance(Canonicalizer.ALGO_ID_C14N_WITH_COMMENTS);
byte[] canonicalMessage = c14n.canonicalizeSubtree(doc);
ByteArrayInputStream in = new ByteArrayInputStream(canonicalMessage);
MessageFactory factory = MessageFactory.newInstance();
return factory.createMessage(null, in);
}
/**
* Convert a DOM Document into an Axis message.
* <p/>
*
* @param doc
* @return
* @throws Exception
*/
public static Message toAxisMessage(Document doc) throws Exception {
Canonicalizer c14n =
Canonicalizer.getInstance(Canonicalizer.ALGO_ID_C14N_WITH_COMMENTS);
byte[] canonicalMessage = c14n.canonicalizeSubtree(doc);
ByteArrayInputStream in = new ByteArrayInputStream(canonicalMessage);
return new Message(in);
}
/**
* Update soap message.
* <p/>
*
* @param doc
* @param message
* @return
* @throws Exception
*/
public static SOAPMessage updateSOAPMessage(Document doc,
SOAPMessage message)
throws Exception {
DOMSource domSource = new DOMSource(doc);
message.getSOAPPart().setContent(domSource);
return message;
}
}
|
Introduce new "toAxisMessage()" to create a Axis message from a DOM document. Use this new function instead of "toSOAPMessage()". This resolves a problem in Java 6 which has a built-in xml.SOAPMessage implementation. This implementation is in conflict with the previous used Axis implementation. Previously the MessageFactory returned an Axis message that implements a SOAPMessage interface. The test cases (TestWS*) used this internal know-how and cast the SOAPMessage into an Axis message directly. This fails in Java 6 and was bad programming style anyhow. The TestWS* test cases will be modified to use the new function.
|
Introduce new "toAxisMessage()" to create a Axis message from a DOM
document. Use this new function instead of "toSOAPMessage()". This
resolves a problem in Java 6 which has a built-in xml.SOAPMessage
implementation. This implementation is in conflict with the previous
used Axis implementation. Previously the MessageFactory returned an
Axis message that implements a SOAPMessage interface. The test cases
(TestWS*) used this internal know-how and cast the SOAPMessage into an
Axis message directly. This fails in Java 6 and was bad programming style
anyhow. The TestWS* test cases will be modified to use the new function.
git-svn-id: 10bc45916fe30ae642aa5037c9a4b05727bba413@533031 13f79535-47bb-0310-9956-ffa450edef68
|
Java
|
apache-2.0
|
asoldano/wss4j,clibois/wss4j,asoldano/wss4j,apache/wss4j,jimma/wss4j,jimma/wss4j,clibois/wss4j,apache/wss4j
|
java
|
## Code Before:
package wssec;
import org.apache.xml.security.c14n.Canonicalizer;
import org.w3c.dom.Document;
import javax.xml.soap.MessageFactory;
import javax.xml.soap.SOAPMessage;
import javax.xml.transform.dom.DOMSource;
import java.io.ByteArrayInputStream;
public class SOAPUtil {
/**
* Convert a DOM Document into a soap message.
* <p/>
*
* @param doc
* @return
* @throws Exception
*/
public static SOAPMessage toSOAPMessage(Document doc) throws Exception {
Canonicalizer c14n =
Canonicalizer.getInstance(Canonicalizer.ALGO_ID_C14N_WITH_COMMENTS);
byte[] canonicalMessage = c14n.canonicalizeSubtree(doc);
ByteArrayInputStream in = new ByteArrayInputStream(canonicalMessage);
MessageFactory factory = MessageFactory.newInstance();
return factory.createMessage(null, in);
}
/**
* Update soap message.
* <p/>
*
* @param doc
* @param message
* @return
* @throws Exception
*/
public static SOAPMessage updateSOAPMessage(Document doc,
SOAPMessage message)
throws Exception {
DOMSource domSource = new DOMSource(doc);
message.getSOAPPart().setContent(domSource);
return message;
}
}
## Instruction:
Introduce new "toAxisMessage()" to create a Axis message from a DOM
document. Use this new function instead of "toSOAPMessage()". This
resolves a problem in Java 6 which has a built-in xml.SOAPMessage
implementation. This implementation is in conflict with the previous
used Axis implementation. Previously the MessageFactory returned an
Axis message that implements a SOAPMessage interface. The test cases
(TestWS*) used this internal know-how and cast the SOAPMessage into an
Axis message directly. This fails in Java 6 and was bad programming style
anyhow. The TestWS* test cases will be modified to use the new function.
git-svn-id: 10bc45916fe30ae642aa5037c9a4b05727bba413@533031 13f79535-47bb-0310-9956-ffa450edef68
## Code After:
package wssec;
import org.apache.xml.security.c14n.Canonicalizer;
import org.w3c.dom.Document;
import org.apache.axis.Message;
import javax.xml.soap.MessageFactory;
import javax.xml.soap.SOAPMessage;
import javax.xml.transform.dom.DOMSource;
import java.io.ByteArrayInputStream;
public class SOAPUtil {
/**
* Convert a DOM Document into a soap message.
* <p/>
*
* @param doc
* @return
* @throws Exception
*/
public static SOAPMessage toSOAPMessage(Document doc) throws Exception {
Canonicalizer c14n =
Canonicalizer.getInstance(Canonicalizer.ALGO_ID_C14N_WITH_COMMENTS);
byte[] canonicalMessage = c14n.canonicalizeSubtree(doc);
ByteArrayInputStream in = new ByteArrayInputStream(canonicalMessage);
MessageFactory factory = MessageFactory.newInstance();
return factory.createMessage(null, in);
}
/**
* Convert a DOM Document into an Axis message.
* <p/>
*
* @param doc
* @return
* @throws Exception
*/
public static Message toAxisMessage(Document doc) throws Exception {
Canonicalizer c14n =
Canonicalizer.getInstance(Canonicalizer.ALGO_ID_C14N_WITH_COMMENTS);
byte[] canonicalMessage = c14n.canonicalizeSubtree(doc);
ByteArrayInputStream in = new ByteArrayInputStream(canonicalMessage);
return new Message(in);
}
/**
* Update soap message.
* <p/>
*
* @param doc
* @param message
* @return
* @throws Exception
*/
public static SOAPMessage updateSOAPMessage(Document doc,
SOAPMessage message)
throws Exception {
DOMSource domSource = new DOMSource(doc);
message.getSOAPPart().setContent(domSource);
return message;
}
}
|
# ... existing code ...
import org.apache.xml.security.c14n.Canonicalizer;
import org.w3c.dom.Document;
import org.apache.axis.Message;
import javax.xml.soap.MessageFactory;
import javax.xml.soap.SOAPMessage;
# ... modified code ...
}
/**
* Convert a DOM Document into an Axis message.
* <p/>
*
* @param doc
* @return
* @throws Exception
*/
public static Message toAxisMessage(Document doc) throws Exception {
Canonicalizer c14n =
Canonicalizer.getInstance(Canonicalizer.ALGO_ID_C14N_WITH_COMMENTS);
byte[] canonicalMessage = c14n.canonicalizeSubtree(doc);
ByteArrayInputStream in = new ByteArrayInputStream(canonicalMessage);
return new Message(in);
}
/**
* Update soap message.
* <p/>
*
# ... rest of the code ...
|
a754323facdb05b18d19a1a0365ad12e8c25ed06
|
ocradmin/core/tests/test_core.py
|
ocradmin/core/tests/test_core.py
|
import subprocess as sp
from django.test import TestCase
from django.test.client import Client
from django.contrib.auth.models import User
from django.conf import settings
class CoreTest(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_isri_tools(self):
"""
Ensure running 'accuracy' with no args results
in usage info. Basically we want to make sure
that the accuracy binary is available.
"""
p = sp.Popen(["accuracy"], stderr=sp.PIPE)
self.assertRegexpMatches(p.communicate()[1], "^Usage")
|
import subprocess as sp
from django.test import TestCase
from django.test.client import Client
from django.contrib.auth.models import User
from django.conf import settings
class CoreTest(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_isri_tools(self):
"""
Ensure running 'accuracy' with no args results
in usage info. Basically we want to make sure
that the accuracy binary is available.
"""
stdout, stderr = self._run_cmd("accuracy")
self.assertRegexpMatches(stderr, "^Usage")
def test_cuneiform(self):
"""
Ensure cuneiform is available. This is fragile since it depends
on Cuneiform's annoying output on stdout.
"""
stdout, stderr = self._run_cmd("cuneiform")
self.assertRegexpMatches(stdout, "^Cuneiform for Linux")
def test_tesseract(self):
"""
Ensure tesseract is available.
"""
stdout, stderr = self._run_cmd("tesseract")
self.assertRegexpMatches(stderr, "^Usage")
def test_convert(self):
"""
Ensure (Image|Graphics)Magick is available.
"""
stdout, stderr = self._run_cmd("convert")
self.assertRegexpMatches(stdout, "Usage")
def _run_cmd(self, *args):
p = sp.Popen(args, stdout=sp.PIPE, stderr=sp.PIPE)
return p.communicate()
|
Test the presence of various tools
|
Test the presence of various tools
|
Python
|
apache-2.0
|
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
|
python
|
## Code Before:
import subprocess as sp
from django.test import TestCase
from django.test.client import Client
from django.contrib.auth.models import User
from django.conf import settings
class CoreTest(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_isri_tools(self):
"""
Ensure running 'accuracy' with no args results
in usage info. Basically we want to make sure
that the accuracy binary is available.
"""
p = sp.Popen(["accuracy"], stderr=sp.PIPE)
self.assertRegexpMatches(p.communicate()[1], "^Usage")
## Instruction:
Test the presence of various tools
## Code After:
import subprocess as sp
from django.test import TestCase
from django.test.client import Client
from django.contrib.auth.models import User
from django.conf import settings
class CoreTest(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_isri_tools(self):
"""
Ensure running 'accuracy' with no args results
in usage info. Basically we want to make sure
that the accuracy binary is available.
"""
stdout, stderr = self._run_cmd("accuracy")
self.assertRegexpMatches(stderr, "^Usage")
def test_cuneiform(self):
"""
Ensure cuneiform is available. This is fragile since it depends
on Cuneiform's annoying output on stdout.
"""
stdout, stderr = self._run_cmd("cuneiform")
self.assertRegexpMatches(stdout, "^Cuneiform for Linux")
def test_tesseract(self):
"""
Ensure tesseract is available.
"""
stdout, stderr = self._run_cmd("tesseract")
self.assertRegexpMatches(stderr, "^Usage")
def test_convert(self):
"""
Ensure (Image|Graphics)Magick is available.
"""
stdout, stderr = self._run_cmd("convert")
self.assertRegexpMatches(stdout, "Usage")
def _run_cmd(self, *args):
p = sp.Popen(args, stdout=sp.PIPE, stderr=sp.PIPE)
return p.communicate()
|
...
in usage info. Basically we want to make sure
that the accuracy binary is available.
"""
stdout, stderr = self._run_cmd("accuracy")
self.assertRegexpMatches(stderr, "^Usage")
def test_cuneiform(self):
"""
Ensure cuneiform is available. This is fragile since it depends
on Cuneiform's annoying output on stdout.
"""
stdout, stderr = self._run_cmd("cuneiform")
self.assertRegexpMatches(stdout, "^Cuneiform for Linux")
def test_tesseract(self):
"""
Ensure tesseract is available.
"""
stdout, stderr = self._run_cmd("tesseract")
self.assertRegexpMatches(stderr, "^Usage")
def test_convert(self):
"""
Ensure (Image|Graphics)Magick is available.
"""
stdout, stderr = self._run_cmd("convert")
self.assertRegexpMatches(stdout, "Usage")
def _run_cmd(self, *args):
p = sp.Popen(args, stdout=sp.PIPE, stderr=sp.PIPE)
return p.communicate()
...
|
e67c57128f88b61eac08e488e54343d48f1454c7
|
ddcz/forms/authentication.py
|
ddcz/forms/authentication.py
|
import logging
from django import forms
from django.contrib.auth import forms as authforms
from ..models import UserProfile
logger = logging.getLogger(__name__)
class LoginForm(forms.Form):
nick = forms.CharField(label="Nick", max_length=20)
password = forms.CharField(label="Heslo", max_length=50, widget=forms.PasswordInput)
class PasswordResetForm(authforms.PasswordResetForm):
def get_users(self, email):
"""Given an email, return matching user(s) who should receive a reset.
This is overridem from original form to use UserProfile instead of standard
user model since that is normative for email storage.
"""
user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email)
users = tuple(
list(
up.user
for up in user_profiles
if up.user.has_usable_password() and up.user.is_active
)
)
logger.info(
"Selected users for password reset: %s"
% ", ".join([str(u.pk) for u in users])
)
return users
|
import logging
from django import forms
from django.contrib.auth import forms as authforms
from ..models import UserProfile
logger = logging.getLogger(__name__)
class LoginForm(forms.Form):
nick = forms.CharField(label="Nick", max_length=25)
password = forms.CharField(
label="Heslo", max_length=100, widget=forms.PasswordInput
)
class PasswordResetForm(authforms.PasswordResetForm):
def get_users(self, email):
"""Given an email, return matching user(s) who should receive a reset.
This is overridem from original form to use UserProfile instead of standard
user model since that is normative for email storage.
"""
user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email)
users = tuple(
list(
up.user
for up in user_profiles
if up.user.has_usable_password() and up.user.is_active
)
)
logger.info(
"Selected users for password reset: %s"
% ", ".join([str(u.pk) for u in users])
)
return users
|
Update LoginForm to match reality
|
Update LoginForm to match reality
|
Python
|
mit
|
dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard
|
python
|
## Code Before:
import logging
from django import forms
from django.contrib.auth import forms as authforms
from ..models import UserProfile
logger = logging.getLogger(__name__)
class LoginForm(forms.Form):
nick = forms.CharField(label="Nick", max_length=20)
password = forms.CharField(label="Heslo", max_length=50, widget=forms.PasswordInput)
class PasswordResetForm(authforms.PasswordResetForm):
def get_users(self, email):
"""Given an email, return matching user(s) who should receive a reset.
This is overridem from original form to use UserProfile instead of standard
user model since that is normative for email storage.
"""
user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email)
users = tuple(
list(
up.user
for up in user_profiles
if up.user.has_usable_password() and up.user.is_active
)
)
logger.info(
"Selected users for password reset: %s"
% ", ".join([str(u.pk) for u in users])
)
return users
## Instruction:
Update LoginForm to match reality
## Code After:
import logging
from django import forms
from django.contrib.auth import forms as authforms
from ..models import UserProfile
logger = logging.getLogger(__name__)
class LoginForm(forms.Form):
nick = forms.CharField(label="Nick", max_length=25)
password = forms.CharField(
label="Heslo", max_length=100, widget=forms.PasswordInput
)
class PasswordResetForm(authforms.PasswordResetForm):
def get_users(self, email):
"""Given an email, return matching user(s) who should receive a reset.
This is overridem from original form to use UserProfile instead of standard
user model since that is normative for email storage.
"""
user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email)
users = tuple(
list(
up.user
for up in user_profiles
if up.user.has_usable_password() and up.user.is_active
)
)
logger.info(
"Selected users for password reset: %s"
% ", ".join([str(u.pk) for u in users])
)
return users
|
# ... existing code ...
class LoginForm(forms.Form):
nick = forms.CharField(label="Nick", max_length=25)
password = forms.CharField(
label="Heslo", max_length=100, widget=forms.PasswordInput
)
class PasswordResetForm(authforms.PasswordResetForm):
# ... rest of the code ...
|
9e3a6190b2dcfd7de03ef5c974b400a51219839e
|
pyof/v0x04/symmetric/hello.py
|
pyof/v0x04/symmetric/hello.py
|
"""Defines Hello message."""
# System imports
# Third-party imports
from pyof.v0x01.symmetric.hello import Hello
__all__ = ('Hello',)
|
"""Defines Hello message."""
# System imports
from enum import Enum
from pyof.foundation.base import GenericMessage, GenericStruct
from pyof.foundation.basic_types import BinaryData, FixedTypeList, UBInt16
from pyof.v0x04.common.header import Header, Type
# Third-party imports
__all__ = ('Hello', 'HelloElemHeader', 'HelloElemType',
'HelloElemVersionbitmap', 'ListOfHelloElements')
# Enums
class HelloElemType(Enum):
"""Hello element types."""
#: Bitmap of version supported.
OFPHET_VERSIONBITMAP = 1
# Classes
class HelloElemHeader(GenericStruct):
"""Common header for all Hello Elements."""
element_type = UBInt16(enum_ref=HelloElemType)
length = UBInt16()
def __init__(self, element_type=None, length=None):
"""The constructor just assigns parameters to object attributes.
Args:
element_type: One of OFPHET_*.
length: Length in bytes of the element, including this header,
excluding padding.
"""
super().__init__()
self.element_type = element_type
self.length = length
class ListOfHelloElements(FixedTypeList):
"""List of Hello elements.
Represented by instances of HelloElemHeader and used on Hello
objects.
"""
def __init__(self, items=None):
"""The constructor just assigns parameters to object attributes.
Args:
items (HelloElemHeader): Instance or a list of instances.
"""
super().__init__(pyof_class=HelloElemHeader, items=items)
class Hello(GenericMessage):
"""OpenFlow Hello Message OFPT_HELLO.
This message includes zero or more hello elements having variable size.
Unknown element types must be ignored/skipped, to allow for future
extensions.
"""
header = Header(message_type=Type.OFPT_HELLO, length=8)
#: Hello element list
elements = ListOfHelloElements()
def __init__(self, xid=None, elements=None):
"""The constructor takes the parameters below.
Args:
xid (int): xid to be used on the message header.
elements: List of elements - 0 or more
"""
super().__init__(xid)
self.elements = elements
class HelloElemVersionbitmap(HelloElemHeader):
"""Version bitmap Hello Element."""
#: List of bitmaps - supported versions
bitmaps = BinaryData()
def __init__(self, bitmaps=b''):
"""The constructor just assigns parameters to object attributes.
Args:
bitmaps: -Exactly (length - 4) bytes containing the bitmaps,then
-Exactly (length + 7)/8*8 - (length) (between 0 and 7)
bytes of all-zero bytes.
"""
super().__init__(element_type=HelloElemType.OFPHET_VERSIONBITMAP,
length=None)
self.bitmaps = bitmaps
|
Add Hello class and related classes for v0x04
|
Add Hello class and related classes for v0x04
Fix #302
Fix #303
|
Python
|
mit
|
cemsbr/python-openflow,kytos/python-openflow
|
python
|
## Code Before:
"""Defines Hello message."""
# System imports
# Third-party imports
from pyof.v0x01.symmetric.hello import Hello
__all__ = ('Hello',)
## Instruction:
Add Hello class and related classes for v0x04
Fix #302
Fix #303
## Code After:
"""Defines Hello message."""
# System imports
from enum import Enum
from pyof.foundation.base import GenericMessage, GenericStruct
from pyof.foundation.basic_types import BinaryData, FixedTypeList, UBInt16
from pyof.v0x04.common.header import Header, Type
# Third-party imports
__all__ = ('Hello', 'HelloElemHeader', 'HelloElemType',
'HelloElemVersionbitmap', 'ListOfHelloElements')
# Enums
class HelloElemType(Enum):
"""Hello element types."""
#: Bitmap of version supported.
OFPHET_VERSIONBITMAP = 1
# Classes
class HelloElemHeader(GenericStruct):
"""Common header for all Hello Elements."""
element_type = UBInt16(enum_ref=HelloElemType)
length = UBInt16()
def __init__(self, element_type=None, length=None):
"""The constructor just assigns parameters to object attributes.
Args:
element_type: One of OFPHET_*.
length: Length in bytes of the element, including this header,
excluding padding.
"""
super().__init__()
self.element_type = element_type
self.length = length
class ListOfHelloElements(FixedTypeList):
"""List of Hello elements.
Represented by instances of HelloElemHeader and used on Hello
objects.
"""
def __init__(self, items=None):
"""The constructor just assigns parameters to object attributes.
Args:
items (HelloElemHeader): Instance or a list of instances.
"""
super().__init__(pyof_class=HelloElemHeader, items=items)
class Hello(GenericMessage):
"""OpenFlow Hello Message OFPT_HELLO.
This message includes zero or more hello elements having variable size.
Unknown element types must be ignored/skipped, to allow for future
extensions.
"""
header = Header(message_type=Type.OFPT_HELLO, length=8)
#: Hello element list
elements = ListOfHelloElements()
def __init__(self, xid=None, elements=None):
"""The constructor takes the parameters below.
Args:
xid (int): xid to be used on the message header.
elements: List of elements - 0 or more
"""
super().__init__(xid)
self.elements = elements
class HelloElemVersionbitmap(HelloElemHeader):
"""Version bitmap Hello Element."""
#: List of bitmaps - supported versions
bitmaps = BinaryData()
def __init__(self, bitmaps=b''):
"""The constructor just assigns parameters to object attributes.
Args:
bitmaps: -Exactly (length - 4) bytes containing the bitmaps,then
-Exactly (length + 7)/8*8 - (length) (between 0 and 7)
bytes of all-zero bytes.
"""
super().__init__(element_type=HelloElemType.OFPHET_VERSIONBITMAP,
length=None)
self.bitmaps = bitmaps
|
# ... existing code ...
# System imports
from enum import Enum
from pyof.foundation.base import GenericMessage, GenericStruct
from pyof.foundation.basic_types import BinaryData, FixedTypeList, UBInt16
from pyof.v0x04.common.header import Header, Type
# Third-party imports
__all__ = ('Hello', 'HelloElemHeader', 'HelloElemType',
'HelloElemVersionbitmap', 'ListOfHelloElements')
# Enums
class HelloElemType(Enum):
"""Hello element types."""
#: Bitmap of version supported.
OFPHET_VERSIONBITMAP = 1
# Classes
class HelloElemHeader(GenericStruct):
"""Common header for all Hello Elements."""
element_type = UBInt16(enum_ref=HelloElemType)
length = UBInt16()
def __init__(self, element_type=None, length=None):
"""The constructor just assigns parameters to object attributes.
Args:
element_type: One of OFPHET_*.
length: Length in bytes of the element, including this header,
excluding padding.
"""
super().__init__()
self.element_type = element_type
self.length = length
class ListOfHelloElements(FixedTypeList):
"""List of Hello elements.
Represented by instances of HelloElemHeader and used on Hello
objects.
"""
def __init__(self, items=None):
"""The constructor just assigns parameters to object attributes.
Args:
items (HelloElemHeader): Instance or a list of instances.
"""
super().__init__(pyof_class=HelloElemHeader, items=items)
class Hello(GenericMessage):
"""OpenFlow Hello Message OFPT_HELLO.
This message includes zero or more hello elements having variable size.
Unknown element types must be ignored/skipped, to allow for future
extensions.
"""
header = Header(message_type=Type.OFPT_HELLO, length=8)
#: Hello element list
elements = ListOfHelloElements()
def __init__(self, xid=None, elements=None):
"""The constructor takes the parameters below.
Args:
xid (int): xid to be used on the message header.
elements: List of elements - 0 or more
"""
super().__init__(xid)
self.elements = elements
class HelloElemVersionbitmap(HelloElemHeader):
"""Version bitmap Hello Element."""
#: List of bitmaps - supported versions
bitmaps = BinaryData()
def __init__(self, bitmaps=b''):
"""The constructor just assigns parameters to object attributes.
Args:
bitmaps: -Exactly (length - 4) bytes containing the bitmaps,then
-Exactly (length + 7)/8*8 - (length) (between 0 and 7)
bytes of all-zero bytes.
"""
super().__init__(element_type=HelloElemType.OFPHET_VERSIONBITMAP,
length=None)
self.bitmaps = bitmaps
# ... rest of the code ...
|
aee95684b289b3a34702454fbd1fa1bbd4e48d38
|
src/gfx/gl/texture.h
|
src/gfx/gl/texture.h
|
/*
* Copyright (C) 2015 Luke San Antonio
* All rights reserved.
*/
#pragma once
#include "glad/glad.h"
#include "../texture.h"
namespace game { namespace gfx { namespace gl
{
struct GL_Texture : public Texture
{
void allocate_(Vec<int> const&, Image_Format) noexcept override;
inline void blit_data_(Volume<int> const& vol,
Color const* data) noexcept override
{
// Fuck it, it might work.
blit_data_(vol, Data_Type::Integer, data);
}
inline void blit_data_(Volume<int> const& vol,
float const* data) noexcept override
{
blit_data_(vol, Data_Type::Float, data);
}
void blit_data_(Volume<int> const&, Data_Type,
void const*) noexcept override;
GLuint tex_id;
GLenum texture_type;
Image_Format format_;
void bind(unsigned int loc) const noexcept;
};
} } }
|
/*
* Copyright (C) 2015 Luke San Antonio
* All rights reserved.
*/
#pragma once
#include "glad/glad.h"
#include "../texture.h"
namespace game { namespace gfx { namespace gl
{
struct GL_Texture : public Texture
{
void allocate_(Vec<int> const&, Image_Format) noexcept override;
inline void blit_data_(Volume<int> const& vol,
Color const* data) noexcept override
{
// Fuck it, it might work.
static_assert(sizeof(Color) == sizeof(Color::c_t) * 4,
"Color struct must not have any padding.");
blit_data_(vol, Data_Type::Integer, data);
}
inline void blit_data_(Volume<int> const& vol,
float const* data) noexcept override
{
blit_data_(vol, Data_Type::Float, data);
}
void blit_data_(Volume<int> const&, Data_Type,
void const*) noexcept override;
GLuint tex_id;
GLenum texture_type;
Image_Format format_;
void bind(unsigned int loc) const noexcept;
};
} } }
|
Verify array of colors is contigous
|
Verify array of colors is contigous
|
C
|
bsd-3-clause
|
RedCraneStudio/redcrane-engine,RedCraneStudio/redcrane-engine,RedCraneStudio/redcrane-engine,RedCraneStudio/redcrane-engine
|
c
|
## Code Before:
/*
* Copyright (C) 2015 Luke San Antonio
* All rights reserved.
*/
#pragma once
#include "glad/glad.h"
#include "../texture.h"
namespace game { namespace gfx { namespace gl
{
struct GL_Texture : public Texture
{
void allocate_(Vec<int> const&, Image_Format) noexcept override;
inline void blit_data_(Volume<int> const& vol,
Color const* data) noexcept override
{
// Fuck it, it might work.
blit_data_(vol, Data_Type::Integer, data);
}
inline void blit_data_(Volume<int> const& vol,
float const* data) noexcept override
{
blit_data_(vol, Data_Type::Float, data);
}
void blit_data_(Volume<int> const&, Data_Type,
void const*) noexcept override;
GLuint tex_id;
GLenum texture_type;
Image_Format format_;
void bind(unsigned int loc) const noexcept;
};
} } }
## Instruction:
Verify array of colors is contigous
## Code After:
/*
* Copyright (C) 2015 Luke San Antonio
* All rights reserved.
*/
#pragma once
#include "glad/glad.h"
#include "../texture.h"
namespace game { namespace gfx { namespace gl
{
struct GL_Texture : public Texture
{
void allocate_(Vec<int> const&, Image_Format) noexcept override;
inline void blit_data_(Volume<int> const& vol,
Color const* data) noexcept override
{
// Fuck it, it might work.
static_assert(sizeof(Color) == sizeof(Color::c_t) * 4,
"Color struct must not have any padding.");
blit_data_(vol, Data_Type::Integer, data);
}
inline void blit_data_(Volume<int> const& vol,
float const* data) noexcept override
{
blit_data_(vol, Data_Type::Float, data);
}
void blit_data_(Volume<int> const&, Data_Type,
void const*) noexcept override;
GLuint tex_id;
GLenum texture_type;
Image_Format format_;
void bind(unsigned int loc) const noexcept;
};
} } }
|
// ... existing code ...
Color const* data) noexcept override
{
// Fuck it, it might work.
static_assert(sizeof(Color) == sizeof(Color::c_t) * 4,
"Color struct must not have any padding.");
blit_data_(vol, Data_Type::Integer, data);
}
// ... rest of the code ...
|
f94347a734df84811b627e767a55dc99831e0076
|
src/models.py
|
src/models.py
|
from flask_sqlalchemy import Model
from sqlalchemy import Column, Integer, Unicode, UnicodeText, ForeignKey
from sqlalchemy.orm import relationship
class User(Model):
__tablename__ = "user"
ROLE_ADMIN = 0
ROLE_USER = 1
id = Column(Integer, primary_key=True)
name = Column(Unicode(64), index=True)
username = Column(Unicode(20), index=True)
password_hash = Column(Unicode(120))
role = Column(Integer, default=ROLE_USER)
postings = relationship("Posting", backref="user")
class Posting(Model):
__tablename__ = "posting"
id = Column(Integer, primary_key=True)
title = Column(Unicode(64), index=True)
description = Column(1200)
price = Column(Integer, default=100)
user_id = ForeignKey("user.id", index=True)
|
from flask_sqlalchemy import Model
from __init__ import db
class User(Model):
__tablename__ = "user"
ROLE_ADMIN = 0
ROLE_USER = 1
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.Unicode(64), index=True)
username = db.Column(db.Unicode(20), index=True)
password_hash = db.Column(db.Unicode(120))
role = db.Column(db.Integer, default=ROLE_USER)
postings = db.relationship("Posting", backref="user")
class Posting(Model):
__tablename__ = "posting"
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.Unicode(64), index=True)
description = db.Column(1200)
price = db.Column(db.Integer, default=100)
user_id = db.ForeignKey("user.id", index=True)
|
Use local database namespace instead of package.
|
Use local database namespace instead of package.
|
Python
|
mit
|
BrambleLLC/Artizanz,BrambleLLC/Artizanz,BrambleLLC/Artizanz
|
python
|
## Code Before:
from flask_sqlalchemy import Model
from sqlalchemy import Column, Integer, Unicode, UnicodeText, ForeignKey
from sqlalchemy.orm import relationship
class User(Model):
__tablename__ = "user"
ROLE_ADMIN = 0
ROLE_USER = 1
id = Column(Integer, primary_key=True)
name = Column(Unicode(64), index=True)
username = Column(Unicode(20), index=True)
password_hash = Column(Unicode(120))
role = Column(Integer, default=ROLE_USER)
postings = relationship("Posting", backref="user")
class Posting(Model):
__tablename__ = "posting"
id = Column(Integer, primary_key=True)
title = Column(Unicode(64), index=True)
description = Column(1200)
price = Column(Integer, default=100)
user_id = ForeignKey("user.id", index=True)
## Instruction:
Use local database namespace instead of package.
## Code After:
from flask_sqlalchemy import Model
from __init__ import db
class User(Model):
__tablename__ = "user"
ROLE_ADMIN = 0
ROLE_USER = 1
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.Unicode(64), index=True)
username = db.Column(db.Unicode(20), index=True)
password_hash = db.Column(db.Unicode(120))
role = db.Column(db.Integer, default=ROLE_USER)
postings = db.relationship("Posting", backref="user")
class Posting(Model):
__tablename__ = "posting"
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.Unicode(64), index=True)
description = db.Column(1200)
price = db.Column(db.Integer, default=100)
user_id = db.ForeignKey("user.id", index=True)
|
...
from flask_sqlalchemy import Model
from __init__ import db
class User(Model):
...
__tablename__ = "user"
ROLE_ADMIN = 0
ROLE_USER = 1
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.Unicode(64), index=True)
username = db.Column(db.Unicode(20), index=True)
password_hash = db.Column(db.Unicode(120))
role = db.Column(db.Integer, default=ROLE_USER)
postings = db.relationship("Posting", backref="user")
class Posting(Model):
__tablename__ = "posting"
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.Unicode(64), index=True)
description = db.Column(1200)
price = db.Column(db.Integer, default=100)
user_id = db.ForeignKey("user.id", index=True)
...
|
7bf4083ef44585116f0eff86753080612a26b374
|
src/__init__.py
|
src/__init__.py
|
from bayeslite.api import barplot
from bayeslite.api import cardinality
from bayeslite.api import draw_crosscat
from bayeslite.api import estimate_log_likelihood
from bayeslite.api import heatmap
from bayeslite.api import histogram
from bayeslite.api import mi_hist
from bayeslite.api import nullify
from bayeslite.api import pairplot
from bayeslite.api import plot_crosscat_chain_diagnostics
"""Main bdbcontrib API.
The bdbcontrib module servers a sandbox for experimental and semi-stable
features that are not yet ready for integreation to the bayeslite repository.
"""
__all__ = [
'barplot',
'cardinality',
'draw_crosscat',
'estimate_log_likelihood',
'heatmap',
'histogram',
'mi_hist',
'nullify',
'pairplot',
'plot_crosscat_chain_diagnostics'
]
|
from bdbcontrib.api import barplot
from bdbcontrib.api import cardinality
from bdbcontrib.api import draw_crosscat
from bdbcontrib.api import estimate_log_likelihood
from bdbcontrib.api import heatmap
from bdbcontrib.api import histogram
from bdbcontrib.api import mi_hist
from bdbcontrib.api import nullify
from bdbcontrib.api import pairplot
from bdbcontrib.api import plot_crosscat_chain_diagnostics
"""Main bdbcontrib API.
The bdbcontrib module servers a sandbox for experimental and semi-stable
features that are not yet ready for integreation to the bayeslite repository.
"""
__all__ = [
'barplot',
'cardinality',
'draw_crosscat',
'estimate_log_likelihood',
'heatmap',
'histogram',
'mi_hist',
'nullify',
'pairplot',
'plot_crosscat_chain_diagnostics'
]
|
Fix big from bayeslite to bdbcontrib.
|
Fix big from bayeslite to bdbcontrib.
|
Python
|
apache-2.0
|
probcomp/bdbcontrib,probcomp/bdbcontrib
|
python
|
## Code Before:
from bayeslite.api import barplot
from bayeslite.api import cardinality
from bayeslite.api import draw_crosscat
from bayeslite.api import estimate_log_likelihood
from bayeslite.api import heatmap
from bayeslite.api import histogram
from bayeslite.api import mi_hist
from bayeslite.api import nullify
from bayeslite.api import pairplot
from bayeslite.api import plot_crosscat_chain_diagnostics
"""Main bdbcontrib API.
The bdbcontrib module servers a sandbox for experimental and semi-stable
features that are not yet ready for integreation to the bayeslite repository.
"""
__all__ = [
'barplot',
'cardinality',
'draw_crosscat',
'estimate_log_likelihood',
'heatmap',
'histogram',
'mi_hist',
'nullify',
'pairplot',
'plot_crosscat_chain_diagnostics'
]
## Instruction:
Fix big from bayeslite to bdbcontrib.
## Code After:
from bdbcontrib.api import barplot
from bdbcontrib.api import cardinality
from bdbcontrib.api import draw_crosscat
from bdbcontrib.api import estimate_log_likelihood
from bdbcontrib.api import heatmap
from bdbcontrib.api import histogram
from bdbcontrib.api import mi_hist
from bdbcontrib.api import nullify
from bdbcontrib.api import pairplot
from bdbcontrib.api import plot_crosscat_chain_diagnostics
"""Main bdbcontrib API.
The bdbcontrib module servers a sandbox for experimental and semi-stable
features that are not yet ready for integreation to the bayeslite repository.
"""
__all__ = [
'barplot',
'cardinality',
'draw_crosscat',
'estimate_log_likelihood',
'heatmap',
'histogram',
'mi_hist',
'nullify',
'pairplot',
'plot_crosscat_chain_diagnostics'
]
|
# ... existing code ...
from bdbcontrib.api import barplot
from bdbcontrib.api import cardinality
from bdbcontrib.api import draw_crosscat
from bdbcontrib.api import estimate_log_likelihood
from bdbcontrib.api import heatmap
from bdbcontrib.api import histogram
from bdbcontrib.api import mi_hist
from bdbcontrib.api import nullify
from bdbcontrib.api import pairplot
from bdbcontrib.api import plot_crosscat_chain_diagnostics
"""Main bdbcontrib API.
# ... rest of the code ...
|
f800d11aa5a198fcb2193773b30e4e066a226321
|
code/handle-output.py
|
code/handle-output.py
|
import synthetic_data_experiments as sde
import logging
if __name__ == "__main__":
args = sde.get_integrous_arguments_values()
|
import synthetic_data_experiments as sde
import logging
if __name__ == "__main__":
args = sde.get_integrous_arguments_values()
for repeat_idx in xrange(args.num_repeats) :
resu_dir = "%s/repeat_%d" % (args.resu_dir, repeat_idx)
data_dir = '%s/repeat_%d' % (args.data_dir, repeat_idx)
|
Set resu dir and data dir
|
Set resu dir and data dir
|
Python
|
mit
|
chagaz/sfan,chagaz/sfan,chagaz/sfan,chagaz/sfan,chagaz/sfan
|
python
|
## Code Before:
import synthetic_data_experiments as sde
import logging
if __name__ == "__main__":
args = sde.get_integrous_arguments_values()
## Instruction:
Set resu dir and data dir
## Code After:
import synthetic_data_experiments as sde
import logging
if __name__ == "__main__":
args = sde.get_integrous_arguments_values()
for repeat_idx in xrange(args.num_repeats) :
resu_dir = "%s/repeat_%d" % (args.resu_dir, repeat_idx)
data_dir = '%s/repeat_%d' % (args.data_dir, repeat_idx)
|
# ... existing code ...
if __name__ == "__main__":
args = sde.get_integrous_arguments_values()
for repeat_idx in xrange(args.num_repeats) :
resu_dir = "%s/repeat_%d" % (args.resu_dir, repeat_idx)
data_dir = '%s/repeat_%d' % (args.data_dir, repeat_idx)
# ... rest of the code ...
|
787fde3532ce508e9c1bad24e0edfcfe969615aa
|
sticks.c
|
sticks.c
|
typedef struct {
int hands[2][2];
int turn;
} Sticks;
void sticks_create(Sticks *sticks) {
sticks->hands[0][0] = 1;
sticks->hands[0][1] = 1;
sticks->hands[1][0] = 1;
sticks->hands[1][1] = 1;
sticks->turn = 0;
}
void sticks_play(Sticks *sticks, int actor, int target) {
sticks->hands[!sticks->turn][target] += sticks->hands[sticks->turn][actor];
if (sticks->hands[!sticks->turn][target] >= 5) {
sticks->hands[!sticks->turn][target] = 0;
}
sticks->turn = !sticks->turn;
}
int main(void) {
Sticks sticks;
sticks_create(&sticks);
printf("%d\n", sticks.hands[0][0]);
printf("%d\n", sticks.turn);
sticks_play(&sticks, 0, 1);
printf("%d\n", sticks.hands[1][1]);
printf("%d\n", sticks.turn);
}
|
typedef struct {
int hands[2][2];
int turn;
} Sticks;
void sticks_create(Sticks *sticks) {
sticks->hands[0][0] = 1;
sticks->hands[0][1] = 1;
sticks->hands[1][0] = 1;
sticks->hands[1][1] = 1;
sticks->turn = 0;
}
void sticks_play(Sticks *sticks, int x, int y) {
sticks->hands[!sticks->turn][y] += sticks->hands[sticks->turn][x];
if (sticks->hands[!sticks->turn][y] >= 5) {
sticks->hands[!sticks->turn][y] = 0;
}
sticks->turn = !sticks->turn;
}
int main(void) {
Sticks sticks;
sticks_create(&sticks);
printf("%d\n", sticks.hands[0][0]);
printf("%d\n", sticks.turn);
sticks_play(&sticks, 0, 1);
printf("%d\n", sticks.hands[1][1]);
printf("%d\n", sticks.turn);
}
|
Rename actor/target to x/y to allow for shifting
|
Rename actor/target to x/y to allow for shifting
|
C
|
mit
|
tysonzero/c-ann
|
c
|
## Code Before:
typedef struct {
int hands[2][2];
int turn;
} Sticks;
void sticks_create(Sticks *sticks) {
sticks->hands[0][0] = 1;
sticks->hands[0][1] = 1;
sticks->hands[1][0] = 1;
sticks->hands[1][1] = 1;
sticks->turn = 0;
}
void sticks_play(Sticks *sticks, int actor, int target) {
sticks->hands[!sticks->turn][target] += sticks->hands[sticks->turn][actor];
if (sticks->hands[!sticks->turn][target] >= 5) {
sticks->hands[!sticks->turn][target] = 0;
}
sticks->turn = !sticks->turn;
}
int main(void) {
Sticks sticks;
sticks_create(&sticks);
printf("%d\n", sticks.hands[0][0]);
printf("%d\n", sticks.turn);
sticks_play(&sticks, 0, 1);
printf("%d\n", sticks.hands[1][1]);
printf("%d\n", sticks.turn);
}
## Instruction:
Rename actor/target to x/y to allow for shifting
## Code After:
typedef struct {
int hands[2][2];
int turn;
} Sticks;
void sticks_create(Sticks *sticks) {
sticks->hands[0][0] = 1;
sticks->hands[0][1] = 1;
sticks->hands[1][0] = 1;
sticks->hands[1][1] = 1;
sticks->turn = 0;
}
void sticks_play(Sticks *sticks, int x, int y) {
sticks->hands[!sticks->turn][y] += sticks->hands[sticks->turn][x];
if (sticks->hands[!sticks->turn][y] >= 5) {
sticks->hands[!sticks->turn][y] = 0;
}
sticks->turn = !sticks->turn;
}
int main(void) {
Sticks sticks;
sticks_create(&sticks);
printf("%d\n", sticks.hands[0][0]);
printf("%d\n", sticks.turn);
sticks_play(&sticks, 0, 1);
printf("%d\n", sticks.hands[1][1]);
printf("%d\n", sticks.turn);
}
|
# ... existing code ...
sticks->turn = 0;
}
void sticks_play(Sticks *sticks, int x, int y) {
sticks->hands[!sticks->turn][y] += sticks->hands[sticks->turn][x];
if (sticks->hands[!sticks->turn][y] >= 5) {
sticks->hands[!sticks->turn][y] = 0;
}
sticks->turn = !sticks->turn;
}
# ... rest of the code ...
|
c37500894b309a691009b87b1305935ee57648cb
|
tests/test_test.py
|
tests/test_test.py
|
import pytest
from web_test_base import *
"""
A class to test new features without running all of the tests.
Usage:
py.test tests/test_test.py -rsx
"""
class TestTest(WebTestBase):
urls_to_get = [
"http://aidtransparency.net/"
]
text_to_find = [
("information", '//*[@id="home-strapline"]/h1')
]
def test_locate_text(self, loaded_request, text_to_find):
"""
Tests that each page contains lthe specified text at the required location.
"""
result = self._get_text_from_xpath(loaded_request, text_to_find[1])
assert self._substring_in_list(text_to_find[0], result)
|
import pytest
from web_test_base import *
"""
A class to test new features without running all of the tests.
Usage:
py.test tests/test_test.py -rsx
"""
class TestTest(WebTestBase):
urls_to_get = [
"http://iatistandard.org/"
, "http://iatistandard.org/202/namespaces-extensions/"
]
text_to_find = [
("technical publishing framework", '//*[@id="home-strapline"]/h1')
]
def test_locate_text(self, loaded_request, text_to_find):
"""
Tests that each page contains lthe specified text at the required location.
"""
result = self._get_text_from_xpath(loaded_request, text_to_find[1])
assert self._substring_in_list(text_to_find[0], result)
|
Add test text finding that fails
|
Add test text finding that fails
This indicates that a different method of specifying how and where
to find text within a document is required.
|
Python
|
mit
|
IATI/IATI-Website-Tests
|
python
|
## Code Before:
import pytest
from web_test_base import *
"""
A class to test new features without running all of the tests.
Usage:
py.test tests/test_test.py -rsx
"""
class TestTest(WebTestBase):
urls_to_get = [
"http://aidtransparency.net/"
]
text_to_find = [
("information", '//*[@id="home-strapline"]/h1')
]
def test_locate_text(self, loaded_request, text_to_find):
"""
Tests that each page contains lthe specified text at the required location.
"""
result = self._get_text_from_xpath(loaded_request, text_to_find[1])
assert self._substring_in_list(text_to_find[0], result)
## Instruction:
Add test text finding that fails
This indicates that a different method of specifying how and where
to find text within a document is required.
## Code After:
import pytest
from web_test_base import *
"""
A class to test new features without running all of the tests.
Usage:
py.test tests/test_test.py -rsx
"""
class TestTest(WebTestBase):
urls_to_get = [
"http://iatistandard.org/"
, "http://iatistandard.org/202/namespaces-extensions/"
]
text_to_find = [
("technical publishing framework", '//*[@id="home-strapline"]/h1')
]
def test_locate_text(self, loaded_request, text_to_find):
"""
Tests that each page contains the specified text at the required location.
"""
result = self._get_text_from_xpath(loaded_request, text_to_find[1])
assert self._substring_in_list(text_to_find[0], result)
|
// ... existing code ...
"""
class TestTest(WebTestBase):
urls_to_get = [
"http://iatistandard.org/"
, "http://iatistandard.org/202/namespaces-extensions/"
]
text_to_find = [
("technical publishing framework", '//*[@id="home-strapline"]/h1')
]
def test_locate_text(self, loaded_request, text_to_find):
// ... rest of the code ...
|
7a59999961b67dbd480c80a4a4f95fa6738b2949
|
day-20/solution.py
|
day-20/solution.py
|
from __future__ import print_function
def findFirst(data, target):
for idx, value in enumerate(data):
if value >= target:
return idx
return None
target = 34000000
# Target is achieved at itself/10, so reasonable upper bound.
upperbound = target // 10
# Use a variation of Eratosthenes' sieve to compute the results
sieve1 = [10] * (upperbound + 1)
sieve2 = [10] * (upperbound + 1)
for x in range(1, upperbound):
for y in range(x, upperbound, x):
sieve1[y] += 10 * x
for y in range(x, min(50 * x, upperbound) + 1, x):
sieve2[y] += 11 * x
print("House", findFirst(sieve1, target))
print("House", findFirst(sieve2, target))
|
from __future__ import print_function
def findFirst(data, target):
return next(idx for idx, value in enumerate(data) if value >= target)
target = 34000000
# Target is achieved at itself/10, so reasonable upper bound.
upperbound = target // 10
# Use a variation of Eratosthenes' sieve to compute the results
sieve1 = [10] * (upperbound + 1)
sieve2 = [10] * (upperbound + 1)
for x in range(1, upperbound):
for y in range(x, upperbound, x):
sieve1[y] += 10 * x
for y in range(x, min(50 * x, upperbound) + 1, x):
sieve2[y] += 11 * x
print("House", findFirst(sieve1, target))
print("House", findFirst(sieve2, target))
|
Improve getting the first valid value.
|
Improve getting the first valid value.
|
Python
|
mit
|
bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode
|
python
|
## Code Before:
from __future__ import print_function
def findFirst(data, target):
for idx, value in enumerate(data):
if value >= target:
return idx
return None
target = 34000000
# Target is achieved at itself/10, so reasonable upper bound.
upperbound = target // 10
# Use a variation of Eratosthenes' sieve to compute the results
sieve1 = [10] * (upperbound + 1)
sieve2 = [10] * (upperbound + 1)
for x in range(1, upperbound):
for y in range(x, upperbound, x):
sieve1[y] += 10 * x
for y in range(x, min(50 * x, upperbound) + 1, x):
sieve2[y] += 11 * x
print("House", findFirst(sieve1, target))
print("House", findFirst(sieve2, target))
## Instruction:
Improve getting the first valid value.
## Code After:
from __future__ import print_function
def findFirst(data, target):
return next(idx for idx, value in enumerate(data) if value >= target)
target = 34000000
# Target is achieved at itself/10, so reasonable upper bound.
upperbound = target // 10
# Use a variation of Eratosthenes' sieve to compute the results
sieve1 = [10] * (upperbound + 1)
sieve2 = [10] * (upperbound + 1)
for x in range(1, upperbound):
for y in range(x, upperbound, x):
sieve1[y] += 10 * x
for y in range(x, min(50 * x, upperbound) + 1, x):
sieve2[y] += 11 * x
print("House", findFirst(sieve1, target))
print("House", findFirst(sieve2, target))
|
...
from __future__ import print_function
def findFirst(data, target):
return next(idx for idx, value in enumerate(data) if value >= target)
target = 34000000
...
|
c32e9666925de601ef2a5dfd9b1018d7bfac00e6
|
setup.py
|
setup.py
|
"""Django/PostgreSQL implementation of the Meteor DDP service."""
import os.path
from setuptools import setup, find_packages
setup(
name='django-ddp',
version='0.1.0',
description=__doc__,
long_description=open('README.rst').read(),
author='Tyson Clugg',
author_email='[email protected]',
url='https://github.com/commoncode/django-ddp',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django>=1.7',
'psycopg2>=2.5.4',
],
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Internet :: WWW/HTTP",
],
)
|
"""Django/PostgreSQL implementation of the Meteor DDP service."""
import os.path
from setuptools import setup, find_packages
setup(
name='django-ddp',
version='0.1.1',
description=__doc__,
long_description=open('README.rst').read(),
author='Tyson Clugg',
author_email='[email protected]',
url='https://github.com/commoncode/django-ddp',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django>=1.7',
'psycopg2>=2.5.4',
'gevent>=1.0',
'gevent-websocket>=0.9',
'meteor-ejson>=1.0',
'psycogreen>=1.0',
],
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Internet :: WWW/HTTP",
],
)
|
Add missing dependencies, new release (0.1.1).
|
Add missing dependencies, new release (0.1.1).
|
Python
|
mit
|
PythonicNinja/django-ddp,commoncode/django-ddp,django-ddp/django-ddp,PythonicNinja/django-ddp,django-ddp/django-ddp,django-ddp/django-ddp,commoncode/django-ddp,commoncode/django-ddp,commoncode/django-ddp,django-ddp/django-ddp,PythonicNinja/django-ddp
|
python
|
## Code Before:
"""Django/PostgreSQL implementation of the Meteor DDP service."""
import os.path
from setuptools import setup, find_packages
setup(
name='django-ddp',
version='0.1.0',
description=__doc__,
long_description=open('README.rst').read(),
author='Tyson Clugg',
author_email='[email protected]',
url='https://github.com/commoncode/django-ddp',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django>=1.7',
'psycopg2>=2.5.4',
],
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Internet :: WWW/HTTP",
],
)
## Instruction:
Add missing dependencies, new release (0.1.1).
## Code After:
"""Django/PostgreSQL implementation of the Meteor DDP service."""
import os.path
from setuptools import setup, find_packages
setup(
name='django-ddp',
version='0.1.1',
description=__doc__,
long_description=open('README.rst').read(),
author='Tyson Clugg',
author_email='[email protected]',
url='https://github.com/commoncode/django-ddp',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django>=1.7',
'psycopg2>=2.5.4',
'gevent>=1.0',
'gevent-websocket>=0.9',
'meteor-ejson>=1.0',
'psycogreen>=1.0',
],
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Internet :: WWW/HTTP",
],
)
|
...
setup(
name='django-ddp',
version='0.1.1',
description=__doc__,
long_description=open('README.rst').read(),
author='Tyson Clugg',
...
install_requires=[
'Django>=1.7',
'psycopg2>=2.5.4',
'gevent>=1.0',
'gevent-websocket>=0.9',
'meteor-ejson>=1.0',
'psycogreen>=1.0',
],
classifiers=[
"Programming Language :: Python :: 2",
...
|
de97d95d7746cbbf6c2c53a660553ce56d294288
|
tests/test_unit.py
|
tests/test_unit.py
|
from __future__ import (division, unicode_literals, print_function,
absolute_import)
from pytest import raises, yield_fixture, mark
from lantz_core import unit
from lantz_core.unit import (set_unit_registry, get_unit_registry,
to_float, to_quantity)
try:
from pint import UnitRegistry
except ImportError:
pass
@yield_fixture
def teardown():
unit.UNIT_REGISTRY = None
yield
unit.UNIT_REGISTRY = None
@mark.skipif(unit.UNIT_SUPPORT is False, reason="Requires Pint")
def test_set_unit_registry(teardown):
ureg = UnitRegistry()
set_unit_registry(ureg)
assert get_unit_registry() is ureg
@mark.skipif(unit.UNIT_SUPPORT is False, reason="Requires Pint")
def test_reset_unit_registry(teardown):
ureg = UnitRegistry()
set_unit_registry(ureg)
with raises(ValueError):
set_unit_registry(ureg)
def test_converters(teardown):
"""Test to_quantity and to_float utility functions.
"""
val = 1.0
assert to_float(to_quantity(val, 'A')) == val
|
from __future__ import (division, unicode_literals, print_function,
absolute_import)
from pytest import raises, yield_fixture, mark
from lantz_core import unit
from lantz_core.unit import (set_unit_registry, get_unit_registry,
to_float, to_quantity)
try:
from pint import UnitRegistry
except ImportError:
pass
@yield_fixture
def teardown():
unit.UNIT_REGISTRY = None
yield
unit.UNIT_REGISTRY = None
@mark.skipif(unit.UNIT_SUPPORT is False, reason="Requires Pint")
def test_set_unit_registry(teardown):
ureg = UnitRegistry()
set_unit_registry(ureg)
assert get_unit_registry() is ureg
@mark.skipif(unit.UNIT_SUPPORT is False, reason="Requires Pint")
def test_reset_unit_registry(teardown):
ureg = UnitRegistry()
set_unit_registry(ureg)
with raises(ValueError):
set_unit_registry(ureg)
def test_converters(teardown):
"""Test to_quantity and to_float utility functions.
"""
val = 1.0
assert to_float(val) == val
assert to_float(to_quantity(val, 'A')) == val
|
Add missing test for to_float applied on a float (when pint is present).
|
Add missing test for to_float applied on a float (when pint is present).
|
Python
|
bsd-3-clause
|
MatthieuDartiailh/lantz_core
|
python
|
## Code Before:
from __future__ import (division, unicode_literals, print_function,
absolute_import)
from pytest import raises, yield_fixture, mark
from lantz_core import unit
from lantz_core.unit import (set_unit_registry, get_unit_registry,
to_float, to_quantity)
try:
from pint import UnitRegistry
except ImportError:
pass
@yield_fixture
def teardown():
unit.UNIT_REGISTRY = None
yield
unit.UNIT_REGISTRY = None
@mark.skipif(unit.UNIT_SUPPORT is False, reason="Requires Pint")
def test_set_unit_registry(teardown):
ureg = UnitRegistry()
set_unit_registry(ureg)
assert get_unit_registry() is ureg
@mark.skipif(unit.UNIT_SUPPORT is False, reason="Requires Pint")
def test_reset_unit_registry(teardown):
ureg = UnitRegistry()
set_unit_registry(ureg)
with raises(ValueError):
set_unit_registry(ureg)
def test_converters(teardown):
"""Test to_quantity and to_float utility functions.
"""
val = 1.0
assert to_float(to_quantity(val, 'A')) == val
## Instruction:
Add missing test for to_float applied on a float (when pint is present).
## Code After:
from __future__ import (division, unicode_literals, print_function,
absolute_import)
from pytest import raises, yield_fixture, mark
from lantz_core import unit
from lantz_core.unit import (set_unit_registry, get_unit_registry,
to_float, to_quantity)
try:
from pint import UnitRegistry
except ImportError:
pass
@yield_fixture
def teardown():
unit.UNIT_REGISTRY = None
yield
unit.UNIT_REGISTRY = None
@mark.skipif(unit.UNIT_SUPPORT is False, reason="Requires Pint")
def test_set_unit_registry(teardown):
ureg = UnitRegistry()
set_unit_registry(ureg)
assert get_unit_registry() is ureg
@mark.skipif(unit.UNIT_SUPPORT is False, reason="Requires Pint")
def test_reset_unit_registry(teardown):
ureg = UnitRegistry()
set_unit_registry(ureg)
with raises(ValueError):
set_unit_registry(ureg)
def test_converters(teardown):
"""Test to_quantity and to_float utility functions.
"""
val = 1.0
assert to_float(val) == val
assert to_float(to_quantity(val, 'A')) == val
|
# ... existing code ...
"""
val = 1.0
assert to_float(val) == val
assert to_float(to_quantity(val, 'A')) == val
# ... rest of the code ...
|
ec6236a16d88eae7f079448946cb91b1484f92e5
|
inc/JsonSerializer.h
|
inc/JsonSerializer.h
|
//! \file
#ifndef JSONSERIALIZER_H
#define JSONSERIALIZER_H
#include <iostream>
#include "ArticleCollection.h"
namespace WikiWalker
{
/*! Serialize ArticleCollection from and to a custom JSON format
*/
class JsonSerializer
{
public:
/*! Serialize ArticleCollection to JSON in an output stream
* \param a pointer to article to be output
* \param os out stream to output to.
* YOU are responsible for opening and closing the stream
*/
void serialize(const ArticleCollection& collection,
std::ostream& outstream);
};
}
#endif // JSONSERIALIZER_H
|
//! \file
#ifndef WIKIWALKER_JSONSERIALIZER_H
#define WIKIWALKER_JSONSERIALIZER_H
#include <iostream>
#include "ArticleCollection.h"
namespace WikiWalker
{
/*! Serialize ArticleCollection from and to a custom JSON format
*/
class JsonSerializer
{
public:
/*! Serialize ArticleCollection to JSON in an output stream
* \param a pointer to article to be output
* \param os out stream to output to.
* YOU are responsible for opening and closing the stream
*/
void serialize(const ArticleCollection& collection,
std::ostream& outstream);
};
}
#endif // WIKIWALKER_JSONSERIALIZER_H
|
Apply prefix to include guard
|
Apply prefix to include guard
|
C
|
mit
|
dueringa/WikiWalker
|
c
|
## Code Before:
//! \file
#ifndef JSONSERIALIZER_H
#define JSONSERIALIZER_H
#include <iostream>
#include "ArticleCollection.h"
namespace WikiWalker
{
/*! Serialize ArticleCollection from and to a custom JSON format
*/
class JsonSerializer
{
public:
/*! Serialize ArticleCollection to JSON in an output stream
* \param a pointer to article to be output
* \param os out stream to output to.
* YOU are responsible for opening and closing the stream
*/
void serialize(const ArticleCollection& collection,
std::ostream& outstream);
};
}
#endif // JSONSERIALIZER_H
## Instruction:
Apply prefix to include guard
## Code After:
//! \file
#ifndef WIKIWALKER_JSONSERIALIZER_H
#define WIKIWALKER_JSONSERIALIZER_H
#include <iostream>
#include "ArticleCollection.h"
namespace WikiWalker
{
/*! Serialize ArticleCollection from and to a custom JSON format
*/
class JsonSerializer
{
public:
/*! Serialize ArticleCollection to JSON in an output stream
* \param a pointer to article to be output
* \param os out stream to output to.
* YOU are responsible for opening and closing the stream
*/
void serialize(const ArticleCollection& collection,
std::ostream& outstream);
};
}
#endif // WIKIWALKER_JSONSERIALIZER_H
|
...
//! \file
#ifndef WIKIWALKER_JSONSERIALIZER_H
#define WIKIWALKER_JSONSERIALIZER_H
#include <iostream>
...
std::ostream& outstream);
};
}
#endif // WIKIWALKER_JSONSERIALIZER_H
...
|
5f9d8b30313200d9baa55ea468ad5b94481ba871
|
bianca/orm/repository.py
|
bianca/orm/repository.py
|
import uuid
from db import *
from datetime import datetime
class Repository(Base):
"""
Commit():
description: The SQLAlchemy ORM for the repository table
"""
__tablename__ = 'repositories'
id = Column(String, primary_key=True)
name = Column(String)
url = Column(String)
creation_date = Column(String)
ingestion_date = Column(String)
last_ingested_commit = Column(String)
analysis_date = Column(String)
status = Column(String)
email = Column(String)
listed = Column(Boolean)
last_data_dump = Column(String)
def __init__(self, *args, **kwargs):
self.id = str(uuid.uuid1())
self.creation_date = str(datetime.now().replace(microsecond=0))
self.url = kwargs.pop('url', None)
def serialize(self):
return {
'id': self.id,
'name': self.name,
'url': self.url,
'creation_date': self.creation_date,
'ingestion_date': self.ingestion_date,
'last_ingested_commit': self.last_ingested_commit,
'analysis_date': self.analysis_date,
'status': self.status,
'email': self.email,
'listed': self.listed,
'last_data_dump': self.last_data_dump
}
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
# def __repr__(self):
# return "<Repository: %s - %s>" % (self.name, self.id)
|
import uuid
from db import *
from datetime import datetime
class Repository(Base):
"""
Commit():
description: The SQLAlchemy ORM for the repository table
"""
__tablename__ = 'repositories'
id = Column(String, primary_key=True)
name = Column(String)
url = Column(String)
creation_date = Column(String)
ingestion_date = Column(String)
last_ingested_commit = Column(String)
analysis_date = Column(String)
status = Column(String)
email = Column(String)
listed = Column(Boolean)
last_data_dump = Column(String)
def __init__(self, *args, **kwargs):
self.id = str(uuid.uuid1())
self.creation_date = str(datetime.now().replace(microsecond=0))
self.url = kwargs.pop('url', None)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
def __repr__(self):
return "<Repository: %s - %s>" % (self.name, self.id)
|
Make repo serializable via as_dict
|
Make repo serializable via as_dict
|
Python
|
mit
|
bumper-app/bumper-bianca,bumper-app/bumper-bianca
|
python
|
## Code Before:
import uuid
from db import *
from datetime import datetime
class Repository(Base):
"""
Commit():
description: The SQLAlchemy ORM for the repository table
"""
__tablename__ = 'repositories'
id = Column(String, primary_key=True)
name = Column(String)
url = Column(String)
creation_date = Column(String)
ingestion_date = Column(String)
last_ingested_commit = Column(String)
analysis_date = Column(String)
status = Column(String)
email = Column(String)
listed = Column(Boolean)
last_data_dump = Column(String)
def __init__(self, *args, **kwargs):
self.id = str(uuid.uuid1())
self.creation_date = str(datetime.now().replace(microsecond=0))
self.url = kwargs.pop('url', None)
def serialize(self):
return {
'id': self.id,
'name': self.name,
'url': self.url,
'creation_date': self.creation_date,
'ingestion_date': self.ingestion_date,
'last_ingested_commit': self.last_ingested_commit,
'analysis_date': self.analysis_date,
'status': self.status,
'email': self.email,
'listed': self.listed,
'last_data_dump': self.last_data_dump
}
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
# def __repr__(self):
# return "<Repository: %s - %s>" % (self.name, self.id)
## Instruction:
Make repo serializable via as_dict
## Code After:
import uuid
from db import *
from datetime import datetime
class Repository(Base):
"""
Commit():
description: The SQLAlchemy ORM for the repository table
"""
__tablename__ = 'repositories'
id = Column(String, primary_key=True)
name = Column(String)
url = Column(String)
creation_date = Column(String)
ingestion_date = Column(String)
last_ingested_commit = Column(String)
analysis_date = Column(String)
status = Column(String)
email = Column(String)
listed = Column(Boolean)
last_data_dump = Column(String)
def __init__(self, *args, **kwargs):
self.id = str(uuid.uuid1())
self.creation_date = str(datetime.now().replace(microsecond=0))
self.url = kwargs.pop('url', None)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
def __repr__(self):
return "<Repository: %s - %s>" % (self.name, self.id)
|
// ... existing code ...
self.creation_date = str(datetime.now().replace(microsecond=0))
self.url = kwargs.pop('url', None)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
def __repr__(self):
return "<Repository: %s - %s>" % (self.name, self.id)
// ... rest of the code ...
|
c1a6bcc1d2a4353728d736df77812c2f605a7f3b
|
haval-algorithm/src/main/java/com/m4gik/util/Util.java
|
haval-algorithm/src/main/java/com/m4gik/util/Util.java
|
package com.m4gik.util;
/**
* A collection of utility methods used throughout this project. This class
* mainly contains methods for implementing the operations of presenting data.
*
* @author Michał Szczygieł <[email protected]>
*
*/
public class Util {
}
|
package com.m4gik.util;
/**
* A collection of utility methods used throughout this project. This class
* mainly contains methods for implementing the operations of presenting data.
*
* @author Michał Szczygieł <[email protected]>
*
*/
public class Util {
/**
* Base-64 chars.
*/
private static final String BASE64_CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz./";
/**
* Base-64 charset.
*/
private static final char[] BASE64_CHARSET = BASE64_CHARS.toCharArray();
/**
* Hex charset.
*/
private static final char[] HEX_DIGITS = "0123456789ABCDEF".toCharArray();
/**
* Trivial constructor to enforce Singleton pattern.
*/
private Util() {
super();
}
}
|
Add base static fields and private constructor.
|
Add base static fields and private constructor.
|
Java
|
mit
|
M4GiK/tosi-projects,M4GiK/tosi-projects
|
java
|
## Code Before:
package com.m4gik.util;
/**
* A collection of utility methods used throughout this project. This class
* mainly contains methods for implementing the operations of presenting data.
*
* @author Michał Szczygieł <[email protected]>
*
*/
public class Util {
}
## Instruction:
Add base static fields and private constructor.
## Code After:
package com.m4gik.util;
/**
* A collection of utility methods used throughout this project. This class
* mainly contains methods for implementing the operations of presenting data.
*
* @author Michał Szczygieł <[email protected]>
*
*/
public class Util {
/**
* Base-64 chars.
*/
private static final String BASE64_CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz./";
/**
* Base-64 charset.
*/
private static final char[] BASE64_CHARSET = BASE64_CHARS.toCharArray();
/**
* Hex charset.
*/
private static final char[] HEX_DIGITS = "0123456789ABCDEF".toCharArray();
/**
* Trivial constructor to enforce Singleton pattern.
*/
private Util() {
super();
}
}
|
# ... existing code ...
*/
public class Util {
/**
* Base-64 chars.
*/
private static final String BASE64_CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz./";
/**
* Base-64 charset.
*/
private static final char[] BASE64_CHARSET = BASE64_CHARS.toCharArray();
/**
* Hex charset.
*/
private static final char[] HEX_DIGITS = "0123456789ABCDEF".toCharArray();
/**
* Trivial constructor to enforce Singleton pattern.
*/
private Util() {
super();
}
}
# ... rest of the code ...
|
f5d56b0c54af414f02721a1a02a0eaf80dbba898
|
client/python/unrealcv/util.py
|
client/python/unrealcv/util.py
|
import numpy as np
import PIL
from io import BytesIO
# StringIO module is removed in python3, use io module
def read_png(res):
import PIL.Image
img = PIL.Image.open(BytesIO(res))
return np.asarray(img)
def read_npy(res):
# res is a binary buffer
return np.load(BytesIO(res))
|
import numpy as np
import PIL.Image
from io import BytesIO
# StringIO module is removed in python3, use io module
def read_png(res):
img = None
try:
PIL_img = PIL.Image.open(BytesIO(res))
img = np.asarray(PIL_img)
except:
print('Read png can not parse response %s' % str(res[:20]))
return img
def read_npy(res):
# res is a binary buffer
arr = None
try:
arr = np.load(BytesIO(res))
except:
print('Read npy can not parse response %s' % str(res[:20]))
return arr
|
Handle exceptions in read_png and read_npy.
|
Handle exceptions in read_png and read_npy.
|
Python
|
mit
|
unrealcv/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv
|
python
|
## Code Before:
import numpy as np
import PIL
from io import BytesIO
# StringIO module is removed in python3, use io module
def read_png(res):
import PIL.Image
img = PIL.Image.open(BytesIO(res))
return np.asarray(img)
def read_npy(res):
# res is a binary buffer
return np.load(BytesIO(res))
## Instruction:
Handle exceptions in read_png and read_npy.
## Code After:
import numpy as np
import PIL.Image
from io import BytesIO
# StringIO module is removed in python3, use io module
def read_png(res):
img = None
try:
PIL_img = PIL.Image.open(BytesIO(res))
img = np.asarray(PIL_img)
except:
print('Read png can not parse response %s' % str(res[:20]))
return img
def read_npy(res):
# res is a binary buffer
arr = None
try:
arr = np.load(BytesIO(res))
except:
print('Read npy can not parse response %s' % str(res[:20]))
return arr
|
# ... existing code ...
import numpy as np
import PIL.Image
from io import BytesIO
# StringIO module is removed in python3, use io module
def read_png(res):
img = None
try:
PIL_img = PIL.Image.open(BytesIO(res))
img = np.asarray(PIL_img)
except:
print('Read png can not parse response %s' % str(res[:20]))
return img
def read_npy(res):
# res is a binary buffer
arr = None
try:
arr = np.load(BytesIO(res))
except:
print('Read npy can not parse response %s' % str(res[:20]))
return arr
# ... rest of the code ...
|
aaaf165b247a1a8ea5cd2936d9fd1eefe5e580f9
|
arch/arm/mach-kirkwood/board-iomega_ix2_200.c
|
arch/arm/mach-kirkwood/board-iomega_ix2_200.c
|
/*
* arch/arm/mach-kirkwood/board-iomega_ix2_200.c
*
* Iomega StorCenter ix2-200
*
* This file is licensed under the terms of the GNU General Public
* License version 2. This program is licensed "as is" without any
* warranty of any kind, whether express or implied.
*/
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/mv643xx_eth.h>
#include <linux/ethtool.h>
#include "common.h"
static struct mv643xx_eth_platform_data iomega_ix2_200_ge00_data = {
.phy_addr = MV643XX_ETH_PHY_NONE,
.speed = SPEED_1000,
.duplex = DUPLEX_FULL,
};
void __init iomega_ix2_200_init(void)
{
/*
* Basic setup. Needs to be called early.
*/
kirkwood_ge01_init(&iomega_ix2_200_ge00_data);
}
|
/*
* arch/arm/mach-kirkwood/board-iomega_ix2_200.c
*
* Iomega StorCenter ix2-200
*
* This file is licensed under the terms of the GNU General Public
* License version 2. This program is licensed "as is" without any
* warranty of any kind, whether express or implied.
*/
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/mv643xx_eth.h>
#include <linux/ethtool.h>
#include "common.h"
static struct mv643xx_eth_platform_data iomega_ix2_200_ge00_data = {
.phy_addr = MV643XX_ETH_PHY_NONE,
.speed = SPEED_1000,
.duplex = DUPLEX_FULL,
};
static struct mv643xx_eth_platform_data iomega_ix2_200_ge01_data = {
.phy_addr = MV643XX_ETH_PHY_ADDR(11),
};
void __init iomega_ix2_200_init(void)
{
/*
* Basic setup. Needs to be called early.
*/
kirkwood_ge00_init(&iomega_ix2_200_ge00_data);
kirkwood_ge01_init(&iomega_ix2_200_ge01_data);
}
|
Fix GE0/GE1 init on ix2-200 as GE0 has no PHY
|
Fix GE0/GE1 init on ix2-200 as GE0 has no PHY
Signed-off-by: Jason Cooper <[email protected]>
|
C
|
mit
|
KristFoundation/Programs,KristFoundation/Programs,KristFoundation/Programs,TeamVee-Kanas/android_kernel_samsung_kanas,KristFoundation/Programs,TeamVee-Kanas/android_kernel_samsung_kanas,TeamVee-Kanas/android_kernel_samsung_kanas,KristFoundation/Programs,TeamVee-Kanas/android_kernel_samsung_kanas,TeamVee-Kanas/android_kernel_samsung_kanas,KristFoundation/Programs
|
c
|
## Code Before:
/*
* arch/arm/mach-kirkwood/board-iomega_ix2_200.c
*
* Iomega StorCenter ix2-200
*
* This file is licensed under the terms of the GNU General Public
* License version 2. This program is licensed "as is" without any
* warranty of any kind, whether express or implied.
*/
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/mv643xx_eth.h>
#include <linux/ethtool.h>
#include "common.h"
static struct mv643xx_eth_platform_data iomega_ix2_200_ge00_data = {
.phy_addr = MV643XX_ETH_PHY_NONE,
.speed = SPEED_1000,
.duplex = DUPLEX_FULL,
};
void __init iomega_ix2_200_init(void)
{
/*
* Basic setup. Needs to be called early.
*/
kirkwood_ge01_init(&iomega_ix2_200_ge00_data);
}
## Instruction:
Fix GE0/GE1 init on ix2-200 as GE0 has no PHY
Signed-off-by: Jason Cooper <[email protected]>
## Code After:
/*
* arch/arm/mach-kirkwood/board-iomega_ix2_200.c
*
* Iomega StorCenter ix2-200
*
* This file is licensed under the terms of the GNU General Public
* License version 2. This program is licensed "as is" without any
* warranty of any kind, whether express or implied.
*/
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/mv643xx_eth.h>
#include <linux/ethtool.h>
#include "common.h"
static struct mv643xx_eth_platform_data iomega_ix2_200_ge00_data = {
.phy_addr = MV643XX_ETH_PHY_NONE,
.speed = SPEED_1000,
.duplex = DUPLEX_FULL,
};
static struct mv643xx_eth_platform_data iomega_ix2_200_ge01_data = {
.phy_addr = MV643XX_ETH_PHY_ADDR(11),
};
void __init iomega_ix2_200_init(void)
{
/*
* Basic setup. Needs to be called early.
*/
kirkwood_ge00_init(&iomega_ix2_200_ge00_data);
kirkwood_ge01_init(&iomega_ix2_200_ge01_data);
}
|
// ... existing code ...
.duplex = DUPLEX_FULL,
};
static struct mv643xx_eth_platform_data iomega_ix2_200_ge01_data = {
.phy_addr = MV643XX_ETH_PHY_ADDR(11),
};
void __init iomega_ix2_200_init(void)
{
/*
* Basic setup. Needs to be called early.
*/
kirkwood_ge00_init(&iomega_ix2_200_ge00_data);
kirkwood_ge01_init(&iomega_ix2_200_ge01_data);
}
// ... rest of the code ...
|
906c71ed59a6349aed83cd18248dfe8463e3a028
|
src/integrate_tool.py
|
src/integrate_tool.py
|
from bioblend import galaxy
from bioblend import toolshed
if __name__ == '__main__':
gi_url = "http://172.21.23.6:8080/"
ts_url = "http://172.21.23.6:9009/"
name = "qiime"
owner = "iuc"
tool_panel_section_id = "qiime_rRNA_taxonomic_assignation"
gi = galaxy.GalaxyInstance(url=gi_url, key='8a099e97b0a83c73ead9f5b0fe19f4be')
ts = toolshed.ToolShedInstance(url=ts_url)
changeset_revision = str(ts.repositories.get_ordered_installable_revisions(name,
owner)[-1])
gi.toolShed.install_repository_revision(ts_url, name, owner, changeset_revision,
install_tool_dependencies=True, install_repository_dependencies=True,
tool_panel_section_id=tool_panel_section_id)
|
import sys
import os
import argparse
import re
from bioblend import galaxy
from bioblend import toolshed
def retrieve_changeset_revision(ts_url, name, owner):
ts = toolshed.ToolShedInstance(url=ts_url)
ts_repositories = ts.repositories.get_repositories()
ts_id = None
for repo in ts_repositories:
if str(repo['name']) == name and str(repo['owner']) == owner:
ts_id = repo['id']
if ts_id == None:
string = "No repository found for " + name + " (" + owner + ")"
string += " in toolshed at " + ts_url
raise ValueError(string)
return ts.repositories.show_repository_revision(ts_id)['changeset_revision']
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--gi_url', required=True)
parser.add_argument('--ts_url', required=True)
parser.add_argument('--api_key', required=True)
parser.add_argument('--tool_owner', required=True)
parser.add_argument('--tool_name', required=True)
parser.add_argument('--tool_panel_section_id', required=True)
args = parser.parse_args()
gi = galaxy.GalaxyInstance(url=args.gi_url, key=args.api_key)
changeset_revision = retrieve_changeset_revision(args.ts_url, args.tool_name,
args.tool_owner)
print changeset_revision
#gi.toolShed.install_repository_revision(ts_url, args.tool_name, args.tool_owner,
# changeset_revision, install_tool_dependencies=True,
# install_repository_dependencies=True,
# tool_panel_section_id=args.tool_panel_section_id)
|
Improve integrate tool wrapper with arguments
|
Improve integrate tool wrapper with arguments
|
Python
|
apache-2.0
|
ASaiM/framework,ASaiM/framework
|
python
|
## Code Before:
from bioblend import galaxy
from bioblend import toolshed
if __name__ == '__main__':
gi_url = "http://172.21.23.6:8080/"
ts_url = "http://172.21.23.6:9009/"
name = "qiime"
owner = "iuc"
tool_panel_section_id = "qiime_rRNA_taxonomic_assignation"
gi = galaxy.GalaxyInstance(url=gi_url, key='8a099e97b0a83c73ead9f5b0fe19f4be')
ts = toolshed.ToolShedInstance(url=ts_url)
changeset_revision = str(ts.repositories.get_ordered_installable_revisions(name,
owner)[-1])
gi.toolShed.install_repository_revision(ts_url, name, owner, changeset_revision,
install_tool_dependencies=True, install_repository_dependencies=True,
tool_panel_section_id=tool_panel_section_id)
## Instruction:
Improve integrate tool wrapper with arguments
## Code After:
import sys
import os
import argparse
import re
from bioblend import galaxy
from bioblend import toolshed
def retrieve_changeset_revision(ts_url, name, owner):
ts = toolshed.ToolShedInstance(url=ts_url)
ts_repositories = ts.repositories.get_repositories()
ts_id = None
for repo in ts_repositories:
if str(repo['name']) == name and str(repo['owner']) == owner:
ts_id = repo['id']
if ts_id == None:
string = "No repository found for " + name + " (" + owner + ")"
string += " in toolshed at " + ts_url
raise ValueError(string)
return ts.repositories.show_repository_revision(ts_id)['changeset_revision']
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--gi_url', required=True)
parser.add_argument('--ts_url', required=True)
parser.add_argument('--api_key', required=True)
parser.add_argument('--tool_owner', required=True)
parser.add_argument('--tool_name', required=True)
parser.add_argument('--tool_panel_section_id', required=True)
args = parser.parse_args()
gi = galaxy.GalaxyInstance(url=args.gi_url, key=args.api_key)
changeset_revision = retrieve_changeset_revision(args.ts_url, args.tool_name,
args.tool_owner)
print changeset_revision
#gi.toolShed.install_repository_revision(ts_url, args.tool_name, args.tool_owner,
# changeset_revision, install_tool_dependencies=True,
# install_repository_dependencies=True,
# tool_panel_section_id=args.tool_panel_section_id)
|
# ... existing code ...
import sys
import os
import argparse
import re
from bioblend import galaxy
from bioblend import toolshed
def retrieve_changeset_revision(ts_url, name, owner):
ts = toolshed.ToolShedInstance(url=ts_url)
ts_repositories = ts.repositories.get_repositories()
ts_id = None
for repo in ts_repositories:
if str(repo['name']) == name and str(repo['owner']) == owner:
ts_id = repo['id']
if ts_id == None:
string = "No repository found for " + name + " (" + owner + ")"
string += " in toolshed at " + ts_url
raise ValueError(string)
return ts.repositories.show_repository_revision(ts_id)['changeset_revision']
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--gi_url', required=True)
parser.add_argument('--ts_url', required=True)
parser.add_argument('--api_key', required=True)
parser.add_argument('--tool_owner', required=True)
parser.add_argument('--tool_name', required=True)
parser.add_argument('--tool_panel_section_id', required=True)
args = parser.parse_args()
gi = galaxy.GalaxyInstance(url=args.gi_url, key=args.api_key)
changeset_revision = retrieve_changeset_revision(args.ts_url, args.tool_name,
args.tool_owner)
print changeset_revision
#gi.toolShed.install_repository_revision(ts_url, args.tool_name, args.tool_owner,
# changeset_revision, install_tool_dependencies=True,
# install_repository_dependencies=True,
# tool_panel_section_id=args.tool_panel_section_id)
# ... rest of the code ...
|
1653984ae19d2a5ec8d154be6ad5bb146fcf496b
|
src/main/java/com/thegongoliers/pathFollowing/PathTaskCommand.java
|
src/main/java/com/thegongoliers/pathFollowing/PathTaskCommand.java
|
package com.thegongoliers.pathFollowing;
import edu.wpi.first.wpilibj.command.Command;
public abstract class PathTaskCommand extends Command {
protected SmartDriveTrainSubsystem drivetrain;
public PathTaskCommand(SmartDriveTrainSubsystem drivetrain){
requires(drivetrain);
this.drivetrain = drivetrain;
}
@Override
abstract protected void execute();
@Override
abstract protected boolean isFinished();
}
|
package com.thegongoliers.pathFollowing;
import edu.wpi.first.wpilibj.command.Command;
public abstract class PathTaskCommand extends Command {
protected SmartDriveTrainSubsystem drivetrain;
public PathTaskCommand(SmartDriveTrainSubsystem drivetrain){
requires(drivetrain);
this.drivetrain = drivetrain;
}
@Override
abstract protected void execute();
@Override
abstract protected boolean isFinished();
@Override
protected void end() {
drivetrain.stop();
}
@Override
protected void interrupted() {
drivetrain.stop();
}
}
|
Make drivetrain stop at end of path
|
Make drivetrain stop at end of path
|
Java
|
mit
|
Gongoliers/Library-of-Gongolierium
|
java
|
## Code Before:
package com.thegongoliers.pathFollowing;
import edu.wpi.first.wpilibj.command.Command;
public abstract class PathTaskCommand extends Command {
protected SmartDriveTrainSubsystem drivetrain;
public PathTaskCommand(SmartDriveTrainSubsystem drivetrain){
requires(drivetrain);
this.drivetrain = drivetrain;
}
@Override
abstract protected void execute();
@Override
abstract protected boolean isFinished();
}
## Instruction:
Make drivetrain stop at end of path
## Code After:
package com.thegongoliers.pathFollowing;
import edu.wpi.first.wpilibj.command.Command;
public abstract class PathTaskCommand extends Command {
protected SmartDriveTrainSubsystem drivetrain;
public PathTaskCommand(SmartDriveTrainSubsystem drivetrain){
requires(drivetrain);
this.drivetrain = drivetrain;
}
@Override
abstract protected void execute();
@Override
abstract protected boolean isFinished();
@Override
protected void end() {
drivetrain.stop();
}
@Override
protected void interrupted() {
drivetrain.stop();
}
}
|
...
@Override
abstract protected boolean isFinished();
@Override
protected void end() {
drivetrain.stop();
}
@Override
protected void interrupted() {
drivetrain.stop();
}
}
...
|
7ac3e48d1934e7a749590d875a3f5e4423fa6c72
|
linked_list.py
|
linked_list.py
|
class SinglyLinked(object):
def __init__(self):
pass
def insert(self, val):
# insert val at beginning of list
pass
def pop(self):
# pops first value from list and returns it
pass
def size(self):
# returns length of list
pass
def search(self, val):
# return node containing 'val' in list, if present (else None)
pass
def remove(self, node):
# remove node from list, wherever it might be
pass
def print_(self):
# print list as python tuple literal
# (bonus points if you make it appear like "(12, 'sam', 32, 'fred')")
pass
|
class Node(object):
def __init__(self, data, nextNode=None):
self.data = data
self.nextNode = nextNode
class LinkedList(object):
def __init__(self, firstNode=None):
self.firstNode = firstNode
def insert(self, newNode):
# insert newNode at beginning of list
if not self.firstNode:
self.firstNode = newNode
else:
newNode.nextNode = self.firstNode
self.firstNode = newNode
def pop(self):
# pops first value from list and returns it
pass
def size(self):
# returns length of list
pass
def search(self, val):
# return node containing 'val' in list, if present (else None)
pass
def remove(self, node):
# remove node from list, wherever it might be
pass
def display(self):
# print list as python tuple literal
# (bonus points if you make it appear like "(12, 'sam', 32, 'fred')")
for node in self:
print self.firstNode
|
Create Node class; construct insert method
|
Create Node class; construct insert method
|
Python
|
mit
|
jwarren116/data-structures
|
python
|
## Code Before:
class SinglyLinked(object):
def __init__(self):
pass
def insert(self, val):
# insert val at beginning of list
pass
def pop(self):
# pops first value from list and returns it
pass
def size(self):
# returns length of list
pass
def search(self, val):
# return node containing 'val' in list, if present (else None)
pass
def remove(self, node):
# remove node from list, wherever it might be
pass
def print_(self):
# print list as python tuple literal
# (bonus points if you make it appear like "(12, 'sam', 32, 'fred')")
pass
## Instruction:
Create Node class; construct insert method
## Code After:
class Node(object):
def __init__(self, data, nextNode=None):
self.data = data
self.nextNode = nextNode
class LinkedList(object):
def __init__(self, firstNode=None):
self.firstNode = firstNode
def insert(self, newNode):
# insert newNode at beginning of list
if not self.firstNode:
self.firstNode = newNode
else:
newNode.nextNode = self.firstNode
self.firstNode = newNode
def pop(self):
# pops first value from list and returns it
pass
def size(self):
# returns length of list
pass
def search(self, val):
# return node containing 'val' in list, if present (else None)
pass
def remove(self, node):
# remove node from list, wherever it might be
pass
def display(self):
# print list as python tuple literal
# (bonus points if you make it appear like "(12, 'sam', 32, 'fred')")
for node in self:
print self.firstNode
|
...
class Node(object):
def __init__(self, data, nextNode=None):
self.data = data
self.nextNode = nextNode
class LinkedList(object):
def __init__(self, firstNode=None):
self.firstNode = firstNode
def insert(self, newNode):
# insert newNode at beginning of list
if not self.firstNode:
self.firstNode = newNode
else:
newNode.nextNode = self.firstNode
self.firstNode = newNode
def pop(self):
# pops first value from list and returns it
...
# remove node from list, wherever it might be
pass
def display(self):
# print list as python tuple literal
# (bonus points if you make it appear like "(12, 'sam', 32, 'fred')")
for node in self:
print self.firstNode
...
|
9409b9da1392514b7da5db4d44a32b47d8452e67
|
play.py
|
play.py
|
import PyWXSB.XMLSchema as xs
import PyWXSB.Namespace as Namespace
from PyWXSB.generate import PythonGenerator as Generator
import sys
import traceback
from xml.dom import minidom
from xml.dom import Node
files = sys.argv[1:]
if 0 == len(files):
files = [ 'schemas/kml21.xsd' ]
Namespace.XMLSchema.modulePath('xs.datatypes')
for file in files:
try:
wxs = xs.schema().CreateFromDOM(minidom.parse(file))
ns = wxs.getTargetNamespace()
enum_prefix_map = [ ( 'colorModeEnum', 'CM' )
, ( 'styleStateEnum', 'SS' )
, ( 'itemIconStateEnum', 'IIS' )
, ( 'listItemTypeEnum', 'LIT' )
, ( 'unitsEnum', 'Units' )
]
for (std_name, enum_prefix) in enum_prefix_map:
cm = ns.lookupTypeDefinition(std_name)
if cm is not None:
facet = cm.facets().get(xs.facets.CF_enumeration, None)
if facet is not None:
facet.enumPrefix('%s_' % enum_prefix)
gen = Generator(ns, 'xs')
#print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('formChoice')]))
print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('viewRefreshModeEnum')]))
#print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('NetworkLinkControlType')]))
#print "\n".join(gen.generateDefinitions(ns.typeDefinitions()))
except Exception, e:
sys.stderr.write("%s processing %s:\n" % (e.__class__, file))
traceback.print_exception(*sys.exc_info())
|
import PyWXSB.XMLSchema as xs
import PyWXSB.Namespace as Namespace
from PyWXSB.generate import PythonGenerator as Generator
import sys
import traceback
from xml.dom import minidom
from xml.dom import Node
files = sys.argv[1:]
if 0 == len(files):
files = [ 'schemas/kml21.xsd' ]
Namespace.XMLSchema.setModulePath('xs.datatypes')
for file in files:
try:
wxs = xs.schema().CreateFromDOM(minidom.parse(file))
print "\nComponents in the schema:"
for c in wxs.components():
cd = c.dependentComponents()
print 'Instance of %s depends on %d others' % (c.__class__.__name__, len(cd))
except Exception, e:
sys.stderr.write("%s processing %s:\n" % (e.__class__, file))
traceback.print_exception(*sys.exc_info())
|
Update to new namespace interface, walk components
|
Update to new namespace interface, walk components
|
Python
|
apache-2.0
|
jonfoster/pyxb2,jonfoster/pyxb-upstream-mirror,balanced/PyXB,pabigot/pyxb,balanced/PyXB,jonfoster/pyxb2,jonfoster/pyxb1,jonfoster/pyxb2,jonfoster/pyxb-upstream-mirror,CantemoInternal/pyxb,CantemoInternal/pyxb,jonfoster/pyxb-upstream-mirror,jonfoster/pyxb1,CantemoInternal/pyxb,pabigot/pyxb,balanced/PyXB
|
python
|
## Code Before:
import PyWXSB.XMLSchema as xs
import PyWXSB.Namespace as Namespace
from PyWXSB.generate import PythonGenerator as Generator
import sys
import traceback
from xml.dom import minidom
from xml.dom import Node
files = sys.argv[1:]
if 0 == len(files):
files = [ 'schemas/kml21.xsd' ]
Namespace.XMLSchema.modulePath('xs.datatypes')
for file in files:
try:
wxs = xs.schema().CreateFromDOM(minidom.parse(file))
ns = wxs.getTargetNamespace()
enum_prefix_map = [ ( 'colorModeEnum', 'CM' )
, ( 'styleStateEnum', 'SS' )
, ( 'itemIconStateEnum', 'IIS' )
, ( 'listItemTypeEnum', 'LIT' )
, ( 'unitsEnum', 'Units' )
]
for (std_name, enum_prefix) in enum_prefix_map:
cm = ns.lookupTypeDefinition(std_name)
if cm is not None:
facet = cm.facets().get(xs.facets.CF_enumeration, None)
if facet is not None:
facet.enumPrefix('%s_' % enum_prefix)
gen = Generator(ns, 'xs')
#print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('formChoice')]))
print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('viewRefreshModeEnum')]))
#print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('NetworkLinkControlType')]))
#print "\n".join(gen.generateDefinitions(ns.typeDefinitions()))
except Exception, e:
sys.stderr.write("%s processing %s:\n" % (e.__class__, file))
traceback.print_exception(*sys.exc_info())
## Instruction:
Update to new namespace interface, walk components
## Code After:
import PyWXSB.XMLSchema as xs
import PyWXSB.Namespace as Namespace
from PyWXSB.generate import PythonGenerator as Generator
import sys
import traceback
from xml.dom import minidom
from xml.dom import Node
files = sys.argv[1:]
if 0 == len(files):
files = [ 'schemas/kml21.xsd' ]
Namespace.XMLSchema.setModulePath('xs.datatypes')
for file in files:
try:
wxs = xs.schema().CreateFromDOM(minidom.parse(file))
print "\nComponents in the schema:"
for c in wxs.components():
cd = c.dependentComponents()
print 'Instance of %s depends on %d others' % (c.__class__.__name__, len(cd))
except Exception, e:
sys.stderr.write("%s processing %s:\n" % (e.__class__, file))
traceback.print_exception(*sys.exc_info())
|
// ... existing code ...
if 0 == len(files):
files = [ 'schemas/kml21.xsd' ]
Namespace.XMLSchema.setModulePath('xs.datatypes')
for file in files:
try:
wxs = xs.schema().CreateFromDOM(minidom.parse(file))
print "\nComponents in the schema:"
for c in wxs.components():
cd = c.dependentComponents()
print 'Instance of %s depends on %d others' % (c.__class__.__name__, len(cd))
except Exception, e:
sys.stderr.write("%s processing %s:\n" % (e.__class__, file))
traceback.print_exception(*sys.exc_info())
// ... rest of the code ...
|
0c46e1fccad4e19eda5c380eb648324eb7c8a32e
|
src/common/file_manager_interface.h
|
src/common/file_manager_interface.h
|
class FileManagerInterface {
public:
virtual bool send(
const std::string & host,
const unsigned short host_port,
std::string & file_path,
uint64_t from = 0,
uint64_t to = -1) = 0;
virtual ~FileManagerInterface(){};
};
#endif // _FILE_MANAGER_INTERFACE_
|
class FileManagerInterface {
public:
virtual uint64_t send(
const std::string & host,
const unsigned short host_port,
std::string & file_path,
uint64_t from = 0,
uint64_t to = -1) = 0;
virtual std::vector<uint64_t> getIds() = 0;
virtual ~FileManagerInterface(){};
};
#endif // _FILE_MANAGER_INTERFACE_
|
Add getIds to file manager
|
Add getIds to file manager
|
C
|
mit
|
Plamenod/P2P
|
c
|
## Code Before:
class FileManagerInterface {
public:
virtual bool send(
const std::string & host,
const unsigned short host_port,
std::string & file_path,
uint64_t from = 0,
uint64_t to = -1) = 0;
virtual ~FileManagerInterface(){};
};
#endif // _FILE_MANAGER_INTERFACE_
## Instruction:
Add getIds to file manager
## Code After:
class FileManagerInterface {
public:
virtual uint64_t send(
const std::string & host,
const unsigned short host_port,
std::string & file_path,
uint64_t from = 0,
uint64_t to = -1) = 0;
virtual std::vector<uint64_t> getIds() = 0;
virtual ~FileManagerInterface(){};
};
#endif // _FILE_MANAGER_INTERFACE_
|
# ... existing code ...
class FileManagerInterface {
public:
virtual uint64_t send(
const std::string & host,
const unsigned short host_port,
std::string & file_path,
uint64_t from = 0,
uint64_t to = -1) = 0;
virtual std::vector<uint64_t> getIds() = 0;
virtual ~FileManagerInterface(){};
};
# ... rest of the code ...
|
d8326895206ded5fdcb5d31050debc3711ff5aca
|
src/main/java/org/galibier/messaging/benchmark/zookeeper/ZKOperation.java
|
src/main/java/org/galibier/messaging/benchmark/zookeeper/ZKOperation.java
|
package org.galibier.messaging.benchmark.zookeeper;
import com.netflix.curator.framework.CuratorFramework;
import com.netflix.curator.framework.CuratorFrameworkFactory;
import com.netflix.curator.retry.RetryOneTime;
import org.apache.zookeeper.KeeperException;
import org.galibier.messaging.benchmark.Message;
import org.galibier.messaging.benchmark.Operation;
public abstract class ZKOperation implements Operation {
protected final CuratorFramework client;
protected final String path;
public ZKOperation(String host, String path) {
this.client = CuratorFrameworkFactory.newClient(host, new RetryOneTime(100));
this.path = path;
}
@Override
public void initialize() {
client.start();
try {
client.create().forPath(path, Message.getDefault());
} catch (KeeperException.NodeExistsException e) {
// ignore
} catch (Exception e) {
System.out.println("Initialization failed");
System.exit(1);
}
}
@Override
public void terminate() {
try {
client.delete().forPath(path);
} catch (KeeperException.NoNodeException e) {
// ignore
} catch (Exception e) {
// ignore
} finally {
client.close();
}
}
}
|
package org.galibier.messaging.benchmark.zookeeper;
import com.netflix.curator.framework.CuratorFramework;
import com.netflix.curator.framework.CuratorFrameworkFactory;
import com.netflix.curator.retry.RetryOneTime;
import org.apache.zookeeper.KeeperException;
import org.galibier.messaging.benchmark.Message;
import org.galibier.messaging.benchmark.Operation;
public abstract class ZKOperation implements Operation {
protected final CuratorFramework client;
protected final String path;
public ZKOperation(String host, String path) {
this.client = CuratorFrameworkFactory.newClient(host, new RetryOneTime(100));
this.path = path;
}
@Override
public void initialize() {
client.start();
try {
client.create().forPath(path, Message.getDefault());
} catch (KeeperException.NodeExistsException e) {
// ignore
} catch (Exception e) {
e.printStackTrace(System.err);
System.out.println("Initialization failed");
System.exit(1);
}
}
@Override
public void terminate() {
try {
client.delete().forPath(path);
} catch (KeeperException.NoNodeException e) {
// ignore
} catch (Exception e) {
// ignore
} finally {
client.close();
}
}
}
|
Print stack trace when initialization fails
|
Print stack trace when initialization fails
|
Java
|
mit
|
oshothebig/messaging-bench
|
java
|
## Code Before:
package org.galibier.messaging.benchmark.zookeeper;
import com.netflix.curator.framework.CuratorFramework;
import com.netflix.curator.framework.CuratorFrameworkFactory;
import com.netflix.curator.retry.RetryOneTime;
import org.apache.zookeeper.KeeperException;
import org.galibier.messaging.benchmark.Message;
import org.galibier.messaging.benchmark.Operation;
public abstract class ZKOperation implements Operation {
protected final CuratorFramework client;
protected final String path;
public ZKOperation(String host, String path) {
this.client = CuratorFrameworkFactory.newClient(host, new RetryOneTime(100));
this.path = path;
}
@Override
public void initialize() {
client.start();
try {
client.create().forPath(path, Message.getDefault());
} catch (KeeperException.NodeExistsException e) {
// ignore
} catch (Exception e) {
System.out.println("Initialization failed");
System.exit(1);
}
}
@Override
public void terminate() {
try {
client.delete().forPath(path);
} catch (KeeperException.NoNodeException e) {
// ignore
} catch (Exception e) {
// ignore
} finally {
client.close();
}
}
}
## Instruction:
Print stack trace when initialization fails
## Code After:
package org.galibier.messaging.benchmark.zookeeper;
import com.netflix.curator.framework.CuratorFramework;
import com.netflix.curator.framework.CuratorFrameworkFactory;
import com.netflix.curator.retry.RetryOneTime;
import org.apache.zookeeper.KeeperException;
import org.galibier.messaging.benchmark.Message;
import org.galibier.messaging.benchmark.Operation;
public abstract class ZKOperation implements Operation {
protected final CuratorFramework client;
protected final String path;
public ZKOperation(String host, String path) {
this.client = CuratorFrameworkFactory.newClient(host, new RetryOneTime(100));
this.path = path;
}
@Override
public void initialize() {
client.start();
try {
client.create().forPath(path, Message.getDefault());
} catch (KeeperException.NodeExistsException e) {
// ignore
} catch (Exception e) {
e.printStackTrace(System.err);
System.out.println("Initialization failed");
System.exit(1);
}
}
@Override
public void terminate() {
try {
client.delete().forPath(path);
} catch (KeeperException.NoNodeException e) {
// ignore
} catch (Exception e) {
// ignore
} finally {
client.close();
}
}
}
|
// ... existing code ...
} catch (KeeperException.NodeExistsException e) {
// ignore
} catch (Exception e) {
e.printStackTrace(System.err);
System.out.println("Initialization failed");
System.exit(1);
}
// ... rest of the code ...
|
03ee406800fb59ff3e7565397107fa9aad0d54d0
|
website/notifications/listeners.py
|
website/notifications/listeners.py
|
import logging
from website.notifications.exceptions import InvalidSubscriptionError
from website.notifications.utils import subscribe_user_to_notifications, subscribe_user_to_global_notifications
from website.project.signals import contributor_added, project_created
from framework.auth.signals import user_confirmed
logger = logging.getLogger(__name__)
@project_created.connect
def subscribe_creator(node):
if node.is_collection or node.is_deleted:
return None
try:
subscribe_user_to_notifications(node, node.creator)
except InvalidSubscriptionError as err:
user = node.creator._id if node.creator else 'None'
logger.warn('Skipping subscription of user {} to node {}'.format(user, node._id))
logger.warn('Reason: {}'.format(str(err)))
@contributor_added.connect
def subscribe_contributor(node, contributor, auth=None, *args, **kwargs):
try:
subscribe_user_to_notifications(node, contributor)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to node {}'.format(contributor, node._id))
logger.warn('Reason: {}'.format(str(err)))
@user_confirmed.connect
def subscribe_confirmed_user(user):
try:
subscribe_user_to_global_notifications(user)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to global subscriptions'.format(user))
logger.warn('Reason: {}'.format(str(err)))
|
import logging
from website.notifications.exceptions import InvalidSubscriptionError
from website.notifications.utils import subscribe_user_to_notifications, subscribe_user_to_global_notifications
from website.project.signals import contributor_added, project_created
from framework.auth.signals import user_confirmed
logger = logging.getLogger(__name__)
@project_created.connect
def subscribe_creator(node):
if node.institution_id or node.is_collection or node.is_deleted:
return None
try:
subscribe_user_to_notifications(node, node.creator)
except InvalidSubscriptionError as err:
user = node.creator._id if node.creator else 'None'
logger.warn('Skipping subscription of user {} to node {}'.format(user, node._id))
logger.warn('Reason: {}'.format(str(err)))
@contributor_added.connect
def subscribe_contributor(node, contributor, auth=None, *args, **kwargs):
try:
subscribe_user_to_notifications(node, contributor)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to node {}'.format(contributor, node._id))
logger.warn('Reason: {}'.format(str(err)))
@user_confirmed.connect
def subscribe_confirmed_user(user):
try:
subscribe_user_to_global_notifications(user)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to global subscriptions'.format(user))
logger.warn('Reason: {}'.format(str(err)))
|
Revert "Remove incorrect check for institution_id"
|
Revert "Remove incorrect check for institution_id"
This reverts commit 617df13670573b858b6c23249f4287786807d8b6.
|
Python
|
apache-2.0
|
hmoco/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,cslzchen/osf.io,Nesiehr/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,chennan47/osf.io,crcresearch/osf.io,Nesiehr/osf.io,felliott/osf.io,Johnetordoff/osf.io,acshi/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,binoculars/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,sloria/osf.io,TomBaxter/osf.io,caneruguz/osf.io,adlius/osf.io,hmoco/osf.io,caneruguz/osf.io,chennan47/osf.io,chrisseto/osf.io,mattclark/osf.io,saradbowman/osf.io,aaxelb/osf.io,mfraezz/osf.io,felliott/osf.io,monikagrabowska/osf.io,sloria/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,erinspace/osf.io,erinspace/osf.io,leb2dg/osf.io,adlius/osf.io,pattisdr/osf.io,laurenrevere/osf.io,caneruguz/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,crcresearch/osf.io,adlius/osf.io,adlius/osf.io,cwisecarver/osf.io,leb2dg/osf.io,baylee-d/osf.io,mfraezz/osf.io,caseyrollins/osf.io,mattclark/osf.io,icereval/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,leb2dg/osf.io,crcresearch/osf.io,monikagrabowska/osf.io,felliott/osf.io,erinspace/osf.io,laurenrevere/osf.io,leb2dg/osf.io,binoculars/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,chennan47/osf.io,hmoco/osf.io,acshi/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,pattisdr/osf.io,icereval/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,caseyrollins/osf.io,aaxelb/osf.io,laurenrevere/osf.io,TomBaxter/osf.io,monikagrabowska/osf.io,acshi/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,cwisecarver/osf.io,chrisseto/osf.io,baylee-d/osf.io,icereval/osf.io,binoculars/osf.io,felliott/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,hmoco/osf.io,sloria/osf.io,saradbowman/osf.io,acshi/osf.io,caneruguz/osf.io,acshi/osf.io,chrisseto/osf.io,cwisecarver/osf.io
|
python
|
## Code Before:
import logging
from website.notifications.exceptions import InvalidSubscriptionError
from website.notifications.utils import subscribe_user_to_notifications, subscribe_user_to_global_notifications
from website.project.signals import contributor_added, project_created
from framework.auth.signals import user_confirmed
logger = logging.getLogger(__name__)
@project_created.connect
def subscribe_creator(node):
if node.is_collection or node.is_deleted:
return None
try:
subscribe_user_to_notifications(node, node.creator)
except InvalidSubscriptionError as err:
user = node.creator._id if node.creator else 'None'
logger.warn('Skipping subscription of user {} to node {}'.format(user, node._id))
logger.warn('Reason: {}'.format(str(err)))
@contributor_added.connect
def subscribe_contributor(node, contributor, auth=None, *args, **kwargs):
try:
subscribe_user_to_notifications(node, contributor)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to node {}'.format(contributor, node._id))
logger.warn('Reason: {}'.format(str(err)))
@user_confirmed.connect
def subscribe_confirmed_user(user):
try:
subscribe_user_to_global_notifications(user)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to global subscriptions'.format(user))
logger.warn('Reason: {}'.format(str(err)))
## Instruction:
Revert "Remove incorrect check for institution_id"
This reverts commit 617df13670573b858b6c23249f4287786807d8b6.
## Code After:
import logging
from website.notifications.exceptions import InvalidSubscriptionError
from website.notifications.utils import subscribe_user_to_notifications, subscribe_user_to_global_notifications
from website.project.signals import contributor_added, project_created
from framework.auth.signals import user_confirmed
logger = logging.getLogger(__name__)
@project_created.connect
def subscribe_creator(node):
if node.institution_id or node.is_collection or node.is_deleted:
return None
try:
subscribe_user_to_notifications(node, node.creator)
except InvalidSubscriptionError as err:
user = node.creator._id if node.creator else 'None'
logger.warn('Skipping subscription of user {} to node {}'.format(user, node._id))
logger.warn('Reason: {}'.format(str(err)))
@contributor_added.connect
def subscribe_contributor(node, contributor, auth=None, *args, **kwargs):
try:
subscribe_user_to_notifications(node, contributor)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to node {}'.format(contributor, node._id))
logger.warn('Reason: {}'.format(str(err)))
@user_confirmed.connect
def subscribe_confirmed_user(user):
try:
subscribe_user_to_global_notifications(user)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to global subscriptions'.format(user))
logger.warn('Reason: {}'.format(str(err)))
|
# ... existing code ...
@project_created.connect
def subscribe_creator(node):
if node.institution_id or node.is_collection or node.is_deleted:
return None
try:
subscribe_user_to_notifications(node, node.creator)
# ... rest of the code ...
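A note on the mechanics the handlers above rely on: website.project.signals exposes blinker-style signals, so connecting a receiver is just decorating it, and firing the signal calls every connected receiver with the sender. The sketch below only illustrates that pattern with a stand-in node class; it is not OSF code, and blinker as the backing library is an assumption.

import logging
from blinker import signal

logger = logging.getLogger(__name__)
project_created = signal('project_created')

class FakeNode(object):
    # Stand-in carrying just the attributes the guard clause inspects.
    def __init__(self, institution_id=None, is_collection=False, is_deleted=False):
        self.institution_id = institution_id
        self.is_collection = is_collection
        self.is_deleted = is_deleted

@project_created.connect
def log_creation(node):
    if node.institution_id or node.is_collection or node.is_deleted:
        return None  # institution, collection and deleted nodes are skipped, as above
    logger.info('would subscribe the creator here')

project_created.send(FakeNode(institution_id='inst-1'))  # receiver returns early
project_created.send(FakeNode())                         # receiver does the work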
|
a75dbd5aa5e9b84d08919ea14743afb75182ee8b
|
steel/chunks/iff.py
|
steel/chunks/iff.py
|
import collections
import io
from steel.fields.numbers import BigEndian
from steel import fields
from steel.chunks import base
__all__ = ['Chunk', 'ChunkList', 'Form']
class Chunk(base.Chunk):
id = fields.String(size=4, encoding='ascii')
size = fields.Integer(size=4, endianness=BigEndian)
payload = base.Payload(size=size)
class ChunkList(base.ChunkList):
def __init__(self, *args, **kwargs):
# Just a simple override to default to a list of IFF chunks
return super(ChunkList, self).__init__(Chunk, *args, **kwargs)
class Form(base.Chunk, encoding='ascii'):
tag = fields.FixedString('FORM')
size = fields.Integer(size=4, endianness=BigEndian)
id = fields.String(size=4)
payload = base.Payload(size=size)
|
import collections
import io
from steel.fields.numbers import BigEndian
from steel import fields
from steel.chunks import base
__all__ = ['Chunk', 'ChunkList', 'List', 'Form', 'Prop']
class Chunk(base.Chunk):
id = fields.String(size=4, encoding='ascii')
size = fields.Integer(size=4, endianness=BigEndian)
payload = base.Payload(size=size)
class ChunkList(base.ChunkList):
def __init__(self, *args, **kwargs):
# Just a simple override to default to a list of IFF chunks
return super(ChunkList, self).__init__(Chunk, *args, **kwargs)
class List(base.Chunk, encoding='ascii'):
tag = fields.FixedString('LIST')
size = fields.Integer(size=4, endianness=BigEndian)
id = fields.String(size=4)
payload = base.Payload(size=size)
class Form(base.Chunk, encoding='ascii'):
tag = fields.FixedString('FORM')
size = fields.Integer(size=4, endianness=BigEndian)
id = fields.String(size=4)
payload = base.Payload(size=size)
class Prop(base.Chunk, encoding='ascii'):
tag = fields.FixedString('PROP')
size = fields.Integer(size=4, endianness=BigEndian)
id = fields.String(size=4)
payload = base.Payload(size=size)
|
Add a List and Prop for better IFF compliance
|
Add a List and Prop for better IFF compliance
|
Python
|
bsd-3-clause
|
gulopine/steel
|
python
|
## Code Before:
import collections
import io
from steel.fields.numbers import BigEndian
from steel import fields
from steel.chunks import base
__all__ = ['Chunk', 'ChunkList', 'Form']
class Chunk(base.Chunk):
id = fields.String(size=4, encoding='ascii')
size = fields.Integer(size=4, endianness=BigEndian)
payload = base.Payload(size=size)
class ChunkList(base.ChunkList):
def __init__(self, *args, **kwargs):
# Just a simple override to default to a list of IFF chunks
return super(ChunkList, self).__init__(Chunk, *args, **kwargs)
class Form(base.Chunk, encoding='ascii'):
tag = fields.FixedString('FORM')
size = fields.Integer(size=4, endianness=BigEndian)
id = fields.String(size=4)
payload = base.Payload(size=size)
## Instruction:
Add a List and Prop for better IFF compliance
## Code After:
import collections
import io
from steel.fields.numbers import BigEndian
from steel import fields
from steel.chunks import base
__all__ = ['Chunk', 'ChunkList', 'List', 'Form', 'Prop']
class Chunk(base.Chunk):
id = fields.String(size=4, encoding='ascii')
size = fields.Integer(size=4, endianness=BigEndian)
payload = base.Payload(size=size)
class ChunkList(base.ChunkList):
def __init__(self, *args, **kwargs):
# Just a simple override to default to a list of IFF chunks
return super(ChunkList, self).__init__(Chunk, *args, **kwargs)
class List(base.Chunk, encoding='ascii'):
tag = fields.FixedString('LIST')
size = fields.Integer(size=4, endianness=BigEndian)
id = fields.String(size=4)
payload = base.Payload(size=size)
class Form(base.Chunk, encoding='ascii'):
tag = fields.FixedString('FORM')
size = fields.Integer(size=4, endianness=BigEndian)
id = fields.String(size=4)
payload = base.Payload(size=size)
class Prop(base.Chunk, encoding='ascii'):
tag = fields.FixedString('PROP')
size = fields.Integer(size=4, endianness=BigEndian)
id = fields.String(size=4)
payload = base.Payload(size=size)
|
# ... existing code ...
from steel import fields
from steel.chunks import base
__all__ = ['Chunk', 'ChunkList', 'List', 'Form', 'Prop']
class Chunk(base.Chunk):
# ... modified code ...
return super(ChunkList, self).__init__(Chunk, *args, **kwargs)
class List(base.Chunk, encoding='ascii'):
tag = fields.FixedString('LIST')
size = fields.Integer(size=4, endianness=BigEndian)
id = fields.String(size=4)
payload = base.Payload(size=size)
class Form(base.Chunk, encoding='ascii'):
tag = fields.FixedString('FORM')
size = fields.Integer(size=4, endianness=BigEndian)
...
payload = base.Payload(size=size)
class Prop(base.Chunk, encoding='ascii'):
tag = fields.FixedString('PROP')
size = fields.Integer(size=4, endianness=BigEndian)
id = fields.String(size=4)
payload = base.Payload(size=size)
# ... rest of the code ...
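All three container declarations (LIST, FORM, PROP) describe the same wire layout: a fixed 4-byte ASCII tag, a 4-byte big-endian size, a 4-byte type id, then the payload. As a hedged illustration of that layout using only the standard library (this is not the steel parsing API), the header can be unpacked by hand:

import io
import struct

def read_container_header(stream):
    # tag (b'FORM' / b'LIST' / b'PROP'), big-endian size, 4-byte type id
    tag = stream.read(4).decode('ascii')
    (size,) = struct.unpack('>I', stream.read(4))
    type_id = stream.read(4).decode('ascii')
    return tag, size, type_id

sample = io.BytesIO(b'FORM' + struct.pack('>I', 4) + b'AIFF')
print(read_container_header(sample))  # ('FORM', 4, 'AIFF')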
|
8d513f02f31b78a65b920293a5aeebfc0e52473d
|
app/src/main/java/net/squanchy/navigation/Navigator.java
|
app/src/main/java/net/squanchy/navigation/Navigator.java
|
package net.squanchy.navigation;
import android.content.Context;
import android.content.Intent;
import net.squanchy.eventdetails.EventDetailsActivity;
import net.squanchy.search.SearchActivity;
import net.squanchy.speaker.SpeakerDetailsActivity;
public class Navigator {
private final Context context;
public Navigator(Context context) {
this.context = context;
}
public void toEventDetails(String eventId) {
Intent intent = EventDetailsActivity.createIntent(context, eventId);
context.startActivity(intent);
}
public void toSpeakerDetails(String speakerId) {
Intent intent = SpeakerDetailsActivity.createIntent(context, speakerId);
context.startActivity(intent);
}
public void toSearch() {
context.startActivity(new Intent(context, SearchActivity.class));
}
}
|
package net.squanchy.navigation;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import net.squanchy.eventdetails.EventDetailsActivity;
import net.squanchy.search.SearchActivity;
import net.squanchy.speaker.SpeakerDetailsActivity;
public class Navigator {
private final Context context;
public Navigator(Context context) {
this.context = context;
}
public void toEventDetails(String eventId) {
Intent intent = EventDetailsActivity.createIntent(context, eventId);
context.startActivity(intent);
}
public void toSpeakerDetails(String speakerId) {
Intent intent = SpeakerDetailsActivity.createIntent(context, speakerId);
context.startActivity(intent);
}
public void toSearch() {
context.startActivity(new Intent(context, SearchActivity.class));
}
public void toTwitterProfile(String username) {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse("twitter://user?screen_name=" + username));
if (canResolve(intent)) {
context.startActivity(intent);
} else {
toExternalUrl("https://twitter.com/" + username);
context.startActivity(intent);
}
}
private boolean canResolve(Intent intent) {
return context.getPackageManager()
.queryIntentActivities(intent, 0)
.isEmpty();
}
public void toExternalUrl(String url) {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(url));
context.startActivity(intent);
}
}
|
Add support for navigating to Twitter profile and external URL
|
Add support for navigating to Twitter profile and external URL
|
Java
|
apache-2.0
|
squanchy-dev/squanchy-android,squanchy-dev/squanchy-android,squanchy-dev/squanchy-android
|
java
|
## Code Before:
package net.squanchy.navigation;
import android.content.Context;
import android.content.Intent;
import net.squanchy.eventdetails.EventDetailsActivity;
import net.squanchy.search.SearchActivity;
import net.squanchy.speaker.SpeakerDetailsActivity;
public class Navigator {
private final Context context;
public Navigator(Context context) {
this.context = context;
}
public void toEventDetails(String eventId) {
Intent intent = EventDetailsActivity.createIntent(context, eventId);
context.startActivity(intent);
}
public void toSpeakerDetails(String speakerId) {
Intent intent = SpeakerDetailsActivity.createIntent(context, speakerId);
context.startActivity(intent);
}
public void toSearch() {
context.startActivity(new Intent(context, SearchActivity.class));
}
}
## Instruction:
Add support for navigating to Twitter profile and external URL
## Code After:
package net.squanchy.navigation;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import net.squanchy.eventdetails.EventDetailsActivity;
import net.squanchy.search.SearchActivity;
import net.squanchy.speaker.SpeakerDetailsActivity;
public class Navigator {
private final Context context;
public Navigator(Context context) {
this.context = context;
}
public void toEventDetails(String eventId) {
Intent intent = EventDetailsActivity.createIntent(context, eventId);
context.startActivity(intent);
}
public void toSpeakerDetails(String speakerId) {
Intent intent = SpeakerDetailsActivity.createIntent(context, speakerId);
context.startActivity(intent);
}
public void toSearch() {
context.startActivity(new Intent(context, SearchActivity.class));
}
public void toTwitterProfile(String username) {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse("twitter://user?screen_name=" + username));
if (canResolve(intent)) {
context.startActivity(intent);
} else {
toExternalUrl("https://twitter.com/" + username);
context.startActivity(intent);
}
}
private boolean canResolve(Intent intent) {
return context.getPackageManager()
.queryIntentActivities(intent, 0)
.isEmpty();
}
public void toExternalUrl(String url) {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(url));
context.startActivity(intent);
}
}
|
// ... existing code ...
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import net.squanchy.eventdetails.EventDetailsActivity;
import net.squanchy.search.SearchActivity;
// ... modified code ...
public void toSearch() {
context.startActivity(new Intent(context, SearchActivity.class));
}
public void toTwitterProfile(String username) {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse("twitter://user?screen_name=" + username));
if (canResolve(intent)) {
context.startActivity(intent);
} else {
toExternalUrl("https://twitter.com/" + username);
context.startActivity(intent);
}
}
private boolean canResolve(Intent intent) {
return context.getPackageManager()
.queryIntentActivities(intent, 0)
.isEmpty();
}
public void toExternalUrl(String url) {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(url));
context.startActivity(intent);
}
}
// ... rest of the code ...
|
23e57facea49ebc093d1da7a9ae6857cd2c8dad7
|
warehouse/defaults.py
|
warehouse/defaults.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# The base domain name for this installation. Used to control linking to
# sub-domains.
SERVER_NAME = "warehouse.local"
# The URI for our PostgreSQL database.
SQLALCHEMY_DATABASE_URI = "postgres:///warehouse"
# The type of Storage to use. Can be either Filesystem or S3.
STORAGE = "Filesystem"
# The hash to use in computing filenames.
# Allowed values: md5, sha1, sha224, sha256, sha384, sha512, None
STORAGE_HASH = "md5"
# Base directory for storage when using the Filesystem.
STORAGE_DIRECTORY = "data"
# The name of the bucket that files will be stored in when using S3.
# STORAGE_BUCKET = "<storage bucket>"
# The S3 Key used to access S3 when using S3 Storage
# S3_KEY = "<S3 Key>"
# The S3 Secret used to access S3 when using S3 Storage
# S3_SECRET = "<S3 Secret>"
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# The base domain name for this installation. Used to control linking to
# sub-domains.
SERVER_NAME = "warehouse.local"
# The URI for our PostgreSQL database.
SQLALCHEMY_DATABASE_URI = "postgres:///warehouse"
# The URI for our Redis database.
REDIS_URI = "redis://localhost:6379/0"
# The type of Storage to use. Can be either Filesystem or S3.
STORAGE = "Filesystem"
# The hash to use in computing filenames.
# Allowed values: md5, sha1, sha224, sha256, sha384, sha512, None
STORAGE_HASH = "md5"
# Base directory for storage when using the Filesystem.
STORAGE_DIRECTORY = "data"
# The name of the bucket that files will be stored in when using S3.
# STORAGE_BUCKET = "<storage bucket>"
# The S3 Key used to access S3 when using S3 Storage
# S3_KEY = "<S3 Key>"
# The S3 Secret used to access S3 when using S3 Storage
# S3_SECRET = "<S3 Secret>"
|
Add an explicit default for REDIS_URI
|
Add an explicit default for REDIS_URI
|
Python
|
bsd-2-clause
|
davidfischer/warehouse
|
python
|
## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# The base domain name for this installation. Used to control linking to
# sub-domains.
SERVER_NAME = "warehouse.local"
# The URI for our PostgreSQL database.
SQLALCHEMY_DATABASE_URI = "postgres:///warehouse"
# The type of Storage to use. Can be either Filesystem or S3.
STORAGE = "Filesystem"
# The hash to use in computing filenames.
# Allowed values: md5, sha1, sha224, sha256, sha384, sha512, None
STORAGE_HASH = "md5"
# Base directory for storage when using the Filesystem.
STORAGE_DIRECTORY = "data"
# The name of the bucket that files will be stored in when using S3.
# STORAGE_BUCKET = "<storage bucket>"
# The S3 Key used to access S3 when using S3 Storage
# S3_KEY = "<S3 Key>"
# The S3 Secret used to access S3 when using S3 Storage
# S3_SECRET = "<S3 Secret>"
## Instruction:
Add an explicit default for REDIS_URI
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# The base domain name for this installation. Used to control linking to
# sub-domains.
SERVER_NAME = "warehouse.local"
# The URI for our PostgreSQL database.
SQLALCHEMY_DATABASE_URI = "postgres:///warehouse"
# The URI for our Redis database.
REDIS_URI = "redis://localhost:6379/0"
# The type of Storage to use. Can be either Filesystem or S3.
STORAGE = "Filesystem"
# The hash to use in computing filenames.
# Allowed values: md5, sha1, sha224, sha256, sha384, sha512, None
STORAGE_HASH = "md5"
# Base directory for storage when using the Filesystem.
STORAGE_DIRECTORY = "data"
# The name of the bucket that files will be stored in when using S3.
# STORAGE_BUCKET = "<storage bucket>"
# The S3 Key used to access S3 when using S3 Storage
# S3_KEY = "<S3 Key>"
# The S3 Secret used to access S3 when using S3 Storage
# S3_SECRET = "<S3 Secret>"
|
# ... existing code ...
# The URI for our PostgreSQL database.
SQLALCHEMY_DATABASE_URI = "postgres:///warehouse"
# The URI for our Redis database.
REDIS_URI = "redis://localhost:6379/0"
# The type of Storage to use. Can be either Filesystem or S3.
STORAGE = "Filesystem"
# ... rest of the code ...
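Expressing the default as a single URI keeps it symmetrical with SQLALCHEMY_DATABASE_URI and keeps the consumer to one line. How warehouse actually wires it up is not shown in this entry, but as a sketch, redis-py can build a client straight from such a string (the library choice here is an assumption):

import redis

REDIS_URI = "redis://localhost:6379/0"
connection = redis.StrictRedis.from_url(REDIS_URI)
connection.ping()  # True if a local Redis is listening on 6379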
|
d0472714a97af951250f68c7c22e57448f689291
|
setup.py
|
setup.py
|
from setuptools import setup
import os
data = list()
for d in os.walk('markdo/'):
if len(d[2]) > 0:
path_list = map(
lambda x: str.join('/', os.path.join(d[0], x).split('/')[1:]),
d[2]
)
data.extend(path_list)
setup(
name="MarkDo",
version="0.2",
author="Nitipit Nontasuwan",
author_email="[email protected]",
url="http://nitipit.github.com/markdo/",
license="MIT",
description="Markdown editor for Gnome",
platforms=['linux'],
keywords=['editor', 'markdown'],
package_dir={'markdo': 'markdo'},
packages=['markdo'],
scripts=['markdo/markdo'],
package_data={'markdo': data},
install_requires=['appkit==0.2'],
)
|
from setuptools import setup
import os
data = list()
for d in os.walk('markdo/'):
if len(d[2]) > 0:
path_list = map(
lambda x: str.join('/', os.path.join(d[0], x).split('/')[1:]),
d[2]
)
data.extend(path_list)
setup(
name="MarkDo",
version="0.2",
author="Nitipit Nontasuwan",
author_email="[email protected]",
url="http://nitipit.github.com/markdo/",
license="MIT",
description="Markdown editor for Gnome",
platforms=['linux'],
keywords=['editor', 'markdown'],
package_dir={'markdo': 'markdo'},
packages=['markdo'],
scripts=['markdo/markdo'],
package_data={'markdo': data},
install_requires=['AppKit==0.2', 'Jinja2'],
)
|
Add jinja2 as a required lib
|
Add jinja2 as a required lib
|
Python
|
mit
|
nitipit/markdo,nitipit/markdo,nitipit/markdo
|
python
|
## Code Before:
from setuptools import setup
import os
data = list()
for d in os.walk('markdo/'):
if len(d[2]) > 0:
path_list = map(
lambda x: str.join('/', os.path.join(d[0], x).split('/')[1:]),
d[2]
)
data.extend(path_list)
setup(
name="MarkDo",
version="0.2",
author="Nitipit Nontasuwan",
author_email="[email protected]",
url="http://nitipit.github.com/markdo/",
license="MIT",
description="Markdown editor for Gnome",
platforms=['linux'],
keywords=['editor', 'markdown'],
package_dir={'markdo': 'markdo'},
packages=['markdo'],
scripts=['markdo/markdo'],
package_data={'markdo': data},
install_requires=['appkit==0.2'],
)
## Instruction:
Add jinja2 as a required lib
## Code After:
from setuptools import setup
import os
data = list()
for d in os.walk('markdo/'):
if len(d[2]) > 0:
path_list = map(
lambda x: str.join('/', os.path.join(d[0], x).split('/')[1:]),
d[2]
)
data.extend(path_list)
setup(
name="MarkDo",
version="0.2",
author="Nitipit Nontasuwan",
author_email="[email protected]",
url="http://nitipit.github.com/markdo/",
license="MIT",
description="Markdown editor for Gnome",
platforms=['linux'],
keywords=['editor', 'markdown'],
package_dir={'markdo': 'markdo'},
packages=['markdo'],
scripts=['markdo/markdo'],
package_data={'markdo': data},
install_requires=['AppKit==0.2', 'Jinja2'],
)
|
# ... existing code ...
packages=['markdo'],
scripts=['markdo/markdo'],
package_data={'markdo': data},
install_requires=['AppKit==0.2', 'Jinja2'],
)
# ... rest of the code ...
|
a2940d4b51b95ce51667bbafe2205ed1bcd15d2f
|
app/src/main/java/org/stepik/android/data/course/repository/CoursePurchaseDataRepositoryImpl.kt
|
app/src/main/java/org/stepik/android/data/course/repository/CoursePurchaseDataRepositoryImpl.kt
|
package org.stepik.android.data.course.repository
import org.stepik.android.domain.course.repository.CoursePurchaseDataRepository
import org.stepik.android.domain.course_payments.model.DeeplinkPromoCode
import org.stepik.android.presentation.course_purchase.model.CoursePurchaseDataResult
import javax.inject.Inject
class CoursePurchaseDataRepositoryImpl
@Inject
constructor() : CoursePurchaseDataRepository {
private var deeplinkPromoCode: DeeplinkPromoCode = DeeplinkPromoCode.EMPTY
private var coursePurchaseDataResult: CoursePurchaseDataResult = CoursePurchaseDataResult.Empty
@Synchronized
override fun getDeeplinkPromoCode(): DeeplinkPromoCode =
deeplinkPromoCode
@Synchronized
override fun getCoursePurchaseData(): CoursePurchaseDataResult =
coursePurchaseDataResult
override fun savePurchaseData(deeplinkPromoCode: DeeplinkPromoCode, coursePurchaseDataResult: CoursePurchaseDataResult) {
this.deeplinkPromoCode = deeplinkPromoCode
this.coursePurchaseDataResult = coursePurchaseDataResult
}
}
|
package org.stepik.android.data.course.repository
import org.stepik.android.domain.course.repository.CoursePurchaseDataRepository
import org.stepik.android.domain.course_payments.model.DeeplinkPromoCode
import org.stepik.android.presentation.course_purchase.model.CoursePurchaseDataResult
import javax.inject.Inject
class CoursePurchaseDataRepositoryImpl
@Inject
constructor() : CoursePurchaseDataRepository {
private var deeplinkPromoCode: DeeplinkPromoCode = DeeplinkPromoCode.EMPTY
private var coursePurchaseDataResult: CoursePurchaseDataResult = CoursePurchaseDataResult.Empty
@Synchronized
override fun getDeeplinkPromoCode(): DeeplinkPromoCode =
deeplinkPromoCode
@Synchronized
override fun getCoursePurchaseData(): CoursePurchaseDataResult =
coursePurchaseDataResult
@Synchronized
override fun savePurchaseData(deeplinkPromoCode: DeeplinkPromoCode, coursePurchaseDataResult: CoursePurchaseDataResult) {
this.deeplinkPromoCode = deeplinkPromoCode
this.coursePurchaseDataResult = coursePurchaseDataResult
}
}
|
Add synchronized to save operation
|
Add synchronized to save operation
|
Kotlin
|
apache-2.0
|
StepicOrg/stepik-android,StepicOrg/stepik-android,StepicOrg/stepik-android
|
kotlin
|
## Code Before:
package org.stepik.android.data.course.repository
import org.stepik.android.domain.course.repository.CoursePurchaseDataRepository
import org.stepik.android.domain.course_payments.model.DeeplinkPromoCode
import org.stepik.android.presentation.course_purchase.model.CoursePurchaseDataResult
import javax.inject.Inject
class CoursePurchaseDataRepositoryImpl
@Inject
constructor() : CoursePurchaseDataRepository {
private var deeplinkPromoCode: DeeplinkPromoCode = DeeplinkPromoCode.EMPTY
private var coursePurchaseDataResult: CoursePurchaseDataResult = CoursePurchaseDataResult.Empty
@Synchronized
override fun getDeeplinkPromoCode(): DeeplinkPromoCode =
deeplinkPromoCode
@Synchronized
override fun getCoursePurchaseData(): CoursePurchaseDataResult =
coursePurchaseDataResult
override fun savePurchaseData(deeplinkPromoCode: DeeplinkPromoCode, coursePurchaseDataResult: CoursePurchaseDataResult) {
this.deeplinkPromoCode = deeplinkPromoCode
this.coursePurchaseDataResult = coursePurchaseDataResult
}
}
## Instruction:
Add synchronized to save operation
## Code After:
package org.stepik.android.data.course.repository
import org.stepik.android.domain.course.repository.CoursePurchaseDataRepository
import org.stepik.android.domain.course_payments.model.DeeplinkPromoCode
import org.stepik.android.presentation.course_purchase.model.CoursePurchaseDataResult
import javax.inject.Inject
class CoursePurchaseDataRepositoryImpl
@Inject
constructor() : CoursePurchaseDataRepository {
private var deeplinkPromoCode: DeeplinkPromoCode = DeeplinkPromoCode.EMPTY
private var coursePurchaseDataResult: CoursePurchaseDataResult = CoursePurchaseDataResult.Empty
@Synchronized
override fun getDeeplinkPromoCode(): DeeplinkPromoCode =
deeplinkPromoCode
@Synchronized
override fun getCoursePurchaseData(): CoursePurchaseDataResult =
coursePurchaseDataResult
@Synchronized
override fun savePurchaseData(deeplinkPromoCode: DeeplinkPromoCode, coursePurchaseDataResult: CoursePurchaseDataResult) {
this.deeplinkPromoCode = deeplinkPromoCode
this.coursePurchaseDataResult = coursePurchaseDataResult
}
}
|
// ... existing code ...
override fun getCoursePurchaseData(): CoursePurchaseDataResult =
coursePurchaseDataResult
@Synchronized
override fun savePurchaseData(deeplinkPromoCode: DeeplinkPromoCode, coursePurchaseDataResult: CoursePurchaseDataResult) {
this.deeplinkPromoCode = deeplinkPromoCode
this.coursePurchaseDataResult = coursePurchaseDataResult
// ... rest of the code ...
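Annotating savePurchaseData means the write now acquires the same monitor as the two getters, so its assignments are ordered against later synchronized reads instead of being plain unsynchronized field writes. A rough structural sketch of the same shape with an explicit lock (illustrative only; it mirrors the locking structure rather than the Stepik code, and Python's GIL makes the JVM visibility concern moot):

import threading

class PurchaseDataCache(object):
    def __init__(self):
        self._lock = threading.Lock()
        self._promo = None
        self._result = None

    def get_promo(self):
        with self._lock:
            return self._promo

    def get_result(self):
        with self._lock:
            return self._result

    def save(self, promo, result):
        # The write takes the same lock as the reads, mirroring @Synchronized
        # on the JVM: the assignments are ordered against later locked reads.
        with self._lock:
            self._promo = promo
            self._result = result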
|
40bc1f50e7b0605522feb4ac86daebb9f785eb88
|
test/OLItest/globals.py
|
test/OLItest/globals.py
|
from cStringIO import StringIO
default_conf=StringIO("""[general]
#will be set automatically
metadata =
accounts = test
ui = quiet
[Account test]
localrepository = Maildir
remoterepository = IMAP
[Repository Maildir]
Type = Maildir
# will be set automatically during tests
localfolders =
[Repository IMAP]
type=IMAP
folderfilter= lambda f: f.startswith('INBOX.OLItest')
""")
|
from cStringIO import StringIO
default_conf=StringIO("""[general]
#will be set automatically
metadata =
accounts = test
ui = quiet
[Account test]
localrepository = Maildir
remoterepository = IMAP
[Repository Maildir]
Type = Maildir
# will be set automatically during tests
localfolders =
[Repository IMAP]
type=IMAP
# Don't hammer the server with too many connection attempts:
maxconnections=1
folderfilter= lambda f: f.startswith('INBOX.OLItest')
""")
|
Use only 1 IMAP connection by default
|
tests: Use only 1 IMAP connection by default
We don't want to hammer IMAP servers for the test series too much
to avoid being locked out. We will need a few tests to test
concurrent connections, but by default one connection should be fine.
Signed-off-by: Sebastian Spaeth <[email protected]>
|
Python
|
apache-2.0
|
frioux/offlineimap,frioux/offlineimap
|
python
|
## Code Before:
from cStringIO import StringIO
default_conf=StringIO("""[general]
#will be set automatically
metadata =
accounts = test
ui = quiet
[Account test]
localrepository = Maildir
remoterepository = IMAP
[Repository Maildir]
Type = Maildir
# will be set automatically during tests
localfolders =
[Repository IMAP]
type=IMAP
folderfilter= lambda f: f.startswith('INBOX.OLItest')
""")
## Instruction:
tests: Use only 1 IMAP connection by default
We don't want to hammer IMAP servers for the test series too much
to avoid being locked out. We will need a few tests to test
concurrent connections, but by default one connection should be fine.
Signed-off-by: Sebastian Spaeth <[email protected]>
## Code After:
from cStringIO import StringIO
default_conf=StringIO("""[general]
#will be set automatically
metadata =
accounts = test
ui = quiet
[Account test]
localrepository = Maildir
remoterepository = IMAP
[Repository Maildir]
Type = Maildir
# will be set automatically during tests
localfolders =
[Repository IMAP]
type=IMAP
# Don't hammer the server with too many connection attempts:
maxconnections=1
folderfilter= lambda f: f.startswith('INBOX.OLItest')
""")
|
# ... existing code ...
[Repository IMAP]
type=IMAP
# Don't hammer the server with too many connection attempts:
maxconnections=1
folderfilter= lambda f: f.startswith('INBOX.OLItest')
""")
# ... rest of the code ...
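Because default_conf is an in-memory StringIO, each test can rewind it and feed it to the config parser to get a fresh baseline. A hedged sketch of that step with the Python 2 standard library (matching the cStringIO import; the real harness code is not shown in this entry):

from ConfigParser import SafeConfigParser

default_conf.seek(0)
parsed = SafeConfigParser()
parsed.readfp(default_conf)
assert parsed.getint('Repository IMAP', 'maxconnections') == 1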
|
734253d6b66e8268e130d27cbbc95fcc0365cc0e
|
java/interview/bipartition-pattern-matching.java
|
java/interview/bipartition-pattern-matching.java
|
/**
* @Company Facebook
*
* Given two strings, str and pattern, return true if the str has a valid mapping between the words in str and the characters in the pattern.
* The str will be whitespace separated and the pattern will be depicted using characters.
*
* Problem Ex.
* Input: 'cat dog dog cat', 'abba'
* Output: true because cat = a and dog = b.
*
* Input: 'cat dog dog foo', 'abba'
* Output: false because cat = a so foo cannot == a
*/
public boolean isValidPattern(String str, String pattern) { }
|
/**
* @Company Facebook
*
* Given two strings, str and pattern, return true if the str has a valid mapping between the words in str and the characters in the pattern.
* The str will be whitespace separated and the pattern will be depicted using characters.
*
* Problem Ex.
* Input: 'cat dog dog cat', 'abba'
* Output: true because cat = a and dog = b.
*
* Input: 'cat dog dog foo', 'abba'
* Output: false because cat = a so foo cannot == a
*
 * Note: user may assume the pattern string will only contain lowercase alphabet letters.
*/
public boolean wordPattern(String pattern, String str) {
// map each letter to a string.
String[] strs = str.split("\\s");
// character -> string mapping
String[] map = new String[26];
HashSet<String> set = new HashSet<String>();
// Must be equal in length.
if (strs.length != pattern.length()) return false;
for (int i = 0; i < strs.length; i ++) {
String string = strs[i];
char c = pattern.charAt(i);
int index = c - 'a';
// First occurence of the character.
if (map[index] == null && !set.contains(string)) {
map[index] = string;
set.add(string);
// Valid pattern match
} else if (map[index] != null && map[index].equals(string)) {
continue;
} else {
return false;
}
}
return true;
}
|
Add bipartite pattern matching solution
|
Add bipartite pattern matching solution
|
Java
|
mit
|
vinnyoodles/algorithms,vinnyoodles/algorithms,vinnyoodles/algorithms
|
java
|
## Code Before:
/**
* @Company Facebook
*
* Given two strings, str and pattern, return true if the str has a valid mapping between the words in str and the characters in the pattern.
* The str will be whitespace separated and the pattern will be depicted using characters.
*
* Problem Ex.
* Input: 'cat dog dog cat', 'abba'
* Output: true because cat = a and dog = b.
*
* Input: 'cat dog dog foo', 'abba'
* Output: false because cat = a so foo cannot == a
*/
public boolean isValidPattern(String str, String pattern) { }
## Instruction:
Add bipartite pattern matching solution
## Code After:
/**
* @Company Facebook
*
* Given two strings, str and pattern, return true if the str has a valid mapping between the words in str and the characters in the pattern.
* The str will be whitespace separated and the pattern will be depicted using characters.
*
* Problem Ex.
* Input: 'cat dog dog cat', 'abba'
* Output: true because cat = a and dog = b.
*
* Input: 'cat dog dog foo', 'abba'
* Output: false because cat = a so foo cannot == a
*
 * Note: user may assume the pattern string will only contain lowercase alphabet letters.
*/
public boolean wordPattern(String pattern, String str) {
// map each letter to a string.
String[] strs = str.split("\\s");
// character -> string mapping
String[] map = new String[26];
HashSet<String> set = new HashSet<String>();
// Must be equal in length.
if (strs.length != pattern.length()) return false;
for (int i = 0; i < strs.length; i ++) {
String string = strs[i];
char c = pattern.charAt(i);
int index = c - 'a';
        // First occurrence of the character.
if (map[index] == null && !set.contains(string)) {
map[index] = string;
set.add(string);
// Valid pattern match
} else if (map[index] != null && map[index].equals(string)) {
continue;
} else {
return false;
}
}
return true;
}
|
// ... existing code ...
*
* Input: 'cat dog dog foo', 'abba'
* Output: false because cat = a so foo cannot == a
*
 * Note: user may assume the pattern string will only contain lowercase alphabet letters.
*/
public boolean wordPattern(String pattern, String str) {
// map each letter to a string.
String[] strs = str.split("\\s");
// character -> string mapping
String[] map = new String[26];
HashSet<String> set = new HashSet<String>();
// Must be equal in length.
if (strs.length != pattern.length()) return false;
for (int i = 0; i < strs.length; i ++) {
String string = strs[i];
char c = pattern.charAt(i);
int index = c - 'a';
        // First occurrence of the character.
if (map[index] == null && !set.contains(string)) {
map[index] = string;
set.add(string);
// Valid pattern match
} else if (map[index] != null && map[index].equals(string)) {
continue;
} else {
return false;
}
}
return true;
}
// ... rest of the code ...
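The solution enforces a bijection in both directions: map[] guarantees a pattern letter is bound to exactly one word, and the HashSet guarantees a word cannot be claimed by a second letter. The same check in a compact Python sketch (an equivalent illustration, not part of the repository):

def word_pattern(pattern, s):
    words = s.split()
    if len(words) != len(pattern):
        return False
    letter_to_word, used_words = {}, set()
    for letter, word in zip(pattern, words):
        if letter not in letter_to_word:
            if word in used_words:  # word already bound to another letter
                return False
            letter_to_word[letter] = word
            used_words.add(word)
        elif letter_to_word[letter] != word:
            return False
    return True

assert word_pattern('abba', 'cat dog dog cat')
assert not word_pattern('abba', 'cat dog dog foo')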
|
70cc77a9146f9d4afd78df9a2f8da8673f0320de
|
extractor.py
|
extractor.py
|
import extractor.ui.mainapplication as ui
import Tkinter as tk
def main():
## root = tk.Tk()
## root.title('SNES Wolfenstein 3D Extractor')
## root.minsize(400, 100)
## ui.MainApplication(root).pack(side="top", fill="both", expand=True)
## root.mainloop()
ui.MainApplication().mainloop()
main()
|
import extractor.ui.mainapplication as ui
import Tkinter as tk
def main():
## root = tk.Tk()
## root.title('SNES Wolfenstein 3D Extractor')
## root.minsize(400, 100)
## ui.MainApplication(root).pack(side="top", fill="both", expand=True)
root = ui.MainApplication()
root.mainloop()
main()
|
Split program call into two lines.
|
Split program call into two lines.
|
Python
|
mit
|
adambiser/snes-wolf3d-extractor
|
python
|
## Code Before:
import extractor.ui.mainapplication as ui
import Tkinter as tk
def main():
## root = tk.Tk()
## root.title('SNES Wolfenstein 3D Extractor')
## root.minsize(400, 100)
## ui.MainApplication(root).pack(side="top", fill="both", expand=True)
## root.mainloop()
ui.MainApplication().mainloop()
main()
## Instruction:
Split program call into two lines.
## Code After:
import extractor.ui.mainapplication as ui
import Tkinter as tk
def main():
## root = tk.Tk()
## root.title('SNES Wolfenstein 3D Extractor')
## root.minsize(400, 100)
## ui.MainApplication(root).pack(side="top", fill="both", expand=True)
root = ui.MainApplication()
root.mainloop()
main()
|
...
## root.title('SNES Wolfenstein 3D Extractor')
## root.minsize(400, 100)
## ui.MainApplication(root).pack(side="top", fill="both", expand=True)
root = ui.MainApplication()
root.mainloop()
main()
...
|
24c6b759c1e8898946cdae591bce236e3ddbc2d8
|
topStocks.py
|
topStocks.py
|
"""Find top stocks and post them to Twitter."""
import sys
import tweetPoster
import stockPrices
from stockList import getStockList
import time
def main():
    # Get the list of stock symbols
currentStockList = getStockList()
# Get the stock prices
oldStockPrices = stockPrices.getStockPrices(currentStockList)
# Wait a while for the stock prices to change
print("Sleeping...")
time.sleep(30)
print("Done sleeping.")
# Get the new stock prices
currentStockPrices = stockPrices.getStockPrices(currentStockList)
# Find the fastest-changing stocks
topStockPrices = stockPrices.getFastestChangingPrices(currentStockPrices, oldStockPrices)
# Format the tweet text
tweetText = ""
for stockPrice in topStockPrices:
symbol = stockPrice[0]
percentage = round(float(stockPrice[1]), 2)
if percentage > 0:
percentage = '+' + str(percentage)
tweetText = tweetText + '#' + symbol + " : " + percentage + '%, '
# Post a tweet of the top stocks
print tweetText
tweetPoster.postTweet(tweetText)
if __name__ == '__main__':
sys.exit(main())
|
"""Find top stocks and post them to Twitter."""
import sys
import tweetPoster
import stockPrices
from stockList import getStockList
import time
def main():
    # Get the list of stock symbols
currentStockList = getStockList()
# Get the stock prices
oldStockPrices = stockPrices.getStockPrices(currentStockList)
# Wait a while for the stock prices to change
print("Sleeping...")
time.sleep(30)
print("Done sleeping.")
# Get the new stock prices
currentStockPrices = stockPrices.getStockPrices(currentStockList)
# Find the fastest-changing stocks
topStockPrices = stockPrices.getFastestChangingPrices(currentStockPrices, oldStockPrices)
# Format the tweet text
tweetText = ""
for stockPrice in topStockPrices:
symbol = stockPrice[0]
percentage = round(float(stockPrice[1]), 2)
if percentage > 0:
percentage = '+' + str(percentage)
else:
percentage = str(percentage)
tweetText = tweetText + '#' + symbol + " : " + percentage + '%, '
# Post a tweet of the top stocks
print tweetText
tweetPoster.postTweet(tweetText)
if __name__ == '__main__':
sys.exit(main())
|
Make sure "percentage" is a string before concatenating it.
|
Make sure "percentage" is a string before concatenating it.
|
Python
|
mit
|
trswany/topStocks
|
python
|
## Code Before:
"""Find top stocks and post them to Twitter."""
import sys
import tweetPoster
import stockPrices
from stockList import getStockList
import time
def main():
    # Get the list of stock symbols
currentStockList = getStockList()
# Get the stock prices
oldStockPrices = stockPrices.getStockPrices(currentStockList)
# Wait a while for the stock prices to change
print("Sleeping...")
time.sleep(30)
print("Done sleeping.")
# Get the new stock prices
currentStockPrices = stockPrices.getStockPrices(currentStockList)
# Find the fastest-changing stocks
topStockPrices = stockPrices.getFastestChangingPrices(currentStockPrices, oldStockPrices)
# Format the tweet text
tweetText = ""
for stockPrice in topStockPrices:
symbol = stockPrice[0]
percentage = round(float(stockPrice[1]), 2)
if percentage > 0:
percentage = '+' + str(percentage)
tweetText = tweetText + '#' + symbol + " : " + percentage + '%, '
# Post a tweet of the top stocks
print tweetText
tweetPoster.postTweet(tweetText)
if __name__ == '__main__':
sys.exit(main())
## Instruction:
Make sure "percentage" is a string before concatenating it.
## Code After:
"""Find top stocks and post them to Twitter."""
import sys
import tweetPoster
import stockPrices
from stockList import getStockList
import time
def main():
    # Get the list of stock symbols
currentStockList = getStockList()
# Get the stock prices
oldStockPrices = stockPrices.getStockPrices(currentStockList)
# Wait a while for the stock prices to change
print("Sleeping...")
time.sleep(30)
print("Done sleeping.")
# Get the new stock prices
currentStockPrices = stockPrices.getStockPrices(currentStockList)
# Find the fastest-changing stocks
topStockPrices = stockPrices.getFastestChangingPrices(currentStockPrices, oldStockPrices)
# Format the tweet text
tweetText = ""
for stockPrice in topStockPrices:
symbol = stockPrice[0]
percentage = round(float(stockPrice[1]), 2)
if percentage > 0:
percentage = '+' + str(percentage)
else:
percentage = str(percentage)
tweetText = tweetText + '#' + symbol + " : " + percentage + '%, '
# Post a tweet of the top stocks
print tweetText
tweetPoster.postTweet(tweetText)
if __name__ == '__main__':
sys.exit(main())
|
# ... existing code ...
percentage = round(float(stockPrice[1]), 2)
if percentage > 0:
percentage = '+' + str(percentage)
else:
percentage = str(percentage)
tweetText = tweetText + '#' + symbol + " : " + percentage + '%, '
# Post a tweet of the top stocks
# ... rest of the code ...
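The fix simply makes both branches produce a string before the '+' concatenation below; previously a non-positive value stayed a float and the concatenation raised TypeError. As a design note rather than a change to the entry, format() can fold the sign handling and the conversion into one step, at the cost of always printing a sign (zero would become '+0.00'):

# Self-contained sketch; the symbol/percentage pairs stand in for the loop in topStocks.py.
for symbol, raw_change in [('AAPL', '1.234'), ('XYZ', '-0.5')]:
    percentage = '{:+.2f}'.format(float(raw_change))
    print('#' + symbol + " : " + percentage + '%, ')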
|
ad6b7fe871be502220de5bcb6c2a65f4e7999294
|
etcd3/client.py
|
etcd3/client.py
|
import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
|
import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def delete(self, key):
pass
def compact(self):
pass
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
|
Add compact and delete stubs
|
Add compact and delete stubs
|
Python
|
apache-2.0
|
kragniz/python-etcd3
|
python
|
## Code Before:
import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
## Instruction:
Add compact and delete stubs
## Code After:
import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def delete(self, key):
pass
def compact(self):
pass
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
|
# ... existing code ...
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def delete(self, key):
pass
def compact(self):
pass
def client():
'''Return an instance of an Etcd3Client'''
# ... rest of the code ...
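With Range and Put wired up, the client already round-trips values; the two new stubs just reserve names for the remaining KV calls. A short usage sketch against a local etcd (this assumes a server on localhost:2379; delete and compact are deliberately not called because they are still pass):

import etcd3

etcd = etcd3.client()
etcd.put('foo', 'bar')
print(etcd.get('foo'))  # b'bar', pulled out of the Range response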
|
226f2a5674c9d1d16801cfe7b8c5ac636e849b4a
|
setup.py
|
setup.py
|
from setuptools import setup
version = "0.5.1"
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="arxiv",
version=version,
packages=["arxiv"],
# dependencies
install_requires=[
'feedparser',
'requests',
'pytest-runner',
],
tests_require=[
"pytest",
],
# metadata for upload to PyPI
author="Lukas Schwab",
author_email="[email protected]",
description="Python wrapper for the arXiv API: http://arxiv.org/help/api/",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
keywords="arxiv api wrapper academic journals papers",
url="https://github.com/lukasschwab/arxiv.py",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
|
from setuptools import setup
version = "0.5.1"
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="arxiv",
version=version,
packages=["arxiv"],
# dependencies
install_requires=[
'feedparser',
'requests',
],
tests_require=[
"pytest",
"numpy",
],
# metadata for upload to PyPI
author="Lukas Schwab",
author_email="[email protected]",
description="Python wrapper for the arXiv API: http://arxiv.org/help/api/",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
keywords="arxiv api wrapper academic journals papers",
url="https://github.com/lukasschwab/arxiv.py",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
|
Remove pytest-runner from install requirements, add numpy as test requirement
|
Remove pytest-runner from install requirements, add numpy as test requirement
|
Python
|
mit
|
lukasschwab/arxiv.py
|
python
|
## Code Before:
from setuptools import setup
version = "0.5.1"
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="arxiv",
version=version,
packages=["arxiv"],
# dependencies
install_requires=[
'feedparser',
'requests',
'pytest-runner',
],
tests_require=[
"pytest",
],
# metadata for upload to PyPI
author="Lukas Schwab",
author_email="[email protected]",
description="Python wrapper for the arXiv API: http://arxiv.org/help/api/",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
keywords="arxiv api wrapper academic journals papers",
url="https://github.com/lukasschwab/arxiv.py",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
## Instruction:
Remove pytest-runner from install requirements, add numpy as test requirement
## Code After:
from setuptools import setup
version = "0.5.1"
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="arxiv",
version=version,
packages=["arxiv"],
# dependencies
install_requires=[
'feedparser',
'requests',
],
tests_require=[
"pytest",
"numpy",
],
# metadata for upload to PyPI
author="Lukas Schwab",
author_email="[email protected]",
description="Python wrapper for the arXiv API: http://arxiv.org/help/api/",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
keywords="arxiv api wrapper academic journals papers",
url="https://github.com/lukasschwab/arxiv.py",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
|
// ... existing code ...
install_requires=[
'feedparser',
'requests',
],
tests_require=[
"pytest",
"numpy",
],
# metadata for upload to PyPI
author="Lukas Schwab",
// ... rest of the code ...
|
0491590da07bea69328cca93925a80fbf1c6ed66
|
models/src/main/java/com/vimeo/networking2/FolderInteractions.kt
|
models/src/main/java/com/vimeo/networking2/FolderInteractions.kt
|
package com.vimeo.networking2
import com.squareup.moshi.Json
import com.squareup.moshi.JsonClass
/**
* The interactions for a folder.
*
* @param addSubfolder The interaction used to add a subfolder as well as determine capability for adding subfolders.
* @param deleteVideo The interaction that shows whether the user can delete videos from the folder.
* @param edit The interaction that shows whether the user can edit the folder's settings.
* @param invite The interaction that shows whether the user can invite other users to manage the folder.
*/
@JsonClass(generateAdapter = true)
data class FolderInteractions(
@Json(name = "add_subfolder")
val addSubfolder: AddSubfolderInteraction? = null,
@Json(name = "delete_video")
val deleteVideo: BasicInteraction? = null,
@Json(name = "edit")
val edit: BasicInteraction? = null,
@Json(name = "invite")
val invite: BasicInteraction? = null,
)
|
package com.vimeo.networking2
import com.squareup.moshi.Json
import com.squareup.moshi.JsonClass
/**
* The interactions for a folder.
*
* @param addRemoveVideos The interaction used to determine if the user can add to or remove videos from the folder.
* @param addSubfolder The interaction used to add a subfolder as well as determine capability for adding subfolders.
* @param deleteVideo The interaction that shows whether the user can delete videos from the folder.
* @param invite The interaction that shows whether the user can invite other users to manage the folder.
*/
@JsonClass(generateAdapter = true)
data class FolderInteractions(
@Json(name = "edit")
val addRemoveVideos: BasicInteraction? = null,
@Json(name = "add_subfolder")
val addSubfolder: AddSubfolderInteraction? = null,
@Json(name = "delete_video")
val deleteVideo: BasicInteraction? = null,
@Json(name = "invite")
val invite: BasicInteraction? = null,
)
|
Rename edit interaction to addRemoveVideos to be more clear about its purpose
|
Rename edit interaction to addRemoveVideos to be more clear about its purpose
|
Kotlin
|
mit
|
vimeo/vimeo-networking-java,vimeo/vimeo-networking-java,vimeo/vimeo-networking-java
|
kotlin
|
## Code Before:
package com.vimeo.networking2
import com.squareup.moshi.Json
import com.squareup.moshi.JsonClass
/**
* The interactions for a folder.
*
* @param addSubfolder The interaction used to add a subfolder as well as determine capability for adding subfolders.
* @param deleteVideo The interaction that shows whether the user can delete videos from the folder.
* @param edit The interaction that shows whether the user can edit the folder's settings.
* @param invite The interaction that shows whether the user can invite other users to manage the folder.
*/
@JsonClass(generateAdapter = true)
data class FolderInteractions(
@Json(name = "add_subfolder")
val addSubfolder: AddSubfolderInteraction? = null,
@Json(name = "delete_video")
val deleteVideo: BasicInteraction? = null,
@Json(name = "edit")
val edit: BasicInteraction? = null,
@Json(name = "invite")
val invite: BasicInteraction? = null,
)
## Instruction:
Rename edit interaction to addRemoveVideos to be more clear about its purpose
## Code After:
package com.vimeo.networking2
import com.squareup.moshi.Json
import com.squareup.moshi.JsonClass
/**
* The interactions for a folder.
*
* @param addRemoveVideos The interaction used to determine if the user can add to or remove videos from the folder.
* @param addSubfolder The interaction used to add a subfolder as well as determine capability for adding subfolders.
* @param deleteVideo The interaction that shows whether the user can delete videos from the folder.
* @param invite The interaction that shows whether the user can invite other users to manage the folder.
*/
@JsonClass(generateAdapter = true)
data class FolderInteractions(
@Json(name = "edit")
val addRemoveVideos: BasicInteraction? = null,
@Json(name = "add_subfolder")
val addSubfolder: AddSubfolderInteraction? = null,
@Json(name = "delete_video")
val deleteVideo: BasicInteraction? = null,
@Json(name = "invite")
val invite: BasicInteraction? = null,
)
|
// ... existing code ...
/**
* The interactions for a folder.
*
* @param addRemoveVideos The interaction used to determine if the user can add to or remove videos from the folder.
* @param addSubfolder The interaction used to add a subfolder as well as determine capability for adding subfolders.
* @param deleteVideo The interaction that shows whether the user can delete videos from the folder.
* @param invite The interaction that shows whether the user can invite other users to manage the folder.
*/
@JsonClass(generateAdapter = true)
data class FolderInteractions(
@Json(name = "edit")
val addRemoveVideos: BasicInteraction? = null,
@Json(name = "add_subfolder")
val addSubfolder: AddSubfolderInteraction? = null,
// ... modified code ...
@Json(name = "delete_video")
val deleteVideo: BasicInteraction? = null,
@Json(name = "invite")
val invite: BasicInteraction? = null,
// ... rest of the code ...
|
fc89323210a5f3f53808f7d801705d6b8c0a4224
|
test/Analysis/unix-fns.c
|
test/Analysis/unix-fns.c
|
// RUN: %clang_cc1 -analyze -analyzer-check-objc-mem %s -analyzer-store=region
// RUN: %clang_cc1 -analyze -analyzer-check-objc-mem %s -analyzer-store=basic
#include <fcntl.h>
void test_open(const char *path) {
int fd;
fd = open(path, O_RDONLY); // no-warning
if (!fd)
close(fd);
fd = open(path, O_CREAT); // expected-warning{{Call to 'open' requires a third argument when the 'O_CREAT' flag is set}}
if (!fd)
close(fd);
}
|
// RUN: %clang_cc1 -analyze -analyzer-check-objc-mem %s -analyzer-store=region
// RUN: %clang_cc1 -analyze -analyzer-check-objc-mem %s -analyzer-store=basic
#ifndef O_CREAT
#define O_CREAT 0x0200
#define O_RDONLY 0x0000
#endif
int open(const char *, int, ...);
void test_open(const char *path) {
int fd;
fd = open(path, O_RDONLY); // no-warning
if (!fd)
close(fd);
fd = open(path, O_CREAT); // expected-warning{{Call to 'open' requires a third argument when the 'O_CREAT' flag is set}}
if (!fd)
close(fd);
}
|
Remove test case dependency on platform headers.
|
Remove test case dependency on platform headers.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@97088 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang
|
c
|
## Code Before:
// RUN: %clang_cc1 -analyze -analyzer-check-objc-mem %s -analyzer-store=region
// RUN: %clang_cc1 -analyze -analyzer-check-objc-mem %s -analyzer-store=basic
#include <fcntl.h>
void test_open(const char *path) {
int fd;
fd = open(path, O_RDONLY); // no-warning
if (!fd)
close(fd);
fd = open(path, O_CREAT); // expected-warning{{Call to 'open' requires a third argument when the 'O_CREAT' flag is set}}
if (!fd)
close(fd);
}
## Instruction:
Remove test case dependency on platform headers.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@97088 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// RUN: %clang_cc1 -analyze -analyzer-check-objc-mem %s -analyzer-store=region
// RUN: %clang_cc1 -analyze -analyzer-check-objc-mem %s -analyzer-store=basic
#ifndef O_CREAT
#define O_CREAT 0x0200
#define O_RDONLY 0x0000
#endif
int open(const char *, int, ...);
void test_open(const char *path) {
int fd;
fd = open(path, O_RDONLY); // no-warning
if (!fd)
close(fd);
fd = open(path, O_CREAT); // expected-warning{{Call to 'open' requires a third argument when the 'O_CREAT' flag is set}}
if (!fd)
close(fd);
}
|
...
// RUN: %clang_cc1 -analyze -analyzer-check-objc-mem %s -analyzer-store=region
// RUN: %clang_cc1 -analyze -analyzer-check-objc-mem %s -analyzer-store=basic
#ifndef O_CREAT
#define O_CREAT 0x0200
#define O_RDONLY 0x0000
#endif
int open(const char *, int, ...);
void test_open(const char *path) {
int fd;
...
|
225abbf06472fe7afd15252ca446456c4caed0bb
|
contact/test_settings.py
|
contact/test_settings.py
|
import os
CONTACT_EMAILS = ['[email protected]']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS = ['django.core.context_processors.static']
TEMPLATE_DIRS = [os.path.join(os.path.dirname(__file__), 'test_templates')]
|
import os
CONTACT_EMAILS = ['[email protected]']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(os.path.dirname(__file__), 'test_templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.static',
],
},
},
]
|
Update test settings for Django >= 1.8.
|
Update test settings for Django >= 1.8.
|
Python
|
bsd-3-clause
|
aaugustin/myks-contact,aaugustin/myks-contact
|
python
|
## Code Before:
import os
CONTACT_EMAILS = ['[email protected]']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS = ['django.core.context_processors.static']
TEMPLATE_DIRS = [os.path.join(os.path.dirname(__file__), 'test_templates')]
## Instruction:
Update test settings for Django >= 1.8.
## Code After:
import os
CONTACT_EMAILS = ['[email protected]']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(os.path.dirname(__file__), 'test_templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.static',
],
},
},
]
|
// ... existing code ...
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
// ... modified code ...
STATIC_URL = '/static/'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(os.path.dirname(__file__), 'test_templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.static',
],
},
},
]
// ... rest of the code ...
|
4eada6970d72b3863104790229286edf8d17720c
|
accelerator/tests/contexts/user_role_context.py
|
accelerator/tests/contexts/user_role_context.py
|
from builtins import object
from accelerator.tests.factories import (
ExpertFactory,
ProgramFactory,
ProgramRoleFactory,
ProgramRoleGrantFactory,
UserRoleFactory,
)
class UserRoleContext(object):
def __init__(self, user_role_name, program=None, user=None):
if user and not program:
self.program = user.get_profile().current_program
else:
self.program = program or ProgramFactory()
self.user = (user or
ExpertFactory(profile__current_program=self.program))
self.user_role = UserRoleFactory(name=user_role_name)
self.program_role = ProgramRoleFactory(user_role=self.user_role,
program=self.program)
self.program_role_grant = ProgramRoleGrantFactory(
person=self.user,
program_role=self.program_role)
|
from builtins import object
from accelerator.tests.factories import (
ExpertFactory,
ProgramFactory,
ProgramRoleFactory,
ProgramRoleGrantFactory,
UserRoleFactory,
)
from accelerator.models import UserRole
class UserRoleContext(object):
def __init__(self, user_role_name, program=None, user=None):
if user and not program:
self.program = user.get_profile().current_program
else:
self.program = program or ProgramFactory()
self.user = (user or
ExpertFactory(profile__current_program=self.program))
self.user_role = _user_role_for_name(user_role_name)
self.program_role = ProgramRoleFactory(user_role=self.user_role,
program=self.program)
self.program_role_grant = ProgramRoleGrantFactory(
person=self.user,
program_role=self.program_role)
def _user_role_for_name(user_role_name):
return (UserRole.objects.filter(name=user_role_name).first() or
UserRoleFactory(name=user_role_name))
|
Make UserRoleContext safe to use
|
[AC-7397] Make UserRoleContext safe to use
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
python
|
## Code Before:
from builtins import object
from accelerator.tests.factories import (
ExpertFactory,
ProgramFactory,
ProgramRoleFactory,
ProgramRoleGrantFactory,
UserRoleFactory,
)
class UserRoleContext(object):
def __init__(self, user_role_name, program=None, user=None):
if user and not program:
self.program = user.get_profile().current_program
else:
self.program = program or ProgramFactory()
self.user = (user or
ExpertFactory(profile__current_program=self.program))
self.user_role = UserRoleFactory(name=user_role_name)
self.program_role = ProgramRoleFactory(user_role=self.user_role,
program=self.program)
self.program_role_grant = ProgramRoleGrantFactory(
person=self.user,
program_role=self.program_role)
## Instruction:
[AC-7397] Make UserRoleContext safe to use
## Code After:
from builtins import object
from accelerator.tests.factories import (
ExpertFactory,
ProgramFactory,
ProgramRoleFactory,
ProgramRoleGrantFactory,
UserRoleFactory,
)
from accelerator.models import UserRole
class UserRoleContext(object):
def __init__(self, user_role_name, program=None, user=None):
if user and not program:
self.program = user.get_profile().current_program
else:
self.program = program or ProgramFactory()
self.user = (user or
ExpertFactory(profile__current_program=self.program))
self.user_role = _user_role_for_name(user_role_name)
self.program_role = ProgramRoleFactory(user_role=self.user_role,
program=self.program)
self.program_role_grant = ProgramRoleGrantFactory(
person=self.user,
program_role=self.program_role)
def _user_role_for_name(user_role_name):
return (UserRole.objects.filter(name=user_role_name).first() or
UserRoleFactory(name=user_role_name))
|
# ... existing code ...
ProgramRoleGrantFactory,
UserRoleFactory,
)
from accelerator.models import UserRole
class UserRoleContext(object):
# ... modified code ...
self.program = program or ProgramFactory()
self.user = (user or
ExpertFactory(profile__current_program=self.program))
self.user_role = _user_role_for_name(user_role_name)
self.program_role = ProgramRoleFactory(user_role=self.user_role,
program=self.program)
self.program_role_grant = ProgramRoleGrantFactory(
person=self.user,
program_role=self.program_role)
def _user_role_for_name(user_role_name):
return (UserRole.objects.filter(name=user_role_name).first() or
UserRoleFactory(name=user_role_name))
# ... rest of the code ...
|
f117facb5ade615965bdd76a870659fe1f62f302
|
test/Analysis/uninit-vals-ps-region.c
|
test/Analysis/uninit-vals-ps-region.c
|
// RUN: clang -checker-simple -analyzer-store-region -verify %s
struct s {
int data;
};
struct s global;
void g(int);
void f4() {
int a;
if (global.data == 0)
a = 3;
if (global.data == 0)
g(a); // no-warning
}
|
// RUN: clang -checker-simple -analyzer-store-region -verify %s
struct s {
int data;
};
struct s global;
void g(int);
void f4() {
int a;
if (global.data == 0)
a = 3;
if (global.data == 0) // The true branch is infeasible.
g(a); // no-warning
}
|
Add comment to test case for documentation.
|
Add comment to test case for documentation.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@60521 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang
|
c
|
## Code Before:
// RUN: clang -checker-simple -analyzer-store-region -verify %s
struct s {
int data;
};
struct s global;
void g(int);
void f4() {
int a;
if (global.data == 0)
a = 3;
if (global.data == 0)
g(a); // no-warning
}
## Instruction:
Add comment to test case for documentation.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@60521 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// RUN: clang -checker-simple -analyzer-store-region -verify %s
struct s {
int data;
};
struct s global;
void g(int);
void f4() {
int a;
if (global.data == 0)
a = 3;
if (global.data == 0) // The true branch is infeasible.
g(a); // no-warning
}
|
// ... existing code ...
int a;
if (global.data == 0)
a = 3;
if (global.data == 0) // The true branch is infeasible.
g(a); // no-warning
}
// ... rest of the code ...
|
6f6199240009ac91da7e663030125df439d8fe7e
|
tests/test_trust_list.py
|
tests/test_trust_list.py
|
from __future__ import unicode_literals, division, absolute_import, print_function
import unittest
import sys
from oscrypto import trust_list
from asn1crypto.x509 import Certificate
if sys.version_info < (3,):
byte_cls = str
else:
byte_cls = bytes
class TrustListTests(unittest.TestCase):
def test_extract_from_system(self):
certs = trust_list.extract_from_system()
self.assertIsInstance(certs, list)
for cert in certs:
self.assertIsInstance(cert, byte_cls)
_ = Certificate.load(cert).native
|
from __future__ import unicode_literals, division, absolute_import, print_function
import unittest
import sys
from oscrypto import trust_list
from asn1crypto.x509 import Certificate
if sys.version_info < (3,):
byte_cls = str
else:
byte_cls = bytes
class TrustListTests(unittest.TestCase):
def test_extract_from_system(self):
certs = trust_list.extract_from_system()
self.assertIsInstance(certs, list)
self.assertLess(10, len(certs))
for cert in certs:
self.assertIsInstance(cert, byte_cls)
_ = Certificate.load(cert).native
|
Add more sanity checks to the trust list test
|
Add more sanity checks to the trust list test
|
Python
|
mit
|
wbond/oscrypto
|
python
|
## Code Before:
from __future__ import unicode_literals, division, absolute_import, print_function
import unittest
import sys
from oscrypto import trust_list
from asn1crypto.x509 import Certificate
if sys.version_info < (3,):
byte_cls = str
else:
byte_cls = bytes
class TrustListTests(unittest.TestCase):
def test_extract_from_system(self):
certs = trust_list.extract_from_system()
self.assertIsInstance(certs, list)
for cert in certs:
self.assertIsInstance(cert, byte_cls)
_ = Certificate.load(cert).native
## Instruction:
Add more sanity checks to the trust list test
## Code After:
from __future__ import unicode_literals, division, absolute_import, print_function
import unittest
import sys
from oscrypto import trust_list
from asn1crypto.x509 import Certificate
if sys.version_info < (3,):
byte_cls = str
else:
byte_cls = bytes
class TrustListTests(unittest.TestCase):
def test_extract_from_system(self):
certs = trust_list.extract_from_system()
self.assertIsInstance(certs, list)
self.assertLess(10, len(certs))
for cert in certs:
self.assertIsInstance(cert, byte_cls)
_ = Certificate.load(cert).native
|
// ... existing code ...
def test_extract_from_system(self):
certs = trust_list.extract_from_system()
self.assertIsInstance(certs, list)
self.assertLess(10, len(certs))
for cert in certs:
self.assertIsInstance(cert, byte_cls)
_ = Certificate.load(cert).native
// ... rest of the code ...
|
cbae962b77b7277f5904279a5418a53e38148f2c
|
karspexet/show/models.py
|
karspexet/show/models.py
|
from django.db import models
import datetime
class Production(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(blank=True)
def __str__(self):
return self.name
class Show(models.Model):
production = models.ForeignKey(Production, on_delete=models.PROTECT)
date = models.DateTimeField()
venue = models.ForeignKey('venue.Venue', on_delete=models.PROTECT)
@staticmethod
def upcoming():
return Show.objects.filter(date__gte=datetime.date.today())
def date_string(self):
return self.date.strftime("%Y-%m-%d %H:%M")
def __str__(self):
return self.production.name + " " + self.date_string()
class Meta:
ordering = ('date',)
|
from django.db import models
import datetime
class Production(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(blank=True)
def __str__(self):
return self.name
class Show(models.Model):
production = models.ForeignKey(Production, on_delete=models.PROTECT)
date = models.DateTimeField()
venue = models.ForeignKey('venue.Venue', on_delete=models.PROTECT)
@staticmethod
def upcoming():
return Show.objects.filter(date__gte=datetime.date.today())
@staticmethod
def ticket_coverage():
return Show.objects.raw("""
select show.id,
show.production_id,
show.venue_id,
venue.name as venue_name,
production.name as production_name,
show.date,
count(distinct(ticket.id)) as ticket_count,
count(distinct(seat.id)) as seat_count,
100 * (count(distinct(ticket.id))::float / count(distinct(seat.id))) as sales_percentage
from show_show show
left outer join ticket_ticket ticket on ticket.show_id = show.id
left join venue_venue venue on show.venue_id = venue.id
left join venue_seatinggroup sg on sg.venue_id = venue.id
left join venue_seat seat on sg.id = seat.group_id
left join show_production production on show.production_id = production.id
group by show.id, venue.name, production.name
order by show.date desc
""")
def date_string(self):
return self.date.strftime("%Y-%m-%d %H:%M")
def __str__(self):
return self.production.name + " " + self.date_string()
class Meta:
ordering = ('date',)
|
Add Show.ticket_coverage() to get statistics on coverage
|
Add Show.ticket_coverage() to get statistics on coverage
Very left join, much SQL, wow.
|
Python
|
mit
|
Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet
|
python
|
## Code Before:
from django.db import models
import datetime
class Production(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(blank=True)
def __str__(self):
return self.name
class Show(models.Model):
production = models.ForeignKey(Production, on_delete=models.PROTECT)
date = models.DateTimeField()
venue = models.ForeignKey('venue.Venue', on_delete=models.PROTECT)
@staticmethod
def upcoming():
return Show.objects.filter(date__gte=datetime.date.today())
def date_string(self):
return self.date.strftime("%Y-%m-%d %H:%M")
def __str__(self):
return self.production.name + " " + self.date_string()
class Meta:
ordering = ('date',)
## Instruction:
Add Show.ticket_coverage() to get statistics on coverage
Very left join, much SQL, wow.
## Code After:
from django.db import models
import datetime
class Production(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(blank=True)
def __str__(self):
return self.name
class Show(models.Model):
production = models.ForeignKey(Production, on_delete=models.PROTECT)
date = models.DateTimeField()
venue = models.ForeignKey('venue.Venue', on_delete=models.PROTECT)
@staticmethod
def upcoming():
return Show.objects.filter(date__gte=datetime.date.today())
@staticmethod
def ticket_coverage():
return Show.objects.raw("""
select show.id,
show.production_id,
show.venue_id,
venue.name as venue_name,
production.name as production_name,
show.date,
count(distinct(ticket.id)) as ticket_count,
count(distinct(seat.id)) as seat_count,
100 * (count(distinct(ticket.id))::float / count(distinct(seat.id))) as sales_percentage
from show_show show
left outer join ticket_ticket ticket on ticket.show_id = show.id
left join venue_venue venue on show.venue_id = venue.id
left join venue_seatinggroup sg on sg.venue_id = venue.id
left join venue_seat seat on sg.id = seat.group_id
left join show_production production on show.production_id = production.id
group by show.id, venue.name, production.name
order by show.date desc
""")
def date_string(self):
return self.date.strftime("%Y-%m-%d %H:%M")
def __str__(self):
return self.production.name + " " + self.date_string()
class Meta:
ordering = ('date',)
|
# ... existing code ...
def upcoming():
return Show.objects.filter(date__gte=datetime.date.today())
@staticmethod
def ticket_coverage():
return Show.objects.raw("""
select show.id,
show.production_id,
show.venue_id,
venue.name as venue_name,
production.name as production_name,
show.date,
count(distinct(ticket.id)) as ticket_count,
count(distinct(seat.id)) as seat_count,
100 * (count(distinct(ticket.id))::float / count(distinct(seat.id))) as sales_percentage
from show_show show
left outer join ticket_ticket ticket on ticket.show_id = show.id
left join venue_venue venue on show.venue_id = venue.id
left join venue_seatinggroup sg on sg.venue_id = venue.id
left join venue_seat seat on sg.id = seat.group_id
left join show_production production on show.production_id = production.id
group by show.id, venue.name, production.name
order by show.date desc
""")
def date_string(self):
return self.date.strftime("%Y-%m-%d %H:%M")
# ... rest of the code ...
|
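A brief illustrative note on the Show.ticket_coverage() change above: Django attaches the extra columns selected in a raw query (venue_name, production_name, ticket_count, seat_count, sales_percentage) as plain attributes on the returned Show instances, so the RawQuerySet can be consumed directly. The sketch below is not part of the original commit; it assumes the app is importable as karspexet.show and uses a hypothetical reporting function name.
from karspexet.show.models import Show
def print_ticket_coverage():
    # Each Show yielded by the raw query carries the aggregate columns as attributes.
    for show in Show.ticket_coverage():
        print(f"{show.production_name} {show.date_string()}: "
              f"{show.ticket_count}/{show.seat_count} seats "
              f"({show.sales_percentage:.1f}%)")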
77dc80e60b252833940dc6b2a1c512684ed8decd
|
doc/conf.py
|
doc/conf.py
|
extensions = [
'nbsphinx',
'sphinx.ext.mathjax',
]
# Exclude build directory and Jupyter backup files:
exclude_patterns = ['_build', '**.ipynb_checkpoints']
# -- The settings below this line are not specific to nbsphinx ------------
master_doc = 'index'
project = 'nbsphinx'
author = 'Matthias Geier'
copyright = '2016, ' + author
# -- Get version information from Git -------------------------------------
try:
from subprocess import check_output
release = check_output(['git', 'describe', '--tags', '--always'])
release = release.decode().strip()
except Exception:
release = '<unknown>'
# -- Options for HTML output ----------------------------------------------
html_title = project + ' version ' + release
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
'papersize': 'a4paper',
'preamble': r'\setcounter{tocdepth}{3}',
}
latex_documents = [
(master_doc, 'nbsphinx.tex', project, author, 'howto'),
]
latex_show_urls = 'footnote'
|
extensions = [
'nbsphinx',
'sphinx.ext.mathjax',
]
# Exclude build directory and Jupyter backup files:
exclude_patterns = ['_build', '**.ipynb_checkpoints']
# Default language for syntax highlighting (e.g. in Markdown cells)
highlight_language = 'none'
# -- The settings below this line are not specific to nbsphinx ------------
master_doc = 'index'
project = 'nbsphinx'
author = 'Matthias Geier'
copyright = '2016, ' + author
# -- Get version information from Git -------------------------------------
try:
from subprocess import check_output
release = check_output(['git', 'describe', '--tags', '--always'])
release = release.decode().strip()
except Exception:
release = '<unknown>'
# -- Options for HTML output ----------------------------------------------
html_title = project + ' version ' + release
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
'papersize': 'a4paper',
'preamble': r'\setcounter{tocdepth}{3}',
}
latex_documents = [
(master_doc, 'nbsphinx.tex', project, author, 'howto'),
]
latex_show_urls = 'footnote'
|
Set default syntax highlighting language to 'none'
|
DOC: Set default syntax highlighting language to 'none'
|
Python
|
mit
|
spatialaudio/nbsphinx,spatialaudio/nbsphinx,spatialaudio/nbsphinx
|
python
|
## Code Before:
extensions = [
'nbsphinx',
'sphinx.ext.mathjax',
]
# Exclude build directory and Jupyter backup files:
exclude_patterns = ['_build', '**.ipynb_checkpoints']
# -- The settings below this line are not specific to nbsphinx ------------
master_doc = 'index'
project = 'nbsphinx'
author = 'Matthias Geier'
copyright = '2016, ' + author
# -- Get version information from Git -------------------------------------
try:
from subprocess import check_output
release = check_output(['git', 'describe', '--tags', '--always'])
release = release.decode().strip()
except Exception:
release = '<unknown>'
# -- Options for HTML output ----------------------------------------------
html_title = project + ' version ' + release
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
'papersize': 'a4paper',
'preamble': r'\setcounter{tocdepth}{3}',
}
latex_documents = [
(master_doc, 'nbsphinx.tex', project, author, 'howto'),
]
latex_show_urls = 'footnote'
## Instruction:
DOC: Set default syntax highlighting language to 'none'
## Code After:
extensions = [
'nbsphinx',
'sphinx.ext.mathjax',
]
# Exclude build directory and Jupyter backup files:
exclude_patterns = ['_build', '**.ipynb_checkpoints']
# Default language for syntax highlighting (e.g. in Markdown cells)
highlight_language = 'none'
# -- The settings below this line are not specific to nbsphinx ------------
master_doc = 'index'
project = 'nbsphinx'
author = 'Matthias Geier'
copyright = '2016, ' + author
# -- Get version information from Git -------------------------------------
try:
from subprocess import check_output
release = check_output(['git', 'describe', '--tags', '--always'])
release = release.decode().strip()
except Exception:
release = '<unknown>'
# -- Options for HTML output ----------------------------------------------
html_title = project + ' version ' + release
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
'papersize': 'a4paper',
'preamble': r'\setcounter{tocdepth}{3}',
}
latex_documents = [
(master_doc, 'nbsphinx.tex', project, author, 'howto'),
]
latex_show_urls = 'footnote'
|
// ... existing code ...
# Exclude build directory and Jupyter backup files:
exclude_patterns = ['_build', '**.ipynb_checkpoints']
# Default language for syntax highlighting (e.g. in Markdown cells)
highlight_language = 'none'
# -- The settings below this line are not specific to nbsphinx ------------
// ... rest of the code ...
|
e6f7c6657485b33760e2522afb6b25ba5ed405fd
|
pyramid_zipkin/zipkin.py
|
pyramid_zipkin/zipkin.py
|
from py_zipkin.zipkin import create_http_headers_for_new_span \
as create_headers_for_new_span # pragma: no cover
|
from py_zipkin.zipkin import create_http_headers_for_new_span # pragma: no cover
# Backwards compatibility for places where pyramid_zipkin is unpinned
create_headers_for_new_span = create_http_headers_for_new_span # pragma: no cover
|
Split import into 2 lines to make flake8 happy
|
Split import into 2 lines to make flake8 happy
|
Python
|
apache-2.0
|
bplotnick/pyramid_zipkin,Yelp/pyramid_zipkin
|
python
|
## Code Before:
from py_zipkin.zipkin import create_http_headers_for_new_span \
as create_headers_for_new_span # pragma: no cover
## Instruction:
Split import into 2 lines to make flake8 happy
## Code After:
from py_zipkin.zipkin import create_http_headers_for_new_span # pragma: no cover
# Backwards compatibility for places where pyramid_zipkin is unpinned
create_headers_for_new_span = create_http_headers_for_new_span # pragma: no cover
|
...
from py_zipkin.zipkin import create_http_headers_for_new_span # pragma: no cover
# Backwards compatibility for places where pyramid_zipkin is unpinned
create_headers_for_new_span = create_http_headers_for_new_span # pragma: no cover
...
|
5fbe5b2e04acc36e9b63ce51b3d4bb0e083fef58
|
kurento-client/src/main/java/org/kurento/client/JsonRpcConnectionListenerKurento.java
|
kurento-client/src/main/java/org/kurento/client/JsonRpcConnectionListenerKurento.java
|
package org.kurento.client;
import org.kurento.jsonrpc.client.JsonRpcWSConnectionListener;
public class JsonRpcConnectionListenerKurento implements JsonRpcWSConnectionListener {
private KurentoConnectionListener listener;
public JsonRpcConnectionListenerKurento(KurentoConnectionListener listener) {
this.listener = listener;
}
@Override
public void connectionFailed() {
listener.connectionFailed();
}
@Override
public void connected() {
listener.connected();
}
@Override
public void disconnected() {
listener.disconnected();
}
@Override
public void reconnected(boolean sameServer) {
listener.reconnected(sameServer);
}
public static JsonRpcWSConnectionListener create(KurentoConnectionListener listener) {
if (listener == null) {
return null;
}
return new JsonRpcConnectionListenerKurento(listener);
}
@Override
public void reconnecting() {
}
}
|
package org.kurento.client;
import org.kurento.jsonrpc.client.JsonRpcWSConnectionListener;
public class JsonRpcConnectionListenerKurento implements JsonRpcWSConnectionListener {
private KurentoConnectionListener listener;
public JsonRpcConnectionListenerKurento(KurentoConnectionListener listener) {
this.listener = listener;
}
@Override
public void connectionFailed() {
listener.connectionFailed();
}
@Override
public void connected() {
listener.connected();
}
@Override
public void disconnected() {
listener.disconnected();
}
@Override
public void reconnected(boolean sameServer) {
listener.reconnected(sameServer);
}
public static JsonRpcWSConnectionListener create(KurentoConnectionListener listener) {
if (listener == null) {
return null;
}
return new JsonRpcConnectionListenerKurento(listener);
}
@Override
public void reconnecting() {
listener.disconnected();
}
}
|
Call disconnected event when reconnecting
|
[kurento-client] Call disconnected event when reconnecting
Change-Id: I0f0388f281fa80a1333f6bd18e60e563d4694cf3
|
Java
|
apache-2.0
|
Kurento/kurento-java,EugenioFidel/kurento-java,EugenioFidel/kurento-java,Kurento/kurento-java,Kurento/kurento-java,EugenioFidel/kurento-java,EugenioFidel/kurento-java,Kurento/kurento-java
|
java
|
## Code Before:
package org.kurento.client;
import org.kurento.jsonrpc.client.JsonRpcWSConnectionListener;
public class JsonRpcConnectionListenerKurento implements JsonRpcWSConnectionListener {
private KurentoConnectionListener listener;
public JsonRpcConnectionListenerKurento(KurentoConnectionListener listener) {
this.listener = listener;
}
@Override
public void connectionFailed() {
listener.connectionFailed();
}
@Override
public void connected() {
listener.connected();
}
@Override
public void disconnected() {
listener.disconnected();
}
@Override
public void reconnected(boolean sameServer) {
listener.reconnected(sameServer);
}
public static JsonRpcWSConnectionListener create(KurentoConnectionListener listener) {
if (listener == null) {
return null;
}
return new JsonRpcConnectionListenerKurento(listener);
}
@Override
public void reconnecting() {
}
}
## Instruction:
[kurento-client] Call disconnected event when reconnecting
Change-Id: I0f0388f281fa80a1333f6bd18e60e563d4694cf3
## Code After:
package org.kurento.client;
import org.kurento.jsonrpc.client.JsonRpcWSConnectionListener;
public class JsonRpcConnectionListenerKurento implements JsonRpcWSConnectionListener {
private KurentoConnectionListener listener;
public JsonRpcConnectionListenerKurento(KurentoConnectionListener listener) {
this.listener = listener;
}
@Override
public void connectionFailed() {
listener.connectionFailed();
}
@Override
public void connected() {
listener.connected();
}
@Override
public void disconnected() {
listener.disconnected();
}
@Override
public void reconnected(boolean sameServer) {
listener.reconnected(sameServer);
}
public static JsonRpcWSConnectionListener create(KurentoConnectionListener listener) {
if (listener == null) {
return null;
}
return new JsonRpcConnectionListenerKurento(listener);
}
@Override
public void reconnecting() {
listener.disconnected();
}
}
|
// ... existing code ...
package org.kurento.client;
import org.kurento.jsonrpc.client.JsonRpcWSConnectionListener;
// ... modified code ...
@Override
public void reconnecting() {
listener.disconnected();
}
}
// ... rest of the code ...
|
ca563ca11fe04202ae38799ee992a48e0a01fd86
|
material/admin/modules.py
|
material/admin/modules.py
|
from karenina import modules
class Admin(modules.InstallableModule):
icon = "mdi-action-settings-applications"
order = 1000
@property
def label(self):
return 'Administration'
def has_perm(self, user):
return user.is_staff
|
from karenina import modules
class Admin(modules.Module):
icon = "mdi-action-settings-applications"
order = 1000
@property
def label(self):
return 'Administration'
def has_perm(self, user):
return user.is_staff
|
Add module declaration for karenina
|
Add module declaration for karenina
|
Python
|
bsd-3-clause
|
thiagoramos-luizalabs/django-material,refnode/django-material,lukasgarcya/django-material,viewflow/django-material,MonsterKiller/django-material,viewflow/django-material,barseghyanartur/django-material,MonsterKiller/django-material,un33k/django-material,afifnz/django-material,Axelio/django-material,viewflow/django-material,koopauy/django-material,barseghyanartur/django-material,pombredanne/django-material,pombredanne/django-material,koopauy/django-material,Axelio/django-material,koopauy/django-material,lukasgarcya/django-material,sourabhdattawad/django-material,2947721120/django-material,refnode/django-material,sourabhdattawad/django-material,pombredanne/django-material,barseghyanartur/django-material,2947721120/django-material,un33k/django-material,un33k/django-material,thiagoramos-luizalabs/django-material,lukasgarcya/django-material,MonsterKiller/django-material,2947721120/django-material,Axelio/django-material,refnode/django-material,afifnz/django-material,afifnz/django-material,sourabhdattawad/django-material,thiagoramos-luizalabs/django-material
|
python
|
## Code Before:
from karenina import modules
class Admin(modules.InstallableModule):
icon = "mdi-action-settings-applications"
order = 1000
@property
def label(self):
return 'Administration'
def has_perm(self, user):
return user.is_staff
## Instruction:
Add module declaration for karenina
## Code After:
from karenina import modules
class Admin(modules.Module):
icon = "mdi-action-settings-applications"
order = 1000
@property
def label(self):
return 'Administration'
def has_perm(self, user):
return user.is_staff
|
// ... existing code ...
from karenina import modules
class Admin(modules.Module):
icon = "mdi-action-settings-applications"
order = 1000
// ... rest of the code ...
|
35c44f0f585d11dea632e509b9eec20d4697dc9d
|
functions/eitu/timeedit_to_csv.py
|
functions/eitu/timeedit_to_csv.py
|
import requests
import csv
import ics_parser
URL_STUDY_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6Q7Z6QQw0Z5gQ9f50on7Xx5YY00ZQ1ZYQycZw.ics'
URL_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6g7058yYQZXxQ5oQgZZ0vZ56Y1Q0f5c0nZQwYQ.ics'
def fetch_and_parse(url):
return ics_parser.parse(requests.get(url).text)
# Fetch and parse iCalendar events
study_activities = fetch_and_parse(URL_STUDY_ACTIVITIES)
activities = fetch_and_parse(URL_ACTIVITIES)
events = study_activities + activities
# Remove duplicate events
events = {e['UID']: e for e in events}.values()
# Write csv
with open('timeedit.csv', 'w') as csvfile:
fieldnames = set()
for e in events: fieldnames = fieldnames | set(e.keys())
writer = csv.DictWriter(csvfile, fieldnames=sorted(list(fieldnames)))
writer.writeheader()
for e in events: writer.writerow(e)
|
import requests
import csv
from datetime import datetime
import ics_parser
URL_STUDY_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6Q7Z6QQw0Z5gQ9f50on7Xx5YY00ZQ1ZYQycZw.ics'
URL_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6g7058yYQZXxQ5oQgZZ0vZ56Y1Q0f5c0nZQwYQ.ics'
def fetch_and_parse(url):
return ics_parser.parse(requests.get(url).text)
# Fetch and parse iCalendar events
study_activities = fetch_and_parse(URL_STUDY_ACTIVITIES)
activities = fetch_and_parse(URL_ACTIVITIES)
events = study_activities + activities
# Remove duplicates and sort
events = {e['UID']: e for e in events}.values()
events = sorted(events, key=lambda e: e['DTSTART'])
# Write csv
with open('timeedit.csv', 'w') as csvfile:
fieldnames = set()
for e in events: fieldnames = fieldnames | set(e.keys())
writer = csv.DictWriter(csvfile, fieldnames=sorted(list(fieldnames)))
writer.writeheader()
for e in events:
for key, value in e.items():
if isinstance(value, datetime): e[key] = value.isoformat()
writer.writerow(e)
|
Sort events by start and iso format datetimes
|
Sort events by start and iso format datetimes
|
Python
|
mit
|
christianknu/eitu,christianknu/eitu,eitu/eitu,christianknu/eitu,eitu/eitu
|
python
|
## Code Before:
import requests
import csv
import ics_parser
URL_STUDY_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6Q7Z6QQw0Z5gQ9f50on7Xx5YY00ZQ1ZYQycZw.ics'
URL_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6g7058yYQZXxQ5oQgZZ0vZ56Y1Q0f5c0nZQwYQ.ics'
def fetch_and_parse(url):
return ics_parser.parse(requests.get(url).text)
# Fetch and parse iCalendar events
study_activities = fetch_and_parse(URL_STUDY_ACTIVITIES)
activities = fetch_and_parse(URL_ACTIVITIES)
events = study_activities + activities
# Remove duplicate events
events = {e['UID']: e for e in events}.values()
# Write csv
with open('timeedit.csv', 'w') as csvfile:
fieldnames = set()
for e in events: fieldnames = fieldnames | set(e.keys())
writer = csv.DictWriter(csvfile, fieldnames=sorted(list(fieldnames)))
writer.writeheader()
for e in events: writer.writerow(e)
## Instruction:
Sort events by start and iso format datetimes
## Code After:
import requests
import csv
from datetime import datetime
import ics_parser
URL_STUDY_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6Q7Z6QQw0Z5gQ9f50on7Xx5YY00ZQ1ZYQycZw.ics'
URL_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6g7058yYQZXxQ5oQgZZ0vZ56Y1Q0f5c0nZQwYQ.ics'
def fetch_and_parse(url):
return ics_parser.parse(requests.get(url).text)
# Fetch and parse iCalendar events
study_activities = fetch_and_parse(URL_STUDY_ACTIVITIES)
activities = fetch_and_parse(URL_ACTIVITIES)
events = study_activities + activities
# Remove duplicates and sort
events = {e['UID']: e for e in events}.values()
events = sorted(events, key=lambda e: e['DTSTART'])
# Write csv
with open('timeedit.csv', 'w') as csvfile:
fieldnames = set()
for e in events: fieldnames = fieldnames | set(e.keys())
writer = csv.DictWriter(csvfile, fieldnames=sorted(list(fieldnames)))
writer.writeheader()
for e in events:
for key, value in e.items():
if isinstance(value, datetime): e[key] = value.isoformat()
writer.writerow(e)
|
// ... existing code ...
import requests
import csv
from datetime import datetime
import ics_parser
URL_STUDY_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6Q7Z6QQw0Z5gQ9f50on7Xx5YY00ZQ1ZYQycZw.ics'
// ... modified code ...
activities = fetch_and_parse(URL_ACTIVITIES)
events = study_activities + activities
# Remove duplicates and sort
events = {e['UID']: e for e in events}.values()
events = sorted(events, key=lambda e: e['DTSTART'])
# Write csv
with open('timeedit.csv', 'w') as csvfile:
...
for e in events: fieldnames = fieldnames | set(e.keys())
writer = csv.DictWriter(csvfile, fieldnames=sorted(list(fieldnames)))
writer.writeheader()
for e in events:
for key, value in e.items():
if isinstance(value, datetime): e[key] = value.isoformat()
writer.writerow(e)
// ... rest of the code ...
|
b04e7afbd56518ba0e825d70b11a0c88e2d6e29d
|
astm/tests/utils.py
|
astm/tests/utils.py
|
class DummyMixIn(object):
_input_buffer = ''
def flush(self):
pass
def close(self):
pass
class CallLogger(object):
def __init__(self, func):
self.func = func
self.was_called = False
def __call__(self, *args, **kwargs):
self.was_called = True
return self.func(*args, **kwargs)
def track_call(func):
return CallLogger(func)
|
class DummyMixIn(object):
_input_buffer = ''
addr = ('localhost', '15200')
def flush(self):
pass
def close(self):
pass
class CallLogger(object):
def __init__(self, func):
self.func = func
self.was_called = False
def __call__(self, *args, **kwargs):
self.was_called = True
return self.func(*args, **kwargs)
def track_call(func):
return CallLogger(func)
|
Set dummy address info for tests.
|
Set dummy address info for tests.
|
Python
|
bsd-3-clause
|
asingla87/python-astm,andrexmd/python-astm,pombreda/python-astm,mhaulo/python-astm,MarcosHaenisch/python-astm,briankip/python-astm,kxepal/python-astm,123412345/python-astm,tinoshot/python-astm,eddiep1101/python-astm,LogicalKnight/python-astm,Iskander1b/python-astm,AlanZatarain/python-astm,kxepal/python-astm,tectronics/python-astm,Alwnikrotikz/python-astm
|
python
|
## Code Before:
class DummyMixIn(object):
_input_buffer = ''
def flush(self):
pass
def close(self):
pass
class CallLogger(object):
def __init__(self, func):
self.func = func
self.was_called = False
def __call__(self, *args, **kwargs):
self.was_called = True
return self.func(*args, **kwargs)
def track_call(func):
return CallLogger(func)
## Instruction:
Set dummy address info for tests.
## Code After:
class DummyMixIn(object):
_input_buffer = ''
addr = ('localhost', '15200')
def flush(self):
pass
def close(self):
pass
class CallLogger(object):
def __init__(self, func):
self.func = func
self.was_called = False
def __call__(self, *args, **kwargs):
self.was_called = True
return self.func(*args, **kwargs)
def track_call(func):
return CallLogger(func)
|
...
class DummyMixIn(object):
_input_buffer = ''
addr = ('localhost', '15200')
def flush(self):
pass
...
|
ef72ce81c2d51cf99e44041a871a82c512badb8c
|
people/serializers.py
|
people/serializers.py
|
from rest_framework import serializers
from people.models import Customer
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
class Meta:
model = Customer
fields = '__all__'
class InternalUserSerializer(serializers.ModelSerializer):
class Meta:
model = InternalUser
fields = '__all__'
|
from rest_framework import serializers
from people.models import Customer
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
phone_number = serializers.IntegerField(validators=[lambda x: len(str(x)) == 10])
class Meta:
model = Customer
fields = '__all__'
class InternalUserSerializer(serializers.ModelSerializer):
class Meta:
model = InternalUser
fields = '__all__'
|
Make the phone number an int
|
Make the phone number an int
|
Python
|
apache-2.0
|
rameshgopalakrishnan/v_excel_inventory,rameshgopalakrishnan/v_excel_inventory,rameshgopalakrishnan/v_excel_inventory
|
python
|
## Code Before:
from rest_framework import serializers
from people.models import Customer
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
class Meta:
model = Customer
fields = '__all__'
class InternalUserSerializer(serializers.ModelSerializer):
class Meta:
model = InternalUser
fields = '__all__'
## Instruction:
Make the phone number an int
## Code After:
from rest_framework import serializers
from people.models import Customer
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
phone_number = serializers.IntegerField(validators=[lambda x: len(str(x)) == 10])
class Meta:
model = Customer
fields = '__all__'
class InternalUserSerializer(serializers.ModelSerializer):
class Meta:
model = InternalUser
fields = '__all__'
|
# ... existing code ...
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
phone_number = serializers.IntegerField(validators=[lambda x: len(str(x)) == 10])
class Meta:
model = Customer
# ... rest of the code ...
|
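One hedged aside on the phone_number change above: in Django REST Framework, entries in a field's validators list signal failure by raising serializers.ValidationError; a lambda that merely returns a boolean is silently ignored. A minimal sketch of the conventional pattern follows; the validator function name and error message are illustrative, not part of the original commit.
from rest_framework import serializers
from people.models import Customer
def validate_ten_digits(value):
    # Returning False would have no effect; DRF only reacts to a raised ValidationError.
    if len(str(value)) != 10:
        raise serializers.ValidationError('Phone number must have exactly 10 digits.')
class CustomerSerializer(serializers.ModelSerializer):
    phone_number = serializers.IntegerField(validators=[validate_ten_digits])
    class Meta:
        model = Customer
        fields = '__all__'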
d6f3fb33edc1c89c3cd569f1a43ce7f5de8e166a
|
test/guice/GlobalTest.java
|
test/guice/GlobalTest.java
|
package guice;
import play.Application;
import services.AuthenticationService;
import services.StubConfigurationImplTest;
import services.ConfigurationService;
import services.ESConstantImpl;
import services.ESConstantService;
import services.ESSearchImpl;
import services.ESSearchService;
import stub.StubAuthenticationImplTest;
import utils.EncodeUtils;
import utils.IEncodeUtils;
import utils.eslasticsearch.ESServerEmbedded;
import utils.eslasticsearch.IESServerEmbedded;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import configuration.GlobalConfiguration;
import controllers.AuthenticationController;
public class GlobalTest extends GlobalConfiguration {
@Override
protected AbstractModule buildAbstractModule() {
return new AbstractModule() {
@Override
protected void configure() {
bind(IESServerEmbedded.class).to(ESServerEmbedded.class);
bind(IEncodeUtils.class).to(EncodeUtils.class);
bind(ESSearchService.class).to(ESSearchImpl.class);
bind(ESConstantService.class).to(ESConstantImpl.class);
bind(ConfigurationService.class).to(StubConfigurationImplTest.class);
bind(AuthenticationService.class).to(StubAuthenticationImplTest.class);
bind(AuthenticationController.class).asEagerSingleton();
}
};
}
@Override
public void onStart(Application application) {
this.injector = Guice.createInjector(buildAbstractModule());
}
@Override
public void onStop(Application arg0) {
// do nothing
}
}
|
package guice;
import play.Application;
import services.AuthenticationService;
import services.StubConfigurationImplTest;
import services.ConfigurationService;
import services.ESConstantImpl;
import services.ESConstantService;
import services.ESSearchImpl;
import services.ESSearchService;
import stub.StubAuthenticationImplTest;
import utils.EncodeUtils;
import utils.FileUtils;
import utils.IEncodeUtils;
import utils.IFileUtils;
import utils.eslasticsearch.ESServerEmbedded;
import utils.eslasticsearch.IESServerEmbedded;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import configuration.GlobalConfiguration;
import controllers.AuthenticationController;
public class GlobalTest extends GlobalConfiguration {
@Override
protected AbstractModule buildAbstractModule() {
return new AbstractModule() {
@Override
protected void configure() {
bind(IESServerEmbedded.class).to(ESServerEmbedded.class);
bind(IEncodeUtils.class).to(EncodeUtils.class);
bind(IFileUtils.class).to(FileUtils.class);
bind(ESSearchService.class).to(ESSearchImpl.class);
bind(ESConstantService.class).to(ESConstantImpl.class);
bind(ConfigurationService.class).to(StubConfigurationImplTest.class);
bind(AuthenticationService.class).to(StubAuthenticationImplTest.class);
bind(AuthenticationController.class).asEagerSingleton();
}
};
}
@Override
public void onStart(Application application) {
this.injector = Guice.createInjector(buildAbstractModule());
}
@Override
public void onStop(Application arg0) {
// do nothing
}
}
|
Fix test: Missing Guice dependency
|
Fix test: Missing Guice dependency
|
Java
|
apache-2.0
|
joakim-ribier/reactive-elasticsearch-play,joakim-ribier/reactive-elasticsearch-play,joakim-ribier/reactive-elasticsearch-play
|
java
|
## Code Before:
package guice;
import play.Application;
import services.AuthenticationService;
import services.StubConfigurationImplTest;
import services.ConfigurationService;
import services.ESConstantImpl;
import services.ESConstantService;
import services.ESSearchImpl;
import services.ESSearchService;
import stub.StubAuthenticationImplTest;
import utils.EncodeUtils;
import utils.IEncodeUtils;
import utils.eslasticsearch.ESServerEmbedded;
import utils.eslasticsearch.IESServerEmbedded;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import configuration.GlobalConfiguration;
import controllers.AuthenticationController;
public class GlobalTest extends GlobalConfiguration {
@Override
protected AbstractModule buildAbstractModule() {
return new AbstractModule() {
@Override
protected void configure() {
bind(IESServerEmbedded.class).to(ESServerEmbedded.class);
bind(IEncodeUtils.class).to(EncodeUtils.class);
bind(ESSearchService.class).to(ESSearchImpl.class);
bind(ESConstantService.class).to(ESConstantImpl.class);
bind(ConfigurationService.class).to(StubConfigurationImplTest.class);
bind(AuthenticationService.class).to(StubAuthenticationImplTest.class);
bind(AuthenticationController.class).asEagerSingleton();
}
};
}
@Override
public void onStart(Application application) {
this.injector = Guice.createInjector(buildAbstractModule());
}
@Override
public void onStop(Application arg0) {
// do nothing
}
}
## Instruction:
Fix test: Missing Guice dependency
## Code After:
package guice;
import play.Application;
import services.AuthenticationService;
import services.StubConfigurationImplTest;
import services.ConfigurationService;
import services.ESConstantImpl;
import services.ESConstantService;
import services.ESSearchImpl;
import services.ESSearchService;
import stub.StubAuthenticationImplTest;
import utils.EncodeUtils;
import utils.FileUtils;
import utils.IEncodeUtils;
import utils.IFileUtils;
import utils.eslasticsearch.ESServerEmbedded;
import utils.eslasticsearch.IESServerEmbedded;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import configuration.GlobalConfiguration;
import controllers.AuthenticationController;
public class GlobalTest extends GlobalConfiguration {
@Override
protected AbstractModule buildAbstractModule() {
return new AbstractModule() {
@Override
protected void configure() {
bind(IESServerEmbedded.class).to(ESServerEmbedded.class);
bind(IEncodeUtils.class).to(EncodeUtils.class);
bind(IFileUtils.class).to(FileUtils.class);
bind(ESSearchService.class).to(ESSearchImpl.class);
bind(ESConstantService.class).to(ESConstantImpl.class);
bind(ConfigurationService.class).to(StubConfigurationImplTest.class);
bind(AuthenticationService.class).to(StubAuthenticationImplTest.class);
bind(AuthenticationController.class).asEagerSingleton();
}
};
}
@Override
public void onStart(Application application) {
this.injector = Guice.createInjector(buildAbstractModule());
}
@Override
public void onStop(Application arg0) {
// do nothing
}
}
|
# ... existing code ...
import services.ESSearchService;
import stub.StubAuthenticationImplTest;
import utils.EncodeUtils;
import utils.FileUtils;
import utils.IEncodeUtils;
import utils.IFileUtils;
import utils.eslasticsearch.ESServerEmbedded;
import utils.eslasticsearch.IESServerEmbedded;
# ... modified code ...
protected void configure() {
bind(IESServerEmbedded.class).to(ESServerEmbedded.class);
bind(IEncodeUtils.class).to(EncodeUtils.class);
bind(IFileUtils.class).to(FileUtils.class);
bind(ESSearchService.class).to(ESSearchImpl.class);
bind(ESConstantService.class).to(ESConstantImpl.class);
# ... rest of the code ...
|
dd2c92bea635d7cfc93b437ce32266126bceb1e9
|
qipipe/helpers/bolus_arrival.py
|
qipipe/helpers/bolus_arrival.py
|
class BolusArrivalError(Exception):
pass
def bolus_arrival_index(time_series):
"""
Determines the DCE bolus arrival series index. The bolus arrival is
the first series with a difference in average signal larger than
double the difference from first two points.
:param time_series: the 4D NiFTI scan image file path
:return: the bolus arrival series index
:raise BolusArrivalError: if the bolus arrival could not be determined
"""
import nibabel as nb
import numpy as np
nii = nb.load(time_series)
data = nii.get_data()
n_vols = data.shape[-1]
signal_means = np.array([np.mean(data[:,:,:, idx])
for idx in xrange(n_vols)])
signal_diffs = np.diff(signal_means)
# If we see a difference in average signal larger than double the
# difference from first two points, take that as bolus arrival.
base_diff = np.abs(signal_diffs[0])
for idx, diff_val in enumerate(signal_diffs[1:]):
if diff_val > 2 * base_diff:
return idx + 1
else:
raise BolusArrivalError("Unable to determine bolus arrival")
|
class BolusArrivalError(Exception):
pass
def bolus_arrival_index(time_series):
"""
Determines the DCE bolus arrival time point index. The bolus arrival
is the first occurrence of a difference in average signal larger than
double the difference from first two points.
:param time_series: the 4D NiFTI scan image file path
:return: the bolus arrival time point index
:raise BolusArrivalError: if the bolus arrival could not be determined
"""
import nibabel as nb
import numpy as np
nii = nb.load(time_series)
data = nii.get_data()
n_vols = data.shape[-1]
signal_means = np.array([np.mean(data[:,:,:, idx])
for idx in xrange(n_vols)])
signal_diffs = np.diff(signal_means)
# If we see a difference in average signal larger than double the
# difference from first two points, take that as bolus arrival.
base_diff = np.abs(signal_diffs[0])
for idx, diff_val in enumerate(signal_diffs[1:]):
if diff_val > 2 * base_diff:
return idx + 1
else:
raise BolusArrivalError("Unable to determine bolus arrival")
|
Change series to time point.
|
Change series to time point.
|
Python
|
bsd-2-clause
|
ohsu-qin/qipipe
|
python
|
## Code Before:
class BolusArrivalError(Exception):
pass
def bolus_arrival_index(time_series):
"""
Determines the DCE bolus arrival series index. The bolus arrival is
the first series with a difference in average signal larger than
double the difference from first two points.
:param time_series: the 4D NiFTI scan image file path
:return: the bolus arrival series index
:raise BolusArrivalError: if the bolus arrival could not be determined
"""
import nibabel as nb
import numpy as np
nii = nb.load(time_series)
data = nii.get_data()
n_vols = data.shape[-1]
signal_means = np.array([np.mean(data[:,:,:, idx])
for idx in xrange(n_vols)])
signal_diffs = np.diff(signal_means)
# If we see a difference in average signal larger than double the
# difference from first two points, take that as bolus arrival.
base_diff = np.abs(signal_diffs[0])
for idx, diff_val in enumerate(signal_diffs[1:]):
if diff_val > 2 * base_diff:
return idx + 1
else:
raise BolusArrivalError("Unable to determine bolus arrival")
## Instruction:
Change series to time point.
## Code After:
class BolusArrivalError(Exception):
pass
def bolus_arrival_index(time_series):
"""
Determines the DCE bolus arrival time point index. The bolus arrival
is the first occurrence of a difference in average signal larger than
double the difference from first two points.
:param time_series: the 4D NiFTI scan image file path
:return: the bolus arrival time point index
:raise BolusArrivalError: if the bolus arrival could not be determined
"""
import nibabel as nb
import numpy as np
nii = nb.load(time_series)
data = nii.get_data()
n_vols = data.shape[-1]
signal_means = np.array([np.mean(data[:,:,:, idx])
for idx in xrange(n_vols)])
signal_diffs = np.diff(signal_means)
# If we see a difference in average signal larger than double the
# difference from first two points, take that as bolus arrival.
base_diff = np.abs(signal_diffs[0])
for idx, diff_val in enumerate(signal_diffs[1:]):
if diff_val > 2 * base_diff:
return idx + 1
else:
raise BolusArrivalError("Unable to determine bolus arrival")
|
# ... existing code ...
def bolus_arrival_index(time_series):
"""
Determines the DCE bolus arrival time point index. The bolus arrival
is the first occurrence of a difference in average signal larger than
double the difference from first two points.
:param time_series: the 4D NiFTI scan image file path
:return: the bolus arrival time point index
:raise BolusArrivalError: if the bolus arrival could not be determined
"""
import nibabel as nb
# ... rest of the code ...
|
d9d5db9efedbf1ca9ddd11e0f2eff2a4b04afe90
|
stutterfuzz.c
|
stutterfuzz.c
|
static uint64_t sqrt64(uint64_t n) {
uint64_t g = UINT64_C(1) << 31;
for (uint64_t c = g; c; g |= c) {
if (g * g > n) {
g ^= c;
}
c >>= 1;
}
return g;
}
static uint64_t get_split(uint64_t len) {
uint64_t rnd;
rand_fill(&rnd, sizeof(rnd));
rnd %= (len * len);
return sqrt64(rnd) + 1;
}
int main(int __attribute__ ((unused)) argc, char __attribute__ ((unused)) *argv[]) {
rand_init();
for (uint64_t len = 1397; len;) {
uint64_t consume = get_split(len);
fprintf(stderr, "consume %ju bytes\n", (uintmax_t) consume);
len -= consume;
}
rand_cleanup();
}
|
static uint64_t get_split(uint64_t total_len, uint64_t remaining_len) {
uint64_t rnd;
rand_fill(&rnd, sizeof(rnd));
rnd %= total_len;
return rnd > remaining_len ? remaining_len : rnd;
}
int main(int __attribute__ ((unused)) argc, char __attribute__ ((unused)) *argv[]) {
rand_init();
uint64_t total_len = 1397;
for (uint64_t remaining = total_len, consume = 0; remaining; remaining -= consume) {
consume = get_split(total_len, remaining);
fprintf(stderr, "consume %ju bytes\n", (uintmax_t) consume);
}
rand_cleanup();
}
|
Fix random algo to evenly distribute.
|
Fix random algo to evenly distribute.
|
C
|
apache-2.0
|
flamingcowtv/stutterfuzz
|
c
|
## Code Before:
static uint64_t sqrt64(uint64_t n) {
uint64_t g = UINT64_C(1) << 31;
for (uint64_t c = g; c; g |= c) {
if (g * g > n) {
g ^= c;
}
c >>= 1;
}
return g;
}
static uint64_t get_split(uint64_t len) {
uint64_t rnd;
rand_fill(&rnd, sizeof(rnd));
rnd %= (len * len);
return sqrt64(rnd) + 1;
}
int main(int __attribute__ ((unused)) argc, char __attribute__ ((unused)) *argv[]) {
rand_init();
for (uint64_t len = 1397; len;) {
uint64_t consume = get_split(len);
fprintf(stderr, "consume %ju bytes\n", (uintmax_t) consume);
len -= consume;
}
rand_cleanup();
}
## Instruction:
Fix random algo to evenly distribute.
## Code After:
static uint64_t get_split(uint64_t total_len, uint64_t remaining_len) {
uint64_t rnd;
rand_fill(&rnd, sizeof(rnd));
rnd %= total_len;
return rnd > remaining_len ? remaining_len : rnd;
}
int main(int __attribute__ ((unused)) argc, char __attribute__ ((unused)) *argv[]) {
rand_init();
uint64_t total_len = 1397;
for (uint64_t remaining = total_len, consume = 0; remaining; remaining -= consume) {
consume = get_split(total_len, remaining);
fprintf(stderr, "consume %ju bytes\n", (uintmax_t) consume);
}
rand_cleanup();
}
|
...
static uint64_t get_split(uint64_t total_len, uint64_t remaining_len) {
uint64_t rnd;
rand_fill(&rnd, sizeof(rnd));
rnd %= total_len;
return rnd > remaining_len ? remaining_len : rnd;
}
int main(int __attribute__ ((unused)) argc, char __attribute__ ((unused)) *argv[]) {
rand_init();
uint64_t total_len = 1397;
for (uint64_t remaining = total_len, consume = 0; remaining; remaining -= consume) {
consume = get_split(total_len, remaining);
fprintf(stderr, "consume %ju bytes\n", (uintmax_t) consume);
}
rand_cleanup();
...
|
cf16c64e378f64d2267f75444c568aed895f940c
|
setup.py
|
setup.py
|
import platform, sys
from distutils.core import setup
from distextend import *
packages, package_data = findPackages("countershape")
setup(
name = "countershape",
version = "0.1",
description = "A framework for rendering static documentation.",
author = "Nullcube Pty Ltd",
author_email = "[email protected]",
url = "http://dev.nullcube.com",
packages = packages,
package_data = package_data,
scripts = ["cshape"],
)
|
import platform, sys
from distutils.core import setup
from distextend import *
packages, package_data = findPackages("countershape")
setup(
name = "countershape",
version = "0.1",
description = "A framework for rendering static documentation.",
author = "Nullcube Pty Ltd",
author_email = "[email protected]",
url = "http://dev.nullcube.com",
packages = packages,
package_data = package_data,
scripts = ["cshape", "csblog"],
)
|
Add csblog to installed scripts.
|
Add csblog to installed scripts.
|
Python
|
mit
|
mhils/countershape,samtaufa/countershape,cortesi/countershape,cortesi/countershape,samtaufa/countershape,mhils/countershape
|
python
|
## Code Before:
import platform, sys
from distutils.core import setup
from distextend import *
packages, package_data = findPackages("countershape")
setup(
name = "countershape",
version = "0.1",
description = "A framework for rendering static documentation.",
author = "Nullcube Pty Ltd",
author_email = "[email protected]",
url = "http://dev.nullcube.com",
packages = packages,
package_data = package_data,
scripts = ["cshape"],
)
## Instruction:
Add csblog to installed scripts.
## Code After:
import platform, sys
from distutils.core import setup
from distextend import *
packages, package_data = findPackages("countershape")
setup(
name = "countershape",
version = "0.1",
description = "A framework for rendering static documentation.",
author = "Nullcube Pty Ltd",
author_email = "[email protected]",
url = "http://dev.nullcube.com",
packages = packages,
package_data = package_data,
scripts = ["cshape", "csblog"],
)
|
...
url = "http://dev.nullcube.com",
packages = packages,
package_data = package_data,
scripts = ["cshape", "csblog"],
)
...
|
ed3906b295669b1c0e38d88a7eb19cdde324042b
|
pybuild/packages/libzmq.py
|
pybuild/packages/libzmq.py
|
from ..source import GitSource
from ..package import Package
from ..patch import LocalPatch
from ..util import target_arch
class LibZMQ(Package):
source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5')
patches = [
LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'),
#LocalPatch('0001-Disable-stderr-in-blas_server'),
]
skip_uploading = True
re_configure = True
def prepare(self):
pass
def build(self):
import os
self.system(f'./autogen.sh')
self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}')
self.system(f'make install')
#self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr'])
|
from ..source import GitSource
from ..package import Package
from ..patch import LocalPatch
from ..util import target_arch
class LibZMQ(Package):
source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5')
patches = [
LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'),
#LocalPatch('0001-Disable-stderr-in-blas_server'),
]
skip_uploading = True
re_configure = True
def prepare(self):
pass
def build(self):
import os
self.system(f'./autogen.sh')
self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}')
self.system(f'make install')
#self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr'])
self.system(
f'if [ -e {self.destdir()}/lib/libzmq.so ] ; then mv {self.destdir()}/lib/libzmq.so {self.destdir()}/lib/libzmq.so.old; fi'
)
|
Fix issue for building PyZMQ
|
Fix issue for building PyZMQ
|
Python
|
apache-2.0
|
qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core
|
python
|
## Code Before:
from ..source import GitSource
from ..package import Package
from ..patch import LocalPatch
from ..util import target_arch
class LibZMQ(Package):
source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5')
patches = [
LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'),
#LocalPatch('0001-Disable-stderr-in-blas_server'),
]
skip_uploading = True
re_configure = True
def prepare(self):
pass
def build(self):
import os
self.system(f'./autogen.sh')
self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}')
self.system(f'make install')
#self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr'])
## Instruction:
Fix issue for building PyZMQ
## Code After:
from ..source import GitSource
from ..package import Package
from ..patch import LocalPatch
from ..util import target_arch
class LibZMQ(Package):
source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5')
patches = [
LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'),
#LocalPatch('0001-Disable-stderr-in-blas_server'),
]
skip_uploading = True
re_configure = True
def prepare(self):
pass
def build(self):
import os
self.system(f'./autogen.sh')
self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}')
self.system(f'make install')
#self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr'])
self.system(
f'if [ -e {self.destdir()}/lib/libzmq.so ] ; then mv {self.destdir()}/lib/libzmq.so {self.destdir()}/lib/libzmq.so.old; fi'
)
|
...
self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}')
self.system(f'make install')
#self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr'])
self.system(
f'if [ -e {self.destdir()}/lib/libzmq.so ] ; then mv {self.destdir()}/lib/libzmq.so {self.destdir()}/lib/libzmq.so.old; fi'
)
...
|
884ccfb98a4cf5c1f12fb5b71271a238379e1d96
|
examples/src/example_reduce.c
|
examples/src/example_reduce.c
|
/*
* example2.c
*
* Created on: 3 May 2016
* Author: nick
*/
#include "ndm.h"
#include <mpi.h>
#include <stdio.h>
void recvFunction(void*, NDM_Metadata);
int main(int argc, char* argv[]) {
int provided;
MPI_Init_thread(&argc, &argv, MPI_THREAD_MULTIPLE, &provided);
ndmInit();
char uuid[10];
int data = 10;
ndmReduce(&data, 1, NDM_INT, NDM_SUM, recvFunction, 0, NDM_GLOBAL_GROUP, "a");
ndmGroupRank(NDM_GLOBAL_GROUP, &data);
ndmAllReduce(&data, 1, NDM_INT, NDM_MAX, recvFunction, NDM_GLOBAL_GROUP, "maxrank");
ndmFinalise();
MPI_Finalize();
return 0;
}
void recvFunction(void* buffer, NDM_Metadata metaData) {
printf("Got reduction data '%d' with uuid %s on pid %d\n", *((int*)buffer), metaData.unique_id, metaData.my_rank);
}
|
/*
* example2.c
*
* Created on: 3 May 2016
* Author: nick
*/
#include "ndm.h"
#include <mpi.h>
#include <stdio.h>
void recvFunction(void*, NDM_Metadata);
void additiveRecvFunction(void*, NDM_Metadata);
int main(int argc, char* argv[]) {
int provided;
MPI_Init_thread(&argc, &argv, MPI_THREAD_MULTIPLE, &provided);
ndmInit();
char uuid[10];
int data = 10;
ndmReduce(&data, 1, NDM_INT, NDM_SUM, recvFunction, 0, NDM_GLOBAL_GROUP, "a");
ndmGroupRank(NDM_GLOBAL_GROUP, &data);
ndmAllReduce(&data, 1, NDM_INT, NDM_MAX, recvFunction, NDM_GLOBAL_GROUP, "maxrank");
data = 5;
ndmReduceAdditive(&data, 1, NDM_INT, 12, 1, 0, NDM_SUM, additiveRecvFunction, 0, NDM_GLOBAL_GROUP, "additive");
ndmReduceAdditive(&data, 1, NDM_INT, 12, 1, 0, NDM_SUM, additiveRecvFunction, 0, NDM_GLOBAL_GROUP, "additive");
ndmFinalise();
MPI_Finalize();
return 0;
}
void recvFunction(void* buffer, NDM_Metadata metaData) {
printf("Got reduction data '%d' with uuid %s on pid %d\n", *((int*)buffer), metaData.unique_id, metaData.my_rank);
}
void additiveRecvFunction(void* buffer, NDM_Metadata metaData) { printf("Got additive data '%d'\n", *((int*)buffer)); }
|
Reduce example uses additive calls
|
Reduce example uses additive calls
|
C
|
mit
|
mesham/ndm,mesham/ndm
|
c
|
## Code Before:
/*
* example2.c
*
* Created on: 3 May 2016
* Author: nick
*/
#include "ndm.h"
#include <mpi.h>
#include <stdio.h>
void recvFunction(void*, NDM_Metadata);
int main(int argc, char* argv[]) {
int provided;
MPI_Init_thread(&argc, &argv, MPI_THREAD_MULTIPLE, &provided);
ndmInit();
char uuid[10];
int data = 10;
ndmReduce(&data, 1, NDM_INT, NDM_SUM, recvFunction, 0, NDM_GLOBAL_GROUP, "a");
ndmGroupRank(NDM_GLOBAL_GROUP, &data);
ndmAllReduce(&data, 1, NDM_INT, NDM_MAX, recvFunction, NDM_GLOBAL_GROUP, "maxrank");
ndmFinalise();
MPI_Finalize();
return 0;
}
void recvFunction(void* buffer, NDM_Metadata metaData) {
printf("Got reduction data '%d' with uuid %s on pid %d\n", *((int*)buffer), metaData.unique_id, metaData.my_rank);
}
## Instruction:
Reduce example uses additive calls
## Code After:
/*
* example2.c
*
* Created on: 3 May 2016
* Author: nick
*/
#include "ndm.h"
#include <mpi.h>
#include <stdio.h>
void recvFunction(void*, NDM_Metadata);
void additiveRecvFunction(void*, NDM_Metadata);
int main(int argc, char* argv[]) {
int provided;
MPI_Init_thread(&argc, &argv, MPI_THREAD_MULTIPLE, &provided);
ndmInit();
char uuid[10];
int data = 10;
ndmReduce(&data, 1, NDM_INT, NDM_SUM, recvFunction, 0, NDM_GLOBAL_GROUP, "a");
ndmGroupRank(NDM_GLOBAL_GROUP, &data);
ndmAllReduce(&data, 1, NDM_INT, NDM_MAX, recvFunction, NDM_GLOBAL_GROUP, "maxrank");
data = 5;
ndmReduceAdditive(&data, 1, NDM_INT, 12, 1, 0, NDM_SUM, additiveRecvFunction, 0, NDM_GLOBAL_GROUP, "additive");
ndmReduceAdditive(&data, 1, NDM_INT, 12, 1, 0, NDM_SUM, additiveRecvFunction, 0, NDM_GLOBAL_GROUP, "additive");
ndmFinalise();
MPI_Finalize();
return 0;
}
void recvFunction(void* buffer, NDM_Metadata metaData) {
printf("Got reduction data '%d' with uuid %s on pid %d\n", *((int*)buffer), metaData.unique_id, metaData.my_rank);
}
void additiveRecvFunction(void* buffer, NDM_Metadata metaData) { printf("Got additive data '%d'\n", *((int*)buffer)); }
|
# ... existing code ...
#include <stdio.h>
void recvFunction(void*, NDM_Metadata);
void additiveRecvFunction(void*, NDM_Metadata);
int main(int argc, char* argv[]) {
int provided;
# ... modified code ...
ndmReduce(&data, 1, NDM_INT, NDM_SUM, recvFunction, 0, NDM_GLOBAL_GROUP, "a");
ndmGroupRank(NDM_GLOBAL_GROUP, &data);
ndmAllReduce(&data, 1, NDM_INT, NDM_MAX, recvFunction, NDM_GLOBAL_GROUP, "maxrank");
data = 5;
ndmReduceAdditive(&data, 1, NDM_INT, 12, 1, 0, NDM_SUM, additiveRecvFunction, 0, NDM_GLOBAL_GROUP, "additive");
ndmReduceAdditive(&data, 1, NDM_INT, 12, 1, 0, NDM_SUM, additiveRecvFunction, 0, NDM_GLOBAL_GROUP, "additive");
ndmFinalise();
MPI_Finalize();
return 0;
...
void recvFunction(void* buffer, NDM_Metadata metaData) {
printf("Got reduction data '%d' with uuid %s on pid %d\n", *((int*)buffer), metaData.unique_id, metaData.my_rank);
}
void additiveRecvFunction(void* buffer, NDM_Metadata metaData) { printf("Got additive data '%d'\n", *((int*)buffer)); }
# ... rest of the code ...
|
9d5abdaefa483574cdd81da8d8d4e63ef68f5ab8
|
crossfolium/__init__.py
|
crossfolium/__init__.py
|
import crossfolium.marker_function as marker_function
from crossfolium.crossfolium import (
Crossfilter,
PieFilter,
RowBarFilter,
BarFilter,
TableFilter,
CountFilter,
ResetFilter,
GeoChoroplethFilter,
)
from .map import (
FeatureGroupFilter,
HeatmapFilter,
)
__version__ = "0.0.0"
__all__ = [
'__version__',
'marker_function',
'Crossfilter',
'PieFilter',
'RowBarFilter',
'BarFilter',
'FeatureGroupFilter',
'TableFilter',
'CountFilter',
'ResetFilter',
'HeatmapFilter',
'GeoChoroplethFilter',
]
|
from __future__ import absolute_import
from crossfolium import marker_function
from crossfolium.crossfolium import (
Crossfilter,
PieFilter,
RowBarFilter,
BarFilter,
TableFilter,
CountFilter,
ResetFilter,
GeoChoroplethFilter,
)
from crossfolium.map import (
FeatureGroupFilter,
HeatmapFilter,
)
__version__ = "0.0.0"
__all__ = [
'__version__',
'marker_function',
'Crossfilter',
'PieFilter',
'RowBarFilter',
'BarFilter',
'FeatureGroupFilter',
'TableFilter',
'CountFilter',
'ResetFilter',
'HeatmapFilter',
'GeoChoroplethFilter',
]
|
Handle absolute import for py27
|
Handle absolute import for py27
|
Python
|
mit
|
BibMartin/crossfolium,BibMartin/crossfolium
|
python
|
## Code Before:
import crossfolium.marker_function as marker_function
from crossfolium.crossfolium import (
Crossfilter,
PieFilter,
RowBarFilter,
BarFilter,
TableFilter,
CountFilter,
ResetFilter,
GeoChoroplethFilter,
)
from .map import (
FeatureGroupFilter,
HeatmapFilter,
)
__version__ = "0.0.0"
__all__ = [
'__version__',
'marker_function',
'Crossfilter',
'PieFilter',
'RowBarFilter',
'BarFilter',
'FeatureGroupFilter',
'TableFilter',
'CountFilter',
'ResetFilter',
'HeatmapFilter',
'GeoChoroplethFilter',
]
## Instruction:
Handle absolute import for py27
## Code After:
from __future__ import absolute_import
from crossfolium import marker_function
from crossfolium.crossfolium import (
Crossfilter,
PieFilter,
RowBarFilter,
BarFilter,
TableFilter,
CountFilter,
ResetFilter,
GeoChoroplethFilter,
)
from crossfolium.map import (
FeatureGroupFilter,
HeatmapFilter,
)
__version__ = "0.0.0"
__all__ = [
'__version__',
'marker_function',
'Crossfilter',
'PieFilter',
'RowBarFilter',
'BarFilter',
'FeatureGroupFilter',
'TableFilter',
'CountFilter',
'ResetFilter',
'HeatmapFilter',
'GeoChoroplethFilter',
]
|
...
from __future__ import absolute_import
from crossfolium import marker_function
from crossfolium.crossfolium import (
Crossfilter,
...
GeoChoroplethFilter,
)
from crossfolium.map import (
FeatureGroupFilter,
HeatmapFilter,
)
...
|
54a345eb96bce8c3035b402ce009b1e3fda46a42
|
quran_text/serializers.py
|
quran_text/serializers.py
|
from rest_framework import serializers
from .models import Sura, Ayah
class SuraSerializer(serializers.ModelSerializer):
class Meta:
model = Sura
fields = ['index', 'name']
class AyahSerializer(serializers.ModelSerializer):
class Meta:
model = Ayah
fields = ['sura', 'number', 'text']
|
from rest_framework import serializers
from .models import Sura, Ayah
class SuraSerializer(serializers.ModelSerializer):
class Meta:
model = Sura
fields = ['index', 'name']
class AyahSerializer(serializers.ModelSerializer):
sura_id = serializers.IntegerField(source='sura.pk')
sura_name = serializers.CharField(source='sura.name')
ayah_number = serializers.IntegerField(source='number')
class Meta:
model = Ayah
fields = ['sura_id', 'sura_name', 'ayah_number', 'text']
|
Change label and add Sura name to Ayah Serializer
|
Change label and add Sura name to Ayah Serializer
|
Python
|
mit
|
EmadMokhtar/tafseer_api
|
python
|
## Code Before:
from rest_framework import serializers
from .models import Sura, Ayah
class SuraSerializer(serializers.ModelSerializer):
class Meta:
model = Sura
fields = ['index', 'name']
class AyahSerializer(serializers.ModelSerializer):
class Meta:
model = Ayah
fields = ['sura', 'number', 'text']
## Instruction:
Change label and add Sura name to Ayah Serializer
## Code After:
from rest_framework import serializers
from .models import Sura, Ayah
class SuraSerializer(serializers.ModelSerializer):
class Meta:
model = Sura
fields = ['index', 'name']
class AyahSerializer(serializers.ModelSerializer):
sura_id = serializers.IntegerField(source='sura.pk')
sura_name = serializers.CharField(source='sura.name')
ayah_number = serializers.IntegerField(source='number')
class Meta:
model = Ayah
fields = ['sura_id', 'sura_name', 'ayah_number', 'text']
|
// ... existing code ...
class AyahSerializer(serializers.ModelSerializer):
sura_id = serializers.IntegerField(source='sura.pk')
sura_name = serializers.CharField(source='sura.name')
ayah_number = serializers.IntegerField(source='number')
class Meta:
model = Ayah
fields = ['sura_id', 'sura_name', 'ayah_number', 'text']
// ... rest of the code ...
|
afa9f4364127a01d4621531642149285274d15b7
|
src/main/java/com/fatico/winthing/Settings.java
|
src/main/java/com/fatico/winthing/Settings.java
|
package com.fatico.winthing;
public abstract class Settings {
public static final String BROKER_URL = "winthing.brokerUrl";
public static final String BROKER_USERNAME = "winthing.brokerUsername";
public static final String BROKER_PASSWORD = "winthing.brokerPassword";
public static final String CLIENT_ID = "winthing.clientId";
public static final String TOPIC_PREFIX = "winthing.topicPrefix";
public static final String RECONNECT_INTERVAL = "winthing.reconnectInterval";
}
|
package com.fatico.winthing;
public abstract class Settings {
public static final String BROKER_URL = "broker";
public static final String BROKER_USERNAME = "username";
public static final String BROKER_PASSWORD = "password";
public static final String CLIENT_ID = "clientid";
public static final String TOPIC_PREFIX = "prefix";
public static final String RECONNECT_INTERVAL = "reconnect";
}
|
Remove prefix from config variables
|
Remove prefix from config variables
|
Java
|
apache-2.0
|
msiedlarek/winthing
|
java
|
## Code Before:
package com.fatico.winthing;
public abstract class Settings {
public static final String BROKER_URL = "winthing.brokerUrl";
public static final String BROKER_USERNAME = "winthing.brokerUsername";
public static final String BROKER_PASSWORD = "winthing.brokerPassword";
public static final String CLIENT_ID = "winthing.clientId";
public static final String TOPIC_PREFIX = "winthing.topicPrefix";
public static final String RECONNECT_INTERVAL = "winthing.reconnectInterval";
}
## Instruction:
Remove prefix from config variables
## Code After:
package com.fatico.winthing;
public abstract class Settings {
public static final String BROKER_URL = "broker";
public static final String BROKER_USERNAME = "username";
public static final String BROKER_PASSWORD = "password";
public static final String CLIENT_ID = "clientid";
public static final String TOPIC_PREFIX = "prefix";
public static final String RECONNECT_INTERVAL = "reconnect";
}
|
// ... existing code ...
public abstract class Settings {
public static final String BROKER_URL = "broker";
public static final String BROKER_USERNAME = "username";
public static final String BROKER_PASSWORD = "password";
public static final String CLIENT_ID = "clientid";
public static final String TOPIC_PREFIX = "prefix";
public static final String RECONNECT_INTERVAL = "reconnect";
}
// ... rest of the code ...
|
dcc188fbc012400d1fdc4aa5dd381a031b024cc8
|
mods/railcraft/api/core/IOwnable.java
|
mods/railcraft/api/core/IOwnable.java
|
/*
* ******************************************************************************
* Copyright 2011-2015 CovertJaguar
*
* This work (the API) is licensed under the "MIT" License, see LICENSE.md for details.
* ***************************************************************************
*/
package mods.railcraft.api.core;
import com.mojang.authlib.GameProfile;
/**
* Implemented by objects that can be owned.
* <p/>
* Among other uses, when used on a Tile Entity, the Magnifying Glass can be used to inspect the owner.
*
* @author CovertJaguar <http://www.railcraft.info/>
*/
public interface IOwnable {
/**
* Returns the GameProfile of the owner of the object.
*
*/
GameProfile getOwner();
/**
* Returns a localization tag (object-tag.name) that can be used in chat messages and such.
*
*/
String getLocalizationTag();
}
|
/*
* ******************************************************************************
* Copyright 2011-2015 CovertJaguar
*
* This work (the API) is licensed under the "MIT" License, see LICENSE.md for details.
* ***************************************************************************
*/
package mods.railcraft.api.core;
import com.mojang.authlib.GameProfile;
import net.minecraft.world.IWorldNameable;
/**
* Implemented by objects that can be owned.
* <p/>
* Among other uses, when used on a Tile Entity, the Magnifying Glass can be used to inspect the owner.
*
* @author CovertJaguar <http://www.railcraft.info/>
*/
public interface IOwnable extends IWorldNameable {
/**
* Returns the GameProfile of the owner of the object.
*/
GameProfile getOwner();
}
|
Use the new IWorldNameable interface where possible to keep things standardized.
|
Use the new IWorldNameable interface where possible to keep things standardized.
|
Java
|
mit
|
liachmodded/Railcraft-API,CovertJaguar/Railcraft-API
|
java
|
## Code Before:
/*
* ******************************************************************************
* Copyright 2011-2015 CovertJaguar
*
* This work (the API) is licensed under the "MIT" License, see LICENSE.md for details.
* ***************************************************************************
*/
package mods.railcraft.api.core;
import com.mojang.authlib.GameProfile;
/**
* Implemented by objects that can be owned.
* <p/>
* Among other uses, when used on a Tile Entity, the Magnifying Glass can be used to inspect the owner.
*
* @author CovertJaguar <http://www.railcraft.info/>
*/
public interface IOwnable {
/**
* Returns the GameProfile of the owner of the object.
*
*/
GameProfile getOwner();
/**
* Returns a localization tag (object-tag.name) that can be used in chat messages and such.
*
*/
String getLocalizationTag();
}
## Instruction:
Use the new IWorldNameable interface where possible to keep things standardized.
## Code After:
/*
* ******************************************************************************
* Copyright 2011-2015 CovertJaguar
*
* This work (the API) is licensed under the "MIT" License, see LICENSE.md for details.
* ***************************************************************************
*/
package mods.railcraft.api.core;
import com.mojang.authlib.GameProfile;
import net.minecraft.world.IWorldNameable;
/**
* Implemented by objects that can be owned.
* <p/>
* Among other uses, when used on a Tile Entity, the Magnifying Glass can be used to inspect the owner.
*
* @author CovertJaguar <http://www.railcraft.info/>
*/
public interface IOwnable extends IWorldNameable {
/**
* Returns the GameProfile of the owner of the object.
*/
GameProfile getOwner();
}
|
# ... existing code ...
package mods.railcraft.api.core;
import com.mojang.authlib.GameProfile;
import net.minecraft.world.IWorldNameable;
/**
* Implemented by objects that can be owned.
# ... modified code ...
*
* @author CovertJaguar <http://www.railcraft.info/>
*/
public interface IOwnable extends IWorldNameable {
/**
* Returns the GameProfile of the owner of the object.
*/
GameProfile getOwner();
}
# ... rest of the code ...
|
f032501126e7bb6d86441e38112c6bdf5035c62e
|
icekit/search_indexes.py
|
icekit/search_indexes.py
|
from fluent_pages.pagetypes.flatpage.models import FlatPage
from fluent_pages.pagetypes.fluentpage.models import FluentPage
from haystack import indexes
class FluentPageIndex(indexes.SearchIndex, indexes.Indexable):
"""
Search index for a fluent page.
"""
text = indexes.CharField(document=True, use_template=True)
author = indexes.CharField(model_attr='author')
publication_date = indexes.DateTimeField(model_attr='publication_date', null=True)
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FluentPage
def index_queryset(self, using=None):
"""
Queryset appropriate for this object to allow search for.
"""
return self.get_model().objects.published()
class FlatPageIndex(FluentPageIndex):
"""
Search index for a flat page.
As everything except the model is the same as for a FluentPageIndex
we shall subclass it and overwrite the one part we need.
"""
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FlatPage
|
from fluent_pages.pagetypes.flatpage.models import FlatPage
from fluent_pages.pagetypes.fluentpage.models import FluentPage
from haystack import indexes
from django.conf import settings
# Optional search indexes which can be used with the default FluentPage and FlatPage models.
if getattr(settings, 'ICEKIT_USE_SEARCH_INDEXES', True):
class FluentPageIndex(indexes.SearchIndex, indexes.Indexable):
"""
Search index for a fluent page.
"""
text = indexes.CharField(document=True, use_template=True)
author = indexes.CharField(model_attr='author')
publication_date = indexes.DateTimeField(model_attr='publication_date', null=True)
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FluentPage
def index_queryset(self, using=None):
"""
Queryset appropriate for this object to allow search for.
"""
return self.get_model().objects.published()
class FlatPageIndex(FluentPageIndex):
"""
Search index for a flat page.
As everything except the model is the same as for a FluentPageIndex
we shall subclass it and overwrite the one part we need.
"""
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FlatPage
|
Add setting to turn off search indexes.
|
Add setting to turn off search indexes.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
python
|
## Code Before:
from fluent_pages.pagetypes.flatpage.models import FlatPage
from fluent_pages.pagetypes.fluentpage.models import FluentPage
from haystack import indexes
class FluentPageIndex(indexes.SearchIndex, indexes.Indexable):
"""
Search index for a fluent page.
"""
text = indexes.CharField(document=True, use_template=True)
author = indexes.CharField(model_attr='author')
publication_date = indexes.DateTimeField(model_attr='publication_date', null=True)
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FluentPage
def index_queryset(self, using=None):
"""
Queryset appropriate for this object to allow search for.
"""
return self.get_model().objects.published()
class FlatPageIndex(FluentPageIndex):
"""
Search index for a flat page.
As everything except the model is the same as for a FluentPageIndex
we shall subclass it and overwrite the one part we need.
"""
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FlatPage
## Instruction:
Add setting to turn off search indexes.
## Code After:
from fluent_pages.pagetypes.flatpage.models import FlatPage
from fluent_pages.pagetypes.fluentpage.models import FluentPage
from haystack import indexes
from django.conf import settings
# Optional search indexes which can be used with the default FluentPage and FlatPage models.
if getattr(settings, 'ICEKIT_USE_SEARCH_INDEXES', True):
class FluentPageIndex(indexes.SearchIndex, indexes.Indexable):
"""
Search index for a fluent page.
"""
text = indexes.CharField(document=True, use_template=True)
author = indexes.CharField(model_attr='author')
publication_date = indexes.DateTimeField(model_attr='publication_date', null=True)
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FluentPage
def index_queryset(self, using=None):
"""
Queryset appropriate for this object to allow search for.
"""
return self.get_model().objects.published()
class FlatPageIndex(FluentPageIndex):
"""
Search index for a flat page.
As everything except the model is the same as for a FluentPageIndex
we shall subclass it and overwrite the one part we need.
"""
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FlatPage
|
# ... existing code ...
from fluent_pages.pagetypes.flatpage.models import FlatPage
from fluent_pages.pagetypes.fluentpage.models import FluentPage
from haystack import indexes
from django.conf import settings
# Optional search indexes which can be used with the default FluentPage and FlatPage models.
if getattr(settings, 'ICEKIT_USE_SEARCH_INDEXES', True):
class FluentPageIndex(indexes.SearchIndex, indexes.Indexable):
"""
Search index for a fluent page.
"""
text = indexes.CharField(document=True, use_template=True)
author = indexes.CharField(model_attr='author')
publication_date = indexes.DateTimeField(model_attr='publication_date', null=True)
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FluentPage
def index_queryset(self, using=None):
"""
Queryset appropriate for this object to allow search for.
"""
return self.get_model().objects.published()
class FlatPageIndex(FluentPageIndex):
"""
Search index for a flat page.
As everything except the model is the same as for a FluentPageIndex
we shall subclass it and overwrite the one part we need.
"""
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FlatPage
# ... rest of the code ...
|
1fb091435e840cbed3ccb7431a34f33d123c8292
|
newTruckApp/src/main/java/app/ShippingTrackerObserver.java
|
newTruckApp/src/main/java/app/ShippingTrackerObserver.java
|
package app;
import java.util.Observable;
import java.util.Observer;
/**
* Created by Benjamin on 03/02/2016.
*/
public class ShippingTrackerObserver implements Observer {
@Override
public void update(Observable o, Object arg) {
}
}
|
package app;
import app.action.Action;
import app.action.ActionEvent;
import app.action.Drop;
import app.shipper.BasicShipper;
import java.util.ArrayList;
import java.util.List;
import java.util.Observable;
import java.util.Observer;
/**
* Created by Benjamin on 03/02/2016.
*/
public class ShippingTrackerObserver implements Observer {
List<String> achievedDeliveries = new ArrayList<>();
@Override
public void update(Observable o, Object arg) {
ActionEvent actionEvent = (ActionEvent) arg;
if(o instanceof Drop && actionEvent.equals(ActionEvent.ENDED)) {
Drop action = (Drop) o;
System.out.printf("\n\t[DELIVERY DONE] %s has performed package %s\n", action.getTarget().getName(), action.getTarget().getPack());
}
}
}
|
Add basic code into new Spy
|
Add basic code into new Spy
|
Java
|
mit
|
ttben/al-drone-delivery,ttben/al-drone-delivery
|
java
|
## Code Before:
package app;
import java.util.Observable;
import java.util.Observer;
/**
* Created by Benjamin on 03/02/2016.
*/
public class ShippingTrackerObserver implements Observer {
@Override
public void update(Observable o, Object arg) {
}
}
## Instruction:
Add basic code into new Spy
## Code After:
package app;
import app.action.Action;
import app.action.ActionEvent;
import app.action.Drop;
import app.shipper.BasicShipper;
import java.util.ArrayList;
import java.util.List;
import java.util.Observable;
import java.util.Observer;
/**
* Created by Benjamin on 03/02/2016.
*/
public class ShippingTrackerObserver implements Observer {
List<String> achievedDeliveries = new ArrayList<>();
@Override
public void update(Observable o, Object arg) {
ActionEvent actionEvent = (ActionEvent) arg;
if(o instanceof Drop && actionEvent.equals(ActionEvent.ENDED)) {
Drop action = (Drop) o;
System.out.printf("\n\t[DELIVERY DONE] %s has performed package %s\n", action.getTarget().getName(), action.getTarget().getPack());
}
}
}
|
# ... existing code ...
package app;
import app.action.Action;
import app.action.ActionEvent;
import app.action.Drop;
import app.shipper.BasicShipper;
import java.util.ArrayList;
import java.util.List;
import java.util.Observable;
import java.util.Observer;
# ... modified code ...
* Created by Benjamin on 03/02/2016.
*/
public class ShippingTrackerObserver implements Observer {
List<String> achievedDeliveries = new ArrayList<>();
@Override
public void update(Observable o, Object arg) {
ActionEvent actionEvent = (ActionEvent) arg;
if(o instanceof Drop && actionEvent.equals(ActionEvent.ENDED)) {
Drop action = (Drop) o;
System.out.printf("\n\t[DELIVERY DONE] %s has performed package %s\n", action.getTarget().getName(), action.getTarget().getPack());
}
}
}
# ... rest of the code ...
|
fd1a0850f9c4c5c34accf64af47ac9bbf25faf74
|
setup.py
|
setup.py
|
import re
from pathlib import Path
from setuptools import setup, find_packages
with Path(__file__).with_name('dvhb_hybrid').joinpath('__init__.py').open() as f:
VERSION = re.compile(r'.*__version__ = \'(.*?)\'', re.S).match(f.read()).group(1)
setup(
name='dvhb-hybrid',
version=VERSION,
description='',
author='Malev A',
author_email='[email protected]',
url='https://github.com/dvhbru/dvhb-hybrid',
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Framework :: Django',
'Framework :: Aiohttp',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'django',
'psycopg2',
'aiopg',
'aioworkers',
'aiohttp_apiset',
'sqlalchemy',
'pyyaml',
'Pillow',
'Babel',
]
)
|
import re
from pathlib import Path
from setuptools import setup, find_packages
with Path(__file__).with_name('dvhb_hybrid').joinpath('__init__.py').open() as f:
VERSION = re.compile(r'.*__version__ = \'(.*?)\'', re.S).match(f.read()).group(1)
setup(
name='dvhb-hybrid',
version=VERSION,
description='',
author='Malev A',
author_email='[email protected]',
url='https://github.com/dvhbru/dvhb-hybrid',
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Framework :: Django',
'Framework :: Aiohttp',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'django',
'psycopg2',
'aiopg',
'aioworkers',
'aiohttp_apiset',
'sqlalchemy',
'pyyaml',
'Pillow',
'Babel',
'aioauth-client',
]
)
|
Add aioauth-client into package install_requires
|
Add aioauth-client into package install_requires
|
Python
|
mit
|
dvhbru/dvhb-hybrid
|
python
|
## Code Before:
import re
from pathlib import Path
from setuptools import setup, find_packages
with Path(__file__).with_name('dvhb_hybrid').joinpath('__init__.py').open() as f:
VERSION = re.compile(r'.*__version__ = \'(.*?)\'', re.S).match(f.read()).group(1)
setup(
name='dvhb-hybrid',
version=VERSION,
description='',
author='Malev A',
author_email='[email protected]',
url='https://github.com/dvhbru/dvhb-hybrid',
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Framework :: Django',
'Framework :: Aiohttp',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'django',
'psycopg2',
'aiopg',
'aioworkers',
'aiohttp_apiset',
'sqlalchemy',
'pyyaml',
'Pillow',
'Babel',
]
)
## Instruction:
Add aioauth-client into package install_requires
## Code After:
import re
from pathlib import Path
from setuptools import setup, find_packages
with Path(__file__).with_name('dvhb_hybrid').joinpath('__init__.py').open() as f:
VERSION = re.compile(r'.*__version__ = \'(.*?)\'', re.S).match(f.read()).group(1)
setup(
name='dvhb-hybrid',
version=VERSION,
description='',
author='Malev A',
author_email='[email protected]',
url='https://github.com/dvhbru/dvhb-hybrid',
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Framework :: Django',
'Framework :: Aiohttp',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'django',
'psycopg2',
'aiopg',
'aioworkers',
'aiohttp_apiset',
'sqlalchemy',
'pyyaml',
'Pillow',
'Babel',
'aioauth-client',
]
)
|
// ... existing code ...
'pyyaml',
'Pillow',
'Babel',
'aioauth-client',
]
)
// ... rest of the code ...
|
7a6e8af11ac28cf10e5ce33637bc883324dde641
|
game/models.py
|
game/models.py
|
from django.db import models
from django.utils import timezone
class Task(models.Model):
EQUALS_CHECK = 'EQ'
REGEX_CHECK = 'RE'
CHECK_CHOICES = (
(EQUALS_CHECK, 'Equals'),
(REGEX_CHECK, 'Regex'),
)
title_ru = models.CharField(null=False, blank=False, max_length=256)
title_en = models.CharField(null=False, blank=False, max_length=256)
desc_ru = models.TextField(null=False, blank=False)
desc_en = models.TextField(null=False, blank=False)
writeup_ru = models.TextField(null=False, blank=False)
writeup_en = models.TextField(null=False, blank=False)
flag = models.CharField(max_length=1024)
is_case_insensitive_check = models.BooleanField(default=False)
is_trimmed_check = models.BooleanField(default=False)
check = models.CharField(null=False, blank=False, max_length=2, choices=CHECK_CHOICES)
created_at = models.DateTimeField(null=False, blank=True)
def save(self, *args, **kwargs):
if self.pk is None:
self.created_at = timezone.now()
return super(Task, self).save(*args, **kwargs)
|
from django.db import models
from django.utils import timezone
class Task(models.Model):
EQUALS_CHECK = 'EQ'
REGEX_CHECK = 'RE'
CHECK_CHOICES = (
(EQUALS_CHECK, 'Equals'),
(REGEX_CHECK, 'Regex'),
)
title_ru = models.CharField(null=False, blank=False, max_length=256)
title_en = models.CharField(null=False, blank=False, max_length=256)
category = models.CharField(null=False, blank=False, max_length=256)
cost = models.IntegerField(null=False, blank=False)
desc_ru = models.TextField(null=False, blank=False)
desc_en = models.TextField(null=False, blank=False)
writeup_ru = models.TextField(null=False, blank=False)
writeup_en = models.TextField(null=False, blank=False)
flag = models.CharField(max_length=1024)
is_case_insensitive_check = models.BooleanField(default=False)
is_trimmed_check = models.BooleanField(default=False)
check = models.CharField(null=False, blank=False, max_length=2, choices=CHECK_CHOICES)
created_at = models.DateTimeField(null=False, blank=True)
def save(self, *args, **kwargs):
if self.pk is None:
self.created_at = timezone.now()
return super(Task, self).save(*args, **kwargs)
|
Add new fields to the task model
|
Add new fields to the task model
|
Python
|
bsd-3-clause
|
stefantsov/blackbox3,stefantsov/blackbox3,stefantsov/blackbox3
|
python
|
## Code Before:
from django.db import models
from django.utils import timezone
class Task(models.Model):
EQUALS_CHECK = 'EQ'
REGEX_CHECK = 'RE'
CHECK_CHOICES = (
(EQUALS_CHECK, 'Equals'),
(REGEX_CHECK, 'Regex'),
)
title_ru = models.CharField(null=False, blank=False, max_length=256)
title_en = models.CharField(null=False, blank=False, max_length=256)
desc_ru = models.TextField(null=False, blank=False)
desc_en = models.TextField(null=False, blank=False)
writeup_ru = models.TextField(null=False, blank=False)
writeup_en = models.TextField(null=False, blank=False)
flag = models.CharField(max_length=1024)
is_case_insensitive_check = models.BooleanField(default=False)
is_trimmed_check = models.BooleanField(default=False)
check = models.CharField(null=False, blank=False, max_length=2, choices=CHECK_CHOICES)
created_at = models.DateTimeField(null=False, blank=True)
def save(self, *args, **kwargs):
if self.pk is None:
self.created_at = timezone.now()
return super(Task, self).save(*args, **kwargs)
## Instruction:
Add new fields to the task model
## Code After:
from django.db import models
from django.utils import timezone
class Task(models.Model):
EQUALS_CHECK = 'EQ'
REGEX_CHECK = 'RE'
CHECK_CHOICES = (
(EQUALS_CHECK, 'Equals'),
(REGEX_CHECK, 'Regex'),
)
title_ru = models.CharField(null=False, blank=False, max_length=256)
title_en = models.CharField(null=False, blank=False, max_length=256)
category = models.CharField(null=False, blank=False, max_length=256)
cost = models.IntegerField(null=False, blank=False)
desc_ru = models.TextField(null=False, blank=False)
desc_en = models.TextField(null=False, blank=False)
writeup_ru = models.TextField(null=False, blank=False)
writeup_en = models.TextField(null=False, blank=False)
flag = models.CharField(max_length=1024)
is_case_insensitive_check = models.BooleanField(default=False)
is_trimmed_check = models.BooleanField(default=False)
check = models.CharField(null=False, blank=False, max_length=2, choices=CHECK_CHOICES)
created_at = models.DateTimeField(null=False, blank=True)
def save(self, *args, **kwargs):
if self.pk is None:
self.created_at = timezone.now()
return super(Task, self).save(*args, **kwargs)
|
# ... existing code ...
title_ru = models.CharField(null=False, blank=False, max_length=256)
title_en = models.CharField(null=False, blank=False, max_length=256)
category = models.CharField(null=False, blank=False, max_length=256)
cost = models.IntegerField(null=False, blank=False)
desc_ru = models.TextField(null=False, blank=False)
desc_en = models.TextField(null=False, blank=False)
writeup_ru = models.TextField(null=False, blank=False)
# ... rest of the code ...
|
996efffa55f599c543df2439cb6ddff2af48237e
|
src/main/java/se/fnord/katydid/internal/CompositeTester.java
|
src/main/java/se/fnord/katydid/internal/CompositeTester.java
|
package se.fnord.katydid.internal;
import se.fnord.katydid.ComparisonStatus;
import se.fnord.katydid.DataTester;
import se.fnord.katydid.TestingContext;
import java.nio.ByteBuffer;
import static java.lang.Math.max;
public abstract class CompositeTester extends AbstractTester {
private final DataTester[] values;
public CompositeTester(String name, DataTester... values) {
super(name);
this.values = values;
}
@Override
public ComparisonStatus compareItem(TestingContext context, int pass, int itemIndex) {
return context.compareTo(values[itemIndex], pass);
}
@Override
public int passCount() {
int mp = 0;
for (DataTester c : values)
mp = max(mp, c.passCount());
return mp;
}
@Override
public int lengthOfItem(int itemIndex) {
return values[itemIndex].length();
}
@Override
public int itemCount() {
return values.length;
}
@Override
public String formatItem(String name, int itemIndex) {
		checkItemIndex(itemIndex);
		return formatChild(itemIndex, values[itemIndex]);
}
@Override
public void toBuffer(ByteBuffer bb) {
for (DataTester c : values) {
c.toBuffer(bb);
}
}
}
|
package se.fnord.katydid.internal;
import se.fnord.katydid.ComparisonStatus;
import se.fnord.katydid.DataTester;
import se.fnord.katydid.TestingContext;
import java.nio.ByteBuffer;
import static java.lang.Math.max;
public abstract class CompositeTester extends AbstractTester {
private final DataTester[] values;
public CompositeTester(String name, DataTester... values) {
super(name);
this.values = values;
}
@Override
public ComparisonStatus compareItem(TestingContext context, int pass, int itemIndex) {
return context.compareTo(values[itemIndex], pass);
}
@Override
public int passCount() {
int mp = 0;
for (DataTester c : values)
mp = max(mp, c.passCount());
return mp;
}
@Override
public int lengthOfItem(int itemIndex) {
return values[itemIndex].length();
}
@Override
public int itemCount() {
return values.length;
}
@Override
public String formatItem(String name, int itemIndex) {
checkItemIndex(itemIndex);
return name + formatChild(itemIndex, values[itemIndex]);
}
@Override
public void toBuffer(ByteBuffer bb) {
for (DataTester c : values) {
c.toBuffer(bb);
}
}
}
|
Prepend the tester name in the composite item name formatter
|
Prepend the tester name in the composite item name formatter
|
Java
|
isc
|
gsson/katydid
|
java
|
## Code Before:
package se.fnord.katydid.internal;
import se.fnord.katydid.ComparisonStatus;
import se.fnord.katydid.DataTester;
import se.fnord.katydid.TestingContext;
import java.nio.ByteBuffer;
import static java.lang.Math.max;
public abstract class CompositeTester extends AbstractTester {
private final DataTester[] values;
public CompositeTester(String name, DataTester... values) {
super(name);
this.values = values;
}
@Override
public ComparisonStatus compareItem(TestingContext context, int pass, int itemIndex) {
return context.compareTo(values[itemIndex], pass);
}
@Override
public int passCount() {
int mp = 0;
for (DataTester c : values)
mp = max(mp, c.passCount());
return mp;
}
@Override
public int lengthOfItem(int itemIndex) {
return values[itemIndex].length();
}
@Override
public int itemCount() {
return values.length;
}
@Override
public String formatItem(String name, int itemIndex) {
		checkItemIndex(itemIndex);
		return formatChild(itemIndex, values[itemIndex]);
}
@Override
public void toBuffer(ByteBuffer bb) {
for (DataTester c : values) {
c.toBuffer(bb);
}
}
}
## Instruction:
Prepend the tester name in the composite item name formatter
## Code After:
package se.fnord.katydid.internal;
import se.fnord.katydid.ComparisonStatus;
import se.fnord.katydid.DataTester;
import se.fnord.katydid.TestingContext;
import java.nio.ByteBuffer;
import static java.lang.Math.max;
public abstract class CompositeTester extends AbstractTester {
private final DataTester[] values;
public CompositeTester(String name, DataTester... values) {
super(name);
this.values = values;
}
@Override
public ComparisonStatus compareItem(TestingContext context, int pass, int itemIndex) {
return context.compareTo(values[itemIndex], pass);
}
@Override
public int passCount() {
int mp = 0;
for (DataTester c : values)
mp = max(mp, c.passCount());
return mp;
}
@Override
public int lengthOfItem(int itemIndex) {
return values[itemIndex].length();
}
@Override
public int itemCount() {
return values.length;
}
@Override
public String formatItem(String name, int itemIndex) {
checkItemIndex(itemIndex);
return name + formatChild(itemIndex, values[itemIndex]);
}
@Override
public void toBuffer(ByteBuffer bb) {
for (DataTester c : values) {
c.toBuffer(bb);
}
}
}
|
// ... existing code ...
@Override
public String formatItem(String name, int itemIndex) {
checkItemIndex(itemIndex);
return name + formatChild(itemIndex, values[itemIndex]);
}
@Override
// ... rest of the code ...
|
28cdad6e8ab6bd400ef50331a2f93af93620cc7f
|
app/models.py
|
app/models.py
|
from django.db import models
class Event(models.Model):
when = models.DateTimeField(auto_now=True)
what = models.TextField()
|
from django.db import models
class Event(models.Model):
when = models.DateTimeField(auto_now=True)
what = models.TextField()
def time(self):
return '{:%H:%M}'.format(self.when)
|
Return human-sensible time in Event
|
Return human-sensible time in Event
|
Python
|
mit
|
schatten/logan
|
python
|
## Code Before:
from django.db import models
class Event(models.Model):
when = models.DateTimeField(auto_now=True)
what = models.TextField()
## Instruction:
Return human-sensible time in Event
## Code After:
from django.db import models
class Event(models.Model):
when = models.DateTimeField(auto_now=True)
what = models.TextField()
def time(self):
return '{:%H:%M}'.format(self.when)
|
...
class Event(models.Model):
when = models.DateTimeField(auto_now=True)
what = models.TextField()
def time(self):
return '{:%H:%M}'.format(self.when)
...
|
29a1a39cf4f0fed6999bd787cce7e8e65c49ef4e
|
display_image.py
|
display_image.py
|
import matplotlib.pyplot as plt
class ImageWindow:
def __init__(self):
self.imsh = None
plt.ion()
plt.show()
def display(self, image):
if self.imsh is None or not plt.fignum_exists(self.imsh.figure.number):
self.imsh = plt.imshow(image, interpolation='nearest')
self.imsh.axes.axis('off')
self.imsh.figure.canvas.draw()
else:
self.imsh.set_data(image)
plt.pause(1e-4)
|
import matplotlib.pyplot as plt
class ImageWindow:
def __init__(self):
self.imsh = None
plt.ion()
plt.show()
def display(self, image):
if self.imsh is None or not plt.fignum_exists(self.imsh.figure.number):
self.imsh = plt.imshow(image, interpolation='nearest')
self.imsh.axes.axis('off')
self.imsh.axes.set_position((0, 0, 1, 1))
self.imsh.figure.canvas.draw()
else:
self.imsh.set_data(image)
plt.pause(1e-4)
|
Set image to fill the entire matplotlib window
|
Set image to fill the entire matplotlib window
|
Python
|
mit
|
crowsonkb/style_transfer,crowsonkb/style_transfer,crowsonkb/style_transfer,crowsonkb/style_transfer
|
python
|
## Code Before:
import matplotlib.pyplot as plt
class ImageWindow:
def __init__(self):
self.imsh = None
plt.ion()
plt.show()
def display(self, image):
if self.imsh is None or not plt.fignum_exists(self.imsh.figure.number):
self.imsh = plt.imshow(image, interpolation='nearest')
self.imsh.axes.axis('off')
self.imsh.figure.canvas.draw()
else:
self.imsh.set_data(image)
plt.pause(1e-4)
## Instruction:
Set image to fill the entire matplotlib window
## Code After:
import matplotlib.pyplot as plt
class ImageWindow:
def __init__(self):
self.imsh = None
plt.ion()
plt.show()
def display(self, image):
if self.imsh is None or not plt.fignum_exists(self.imsh.figure.number):
self.imsh = plt.imshow(image, interpolation='nearest')
self.imsh.axes.axis('off')
self.imsh.axes.set_position((0, 0, 1, 1))
self.imsh.figure.canvas.draw()
else:
self.imsh.set_data(image)
plt.pause(1e-4)
|
...
if self.imsh is None or not plt.fignum_exists(self.imsh.figure.number):
self.imsh = plt.imshow(image, interpolation='nearest')
self.imsh.axes.axis('off')
self.imsh.axes.set_position((0, 0, 1, 1))
self.imsh.figure.canvas.draw()
else:
self.imsh.set_data(image)
...
|
0b7c27fec5b1b7ececfcf7556f415e8e53cf69b6
|
v1.0/v1.0/search.py
|
v1.0/v1.0/search.py
|
import sys
mainfile = sys.argv[1]
indexfile = sys.argv[1] + ".idx1"
term = sys.argv[2]
main = open(mainfile)
index = open(indexfile)
st = term + ": "
for a in index:
if a.startswith(st):
n = [int(i) for i in a[len(st):].split(", ") if i]
linenum = 0
for l in main:
linenum += 1
if linenum in n:
print linenum, l.rstrip()
break
|
from __future__ import print_function
import sys
mainfile = sys.argv[1]
indexfile = sys.argv[1] + ".idx1"
term = sys.argv[2]
main = open(mainfile)
index = open(indexfile)
st = term + ": "
for a in index:
if a.startswith(st):
n = [int(i) for i in a[len(st):].split(", ") if i]
linenum = 0
for l in main:
linenum += 1
if linenum in n:
print(linenum, l.rstrip())
break
|
Make conformance test 55 compatible with Python 3
|
Make conformance test 55 compatible with Python 3
|
Python
|
apache-2.0
|
curoverse/common-workflow-language,curoverse/common-workflow-language,mr-c/common-workflow-language,common-workflow-language/common-workflow-language,mr-c/common-workflow-language,dleehr/common-workflow-language,dleehr/common-workflow-language,common-workflow-language/common-workflow-language,dleehr/common-workflow-language,mr-c/common-workflow-language,common-workflow-language/common-workflow-language,common-workflow-language/common-workflow-language,dleehr/common-workflow-language
|
python
|
## Code Before:
import sys
mainfile = sys.argv[1]
indexfile = sys.argv[1] + ".idx1"
term = sys.argv[2]
main = open(mainfile)
index = open(indexfile)
st = term + ": "
for a in index:
if a.startswith(st):
n = [int(i) for i in a[len(st):].split(", ") if i]
linenum = 0
for l in main:
linenum += 1
if linenum in n:
print linenum, l.rstrip()
break
## Instruction:
Make conformance test 55 compatible with Python 3
## Code After:
from __future__ import print_function
import sys
mainfile = sys.argv[1]
indexfile = sys.argv[1] + ".idx1"
term = sys.argv[2]
main = open(mainfile)
index = open(indexfile)
st = term + ": "
for a in index:
if a.startswith(st):
n = [int(i) for i in a[len(st):].split(", ") if i]
linenum = 0
for l in main:
linenum += 1
if linenum in n:
print(linenum, l.rstrip())
break
|
...
from __future__ import print_function
import sys
...
for l in main:
linenum += 1
if linenum in n:
print(linenum, l.rstrip())
break
...
|
0336dbba661a6d1035c06bdc20de46bcee5f3876
|
src/java/guangyin/internal/types/TemporalAccessorWrapper.java
|
src/java/guangyin/internal/types/TemporalAccessorWrapper.java
|
package guangyin.internal.types;
import clojure.lang.Keyword;
import clojure.lang.ILookup;
import clojure.lang.IPersistentMap;
import java.time.temporal.TemporalField;
import java.time.temporal.TemporalAccessor;
public class TemporalAccessorWrapper extends ObjectWrapper implements ILookup {
protected IPersistentMap keymap;
private TemporalAccessor wrapped;
public TemporalAccessorWrapper(IPersistentMap keymap, TemporalAccessor wrapped) {
super(wrapped);
this.keymap = keymap;
this.wrapped = wrapped;
}
public Object valAt(Object key) {
return this.valAt(key, null);
}
public Object valAt(Object key, Object notFound) {
Keyword keyword = (Keyword) key;
TemporalField field = (TemporalField) keyword.invoke(this.keymap);
if (field == null || !wrapped.isSupported(field)) {
return notFound;
}
return wrapped.getLong(field);
}
}
|
package guangyin.internal.types;
import clojure.lang.Keyword;
import clojure.lang.ILookup;
import clojure.lang.IMapEntry;
import clojure.lang.IPersistentMap;
import clojure.lang.PersistentArrayMap;
import java.time.temporal.TemporalField;
import java.time.temporal.TemporalAccessor;
public class TemporalAccessorWrapper extends ObjectWrapper implements ILookup {
private TemporalAccessor wrapped;
protected IPersistentMap keymap;
protected IPersistentMap valmap;
public TemporalAccessorWrapper(IPersistentMap keymap, TemporalAccessor wrapped) {
super(wrapped);
this.wrapped = wrapped;
this.keymap = keymap;
IPersistentMap valmap = PersistentArrayMap.EMPTY;
for (Object val : keymap) {
IMapEntry entry = (IMapEntry) val;
TemporalField field = (TemporalField) entry.val();
if (wrapped.isSupported(field)) {
valmap = valmap.assoc(entry.key(), wrapped.getLong(field));
}
}
this.valmap = valmap;
}
public Object valAt(Object key) {
return this.valmap.valAt(key);
}
public Object valAt(Object key, Object notFound) {
return this.valmap.valAt(key, notFound);
}
}
|
Create an actual map of TemporalAccessor in wrapper.
|
Create an actual map of TemporalAccessor in wrapper.
|
Java
|
mit
|
juhovh/guangyin
|
java
|
## Code Before:
package guangyin.internal.types;
import clojure.lang.Keyword;
import clojure.lang.ILookup;
import clojure.lang.IPersistentMap;
import java.time.temporal.TemporalField;
import java.time.temporal.TemporalAccessor;
public class TemporalAccessorWrapper extends ObjectWrapper implements ILookup {
protected IPersistentMap keymap;
private TemporalAccessor wrapped;
public TemporalAccessorWrapper(IPersistentMap keymap, TemporalAccessor wrapped) {
super(wrapped);
this.keymap = keymap;
this.wrapped = wrapped;
}
public Object valAt(Object key) {
return this.valAt(key, null);
}
public Object valAt(Object key, Object notFound) {
Keyword keyword = (Keyword) key;
TemporalField field = (TemporalField) keyword.invoke(this.keymap);
if (field == null || !wrapped.isSupported(field)) {
return notFound;
}
return wrapped.getLong(field);
}
}
## Instruction:
Create an actual map of TemporalAccessor in wrapper.
## Code After:
package guangyin.internal.types;
import clojure.lang.Keyword;
import clojure.lang.ILookup;
import clojure.lang.IMapEntry;
import clojure.lang.IPersistentMap;
import clojure.lang.PersistentArrayMap;
import java.time.temporal.TemporalField;
import java.time.temporal.TemporalAccessor;
public class TemporalAccessorWrapper extends ObjectWrapper implements ILookup {
private TemporalAccessor wrapped;
protected IPersistentMap keymap;
protected IPersistentMap valmap;
public TemporalAccessorWrapper(IPersistentMap keymap, TemporalAccessor wrapped) {
super(wrapped);
this.wrapped = wrapped;
this.keymap = keymap;
IPersistentMap valmap = PersistentArrayMap.EMPTY;
for (Object val : keymap) {
IMapEntry entry = (IMapEntry) val;
TemporalField field = (TemporalField) entry.val();
if (wrapped.isSupported(field)) {
valmap = valmap.assoc(entry.key(), wrapped.getLong(field));
}
}
this.valmap = valmap;
}
public Object valAt(Object key) {
return this.valmap.valAt(key);
}
public Object valAt(Object key, Object notFound) {
return this.valmap.valAt(key, notFound);
}
}
|
// ... existing code ...
import clojure.lang.Keyword;
import clojure.lang.ILookup;
import clojure.lang.IMapEntry;
import clojure.lang.IPersistentMap;
import clojure.lang.PersistentArrayMap;
import java.time.temporal.TemporalField;
import java.time.temporal.TemporalAccessor;
public class TemporalAccessorWrapper extends ObjectWrapper implements ILookup {
private TemporalAccessor wrapped;
protected IPersistentMap keymap;
protected IPersistentMap valmap;
public TemporalAccessorWrapper(IPersistentMap keymap, TemporalAccessor wrapped) {
super(wrapped);
this.wrapped = wrapped;
this.keymap = keymap;
IPersistentMap valmap = PersistentArrayMap.EMPTY;
for (Object val : keymap) {
IMapEntry entry = (IMapEntry) val;
TemporalField field = (TemporalField) entry.val();
if (wrapped.isSupported(field)) {
valmap = valmap.assoc(entry.key(), wrapped.getLong(field));
}
}
this.valmap = valmap;
}
public Object valAt(Object key) {
return this.valmap.valAt(key);
}
public Object valAt(Object key, Object notFound) {
return this.valmap.valAt(key, notFound);
}
}
// ... rest of the code ...
|
15a5e6c1aca706330147475984848dfc33fd1a9d
|
common/djangoapps/mitxmako/tests.py
|
common/djangoapps/mitxmako/tests.py
|
from django.test import TestCase
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.conf import settings
from mitxmako.shortcuts import marketing_link
from mock import patch
class ShortcutsTests(TestCase):
"""
Test the mitxmako shortcuts file
"""
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'about_edx'})
def test_marketing_link(self):
# test marketing site on
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': True}):
expected_link = 'dummy-root/about-us'
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
# test marketing site off
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': False}):
expected_link = reverse('about_edx')
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
|
from django.test import TestCase
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.conf import settings
from mitxmako.shortcuts import marketing_link
from mock import patch
class ShortcutsTests(TestCase):
"""
Test the mitxmako shortcuts file
"""
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'login'})
def test_marketing_link(self):
# test marketing site on
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': True}):
expected_link = 'dummy-root/about-us'
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
# test marketing site off
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': False}):
# we are using login because it is common across both cms and lms
expected_link = reverse('login')
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
|
Fix test so that it works with both CMS and LMS settings
|
Fix test so that it works with both CMS and LMS settings
|
Python
|
agpl-3.0
|
nanolearningllc/edx-platform-cypress,jjmiranda/edx-platform,SivilTaram/edx-platform,olexiim/edx-platform,eduNEXT/edx-platform,bdero/edx-platform,olexiim/edx-platform,nanolearningllc/edx-platform-cypress-2,cyanna/edx-platform,rismalrv/edx-platform,don-github/edx-platform,unicri/edx-platform,xuxiao19910803/edx-platform,Unow/edx-platform,kxliugang/edx-platform,shabab12/edx-platform,Edraak/edraak-platform,rismalrv/edx-platform,DefyVentures/edx-platform,eduNEXT/edunext-platform,beacloudgenius/edx-platform,mjirayu/sit_academy,LICEF/edx-platform,wwj718/edx-platform,EduPepperPDTesting/pepper2013-testing,mitocw/edx-platform,vismartltd/edx-platform,alexthered/kienhoc-platform,devs1991/test_edx_docmode,dkarakats/edx-platform,ak2703/edx-platform,wwj718/edx-platform,zhenzhai/edx-platform,jswope00/GAI,yokose-ks/edx-platform,Kalyzee/edx-platform,inares/edx-platform,mbareta/edx-platform-ft,PepperPD/edx-pepper-platform,arifsetiawan/edx-platform,itsjeyd/edx-platform,PepperPD/edx-pepper-platform,mbareta/edx-platform-ft,eestay/edx-platform,jamiefolsom/edx-platform,martynovp/edx-platform,DefyVentures/edx-platform,vasyarv/edx-platform,zofuthan/edx-platform,antoviaque/edx-platform,zadgroup/edx-platform,dsajkl/123,waheedahmed/edx-platform,arbrandes/edx-platform,hkawasaki/kawasaki-aio8-2,olexiim/edx-platform,nttks/jenkins-test,Softmotions/edx-platform,longmen21/edx-platform,solashirai/edx-platform,polimediaupv/edx-platform,torchingloom/edx-platform,xuxiao19910803/edx-platform,adoosii/edx-platform,Unow/edx-platform,ubc/edx-platform,pomegranited/edx-platform,jruiperezv/ANALYSE,cpennington/edx-platform,fly19890211/edx-platform,leansoft/edx-platform,raccoongang/edx-platform,yokose-ks/edx-platform,Livit/Livit.Learn.EdX,kursitet/edx-platform,J861449197/edx-platform,Edraak/circleci-edx-platform,hkawasaki/kawasaki-aio8-1,dcosentino/edx-platform,hkawasaki/kawasaki-aio8-0,playm2mboy/edx-platform,stvstnfrd/edx-platform,martynovp/edx-platform,UXE/local-edx,OmarIthawi/edx-platform,valtech-mooc/edx-platform,sameetb-cuelogic/edx-platform-test,nanolearning/edx-platform,teltek/edx-platform,Lektorium-LLC/edx-platform,Ayub-Khan/edx-platform,xuxiao19910803/edx-platform,rismalrv/edx-platform,mjirayu/sit_academy,etzhou/edx-platform,dkarakats/edx-platform,eduNEXT/edx-platform,EduPepperPD/pepper2013,morpheby/levelup-by,Unow/edx-platform,abdoosh00/edx-rtl-final,shurihell/testasia,eestay/edx-platform,yokose-ks/edx-platform,msegado/edx-platform,cognitiveclass/edx-platform,WatanabeYasumasa/edx-platform,sameetb-cuelogic/edx-platform-test,shashank971/edx-platform,appsembler/edx-platform,defance/edx-platform,waheedahmed/edx-platform,UOMx/edx-platform,CourseTalk/edx-platform,gsehub/edx-platform,rationalAgent/edx-platform-custom,abdoosh00/edx-rtl-final,peterm-itr/edx-platform,utecuy/edx-platform,jazkarta/edx-platform-for-isc,ahmadiga/min_edx,synergeticsedx/deployment-wipro,ESOedX/edx-platform,andyzsf/edx,ovnicraft/edx-platform,cyanna/edx-platform,TsinghuaX/edx-platform,rhndg/openedx,alexthered/kienhoc-platform,TsinghuaX/edx-platform,carsongee/edx-platform,franosincic/edx-platform,xuxiao19910803/edx-platform,TsinghuaX/edx-platform,benpatterson/edx-platform,kmoocdev/edx-platform,DNFcode/edx-platform,hastexo/edx-platform,kursitet/edx-platform,IndonesiaX/edx-platform,cselis86/edx-platform,a-parhom/edx-platform,stvstnfrd/edx-platform,BehavioralInsightsTeam/edx-platform,shashank971/edx-platform,rue89-tech/edx-platform,bdero/edx-platform,itsjeyd/edx-platform,atsolakid/edx-platform,ahmadiga/min_edx,vikas1885/test1,fintech-circle/edx-platform,hmcmooc/muddx
-platform,LICEF/edx-platform,gymnasium/edx-platform,ferabra/edx-platform,jazkarta/edx-platform,romain-li/edx-platform,eemirtekin/edx-platform,deepsrijit1105/edx-platform,ampax/edx-platform,nanolearningllc/edx-platform-cypress-2,eduNEXT/edunext-platform,IONISx/edx-platform,jruiperezv/ANALYSE,utecuy/edx-platform,antoviaque/edx-platform,chudaol/edx-platform,zadgroup/edx-platform,hkawasaki/kawasaki-aio8-0,hkawasaki/kawasaki-aio8-1,zadgroup/edx-platform,caesar2164/edx-platform,nanolearning/edx-platform,jolyonb/edx-platform,zubair-arbi/edx-platform,msegado/edx-platform,Endika/edx-platform,doismellburning/edx-platform,nanolearningllc/edx-platform-cypress-2,ahmadio/edx-platform,kamalx/edx-platform,mahendra-r/edx-platform,TeachAtTUM/edx-platform,jamiefolsom/edx-platform,hkawasaki/kawasaki-aio8-1,ampax/edx-platform,itsjeyd/edx-platform,valtech-mooc/edx-platform,EDUlib/edx-platform,shubhdev/edxOnBaadal,eestay/edx-platform,chand3040/cloud_that,LICEF/edx-platform,RPI-OPENEDX/edx-platform,solashirai/edx-platform,zerobatu/edx-platform,eestay/edx-platform,jswope00/GAI,jonathan-beard/edx-platform,pelikanchik/edx-platform,wwj718/ANALYSE,beacloudgenius/edx-platform,jamesblunt/edx-platform,romain-li/edx-platform,TeachAtTUM/edx-platform,simbs/edx-platform,synergeticsedx/deployment-wipro,Lektorium-LLC/edx-platform,Kalyzee/edx-platform,RPI-OPENEDX/edx-platform,DefyVentures/edx-platform,arbrandes/edx-platform,CredoReference/edx-platform,chauhanhardik/populo_2,apigee/edx-platform,ampax/edx-platform-backup,doganov/edx-platform,AkA84/edx-platform,IONISx/edx-platform,ahmadio/edx-platform,MSOpenTech/edx-platform,utecuy/edx-platform,praveen-pal/edx-platform,zofuthan/edx-platform,hkawasaki/kawasaki-aio8-2,LearnEra/LearnEraPlaftform,romain-li/edx-platform,hmcmooc/muddx-platform,rhndg/openedx,CredoReference/edx-platform,motion2015/a3,abdoosh00/edraak,shubhdev/openedx,jolyonb/edx-platform,jazkarta/edx-platform-for-isc,mushtaqak/edx-platform,cecep-edu/edx-platform,sameetb-cuelogic/edx-platform-test,ZLLab-Mooc/edx-platform,DNFcode/edx-platform,B-MOOC/edx-platform,Livit/Livit.Learn.EdX,UOMx/edx-platform,B-MOOC/edx-platform,J861449197/edx-platform,tiagochiavericosta/edx-platform,utecuy/edx-platform,halvertoluke/edx-platform,don-github/edx-platform,jelugbo/tundex,AkA84/edx-platform,teltek/edx-platform,ovnicraft/edx-platform,cselis86/edx-platform,pomegranited/edx-platform,ubc/edx-platform,miptliot/edx-platform,hamzehd/edx-platform,jazkarta/edx-platform-for-isc,gymnasium/edx-platform,Semi-global/edx-platform,jelugbo/tundex,bigdatauniversity/edx-platform,PepperPD/edx-pepper-platform,motion2015/edx-platform,zhenzhai/edx-platform,franosincic/edx-platform,fly19890211/edx-platform,leansoft/edx-platform,philanthropy-u/edx-platform,ahmadiga/min_edx,SravanthiSinha/edx-platform,jbzdak/edx-platform,proversity-org/edx-platform,Endika/edx-platform,kmoocdev2/edx-platform,zofuthan/edx-platform,Livit/Livit.Learn.EdX,atsolakid/edx-platform,hkawasaki/kawasaki-aio8-2,pomegranited/edx-platform,Ayub-Khan/edx-platform,doganov/edx-platform,marcore/edx-platform,jzoldak/edx-platform,jzoldak/edx-platform,nttks/jenkins-test,openfun/edx-platform,eemirtekin/edx-platform,jamesblunt/edx-platform,Kalyzee/edx-platform,JCBarahona/edX,RPI-OPENEDX/edx-platform,JioEducation/edx-platform,Stanford-Online/edx-platform,JioEducation/edx-platform,playm2mboy/edx-platform,solashirai/edx-platform,devs1991/test_edx_docmode,wwj718/edx-platform,xuxiao19910803/edx,mjirayu/sit_academy,cognitiveclass/edx-platform,xingyepei/edx-platform,4eek/edx-platform,yokose-ks/edx-platform,Edra
ak/circleci-edx-platform,Edraak/edraak-platform,y12uc231/edx-platform,xingyepei/edx-platform,inares/edx-platform,miptliot/edx-platform,pabloborrego93/edx-platform,longmen21/edx-platform,proversity-org/edx-platform,chauhanhardik/populo_2,hamzehd/edx-platform,cselis86/edx-platform,appliedx/edx-platform,xuxiao19910803/edx,lduarte1991/edx-platform,nagyistoce/edx-platform,shashank971/edx-platform,polimediaupv/edx-platform,praveen-pal/edx-platform,shubhdev/openedx,edx-solutions/edx-platform,cyanna/edx-platform,hamzehd/edx-platform,EduPepperPDTesting/pepper2013-testing,romain-li/edx-platform,SivilTaram/edx-platform,EduPepperPD/pepper2013,vasyarv/edx-platform,jbassen/edx-platform,miptliot/edx-platform,nagyistoce/edx-platform,PepperPD/edx-pepper-platform,deepsrijit1105/edx-platform,zadgroup/edx-platform,4eek/edx-platform,unicri/edx-platform,kxliugang/edx-platform,RPI-OPENEDX/edx-platform,deepsrijit1105/edx-platform,mushtaqak/edx-platform,ovnicraft/edx-platform,zerobatu/edx-platform,zofuthan/edx-platform,naresh21/synergetics-edx-platform,jruiperezv/ANALYSE,inares/edx-platform,motion2015/a3,shubhdev/edx-platform,motion2015/a3,OmarIthawi/edx-platform,longmen21/edx-platform,kamalx/edx-platform,mbareta/edx-platform-ft,stvstnfrd/edx-platform,sudheerchintala/LearnEraPlatForm,waheedahmed/edx-platform,vasyarv/edx-platform,jbassen/edx-platform,morpheby/levelup-by,kamalx/edx-platform,kmoocdev/edx-platform,doganov/edx-platform,carsongee/edx-platform,appliedx/edx-platform,MSOpenTech/edx-platform,TeachAtTUM/edx-platform,Kalyzee/edx-platform,zhenzhai/edx-platform,pku9104038/edx-platform,msegado/edx-platform,vismartltd/edx-platform,torchingloom/edx-platform,shashank971/edx-platform,EduPepperPD/pepper2013,nanolearning/edx-platform,cognitiveclass/edx-platform,chrisndodge/edx-platform,tiagochiavericosta/edx-platform,jzoldak/edx-platform,simbs/edx-platform,pabloborrego93/edx-platform,torchingloom/edx-platform,rue89-tech/edx-platform,jolyonb/edx-platform,MSOpenTech/edx-platform,auferack08/edx-platform,nikolas/edx-platform,Endika/edx-platform,EDUlib/edx-platform,4eek/edx-platform,SivilTaram/edx-platform,CourseTalk/edx-platform,xinjiguaike/edx-platform,syjeon/new_edx,kmoocdev/edx-platform,kxliugang/edx-platform,kalebhartje/schoolboost,Edraak/edx-platform,shabab12/edx-platform,nagyistoce/edx-platform,synergeticsedx/deployment-wipro,ZLLab-Mooc/edx-platform,nttks/jenkins-test,pku9104038/edx-platform,philanthropy-u/edx-platform,teltek/edx-platform,Semi-global/edx-platform,dsajkl/123,IONISx/edx-platform,chauhanhardik/populo,JioEducation/edx-platform,lduarte1991/edx-platform,zubair-arbi/edx-platform,JioEducation/edx-platform,xinjiguaike/edx-platform,abdoosh00/edx-rtl-final,B-MOOC/edx-platform,nikolas/edx-platform,nttks/edx-platform,etzhou/edx-platform,cpennington/edx-platform,mushtaqak/edx-platform,Semi-global/edx-platform,SravanthiSinha/edx-platform,angelapper/edx-platform,playm2mboy/edx-platform,inares/edx-platform,jelugbo/tundex,EduPepperPDTesting/pepper2013-testing,pdehaye/theming-edx-platform,LICEF/edx-platform,xinjiguaike/edx-platform,openfun/edx-platform,unicri/edx-platform,Shrhawk/edx-platform,don-github/edx-platform,edry/edx-platform,jazztpt/edx-platform,torchingloom/edx-platform,tanmaykm/edx-platform,beacloudgenius/edx-platform,nttks/edx-platform,IITBinterns13/edx-platform-dev,kalebhartje/schoolboost,doismellburning/edx-platform,franosincic/edx-platform,vismartltd/edx-platform,arifsetiawan/edx-platform,kalebhartje/schoolboost,jruiperezv/ANALYSE,ovnicraft/edx-platform,kmoocdev2/edx-platform,jamesblunt/edx-platform,nttk
s/jenkins-test,alu042/edx-platform,chauhanhardik/populo_2,mtlchun/edx,syjeon/new_edx,sudheerchintala/LearnEraPlatForm,simbs/edx-platform,hamzehd/edx-platform,hastexo/edx-platform,10clouds/edx-platform,IITBinterns13/edx-platform-dev,valtech-mooc/edx-platform,B-MOOC/edx-platform,etzhou/edx-platform,mahendra-r/edx-platform,nanolearningllc/edx-platform-cypress,jazztpt/edx-platform,dkarakats/edx-platform,mjg2203/edx-platform-seas,procangroup/edx-platform,antonve/s4-project-mooc,BehavioralInsightsTeam/edx-platform,xingyepei/edx-platform,rue89-tech/edx-platform,J861449197/edx-platform,jswope00/GAI,benpatterson/edx-platform,beni55/edx-platform,pabloborrego93/edx-platform,beni55/edx-platform,pomegranited/edx-platform,jazztpt/edx-platform,prarthitm/edxplatform,unicri/edx-platform,vismartltd/edx-platform,arifsetiawan/edx-platform,MSOpenTech/edx-platform,MakeHer/edx-platform,etzhou/edx-platform,BehavioralInsightsTeam/edx-platform,zerobatu/edx-platform,shubhdev/edxOnBaadal,Shrhawk/edx-platform,edry/edx-platform,abdoosh00/edraak,jazkarta/edx-platform,pku9104038/edx-platform,nikolas/edx-platform,mahendra-r/edx-platform,stvstnfrd/edx-platform,mushtaqak/edx-platform,valtech-mooc/edx-platform,nanolearningllc/edx-platform-cypress,MakeHer/edx-platform,procangroup/edx-platform,ampax/edx-platform-backup,knehez/edx-platform,lduarte1991/edx-platform,zhenzhai/edx-platform,ferabra/edx-platform,benpatterson/edx-platform,andyzsf/edx,abdoosh00/edraak,Edraak/edx-platform,fintech-circle/edx-platform,wwj718/edx-platform,jonathan-beard/edx-platform,mahendra-r/edx-platform,Kalyzee/edx-platform,eestay/edx-platform,doismellburning/edx-platform,chudaol/edx-platform,bitifirefly/edx-platform,devs1991/test_edx_docmode,knehez/edx-platform,antoviaque/edx-platform,cecep-edu/edx-platform,alu042/edx-platform,solashirai/edx-platform,beacloudgenius/edx-platform,Unow/edx-platform,a-parhom/edx-platform,jbassen/edx-platform,dcosentino/edx-platform,jswope00/GAI,dsajkl/reqiop,y12uc231/edx-platform,olexiim/edx-platform,playm2mboy/edx-platform,beni55/edx-platform,knehez/edx-platform,J861449197/edx-platform,kalebhartje/schoolboost,mjg2203/edx-platform-seas,defance/edx-platform,mitocw/edx-platform,praveen-pal/edx-platform,ampax/edx-platform-backup,RPI-OPENEDX/edx-platform,chand3040/cloud_that,BehavioralInsightsTeam/edx-platform,jbassen/edx-platform,alu042/edx-platform,adoosii/edx-platform,martynovp/edx-platform,IONISx/edx-platform,jolyonb/edx-platform,dsajkl/123,wwj718/edx-platform,chauhanhardik/populo,chudaol/edx-platform,cognitiveclass/edx-platform,ampax/edx-platform,utecuy/edx-platform,bitifirefly/edx-platform,eemirtekin/edx-platform,J861449197/edx-platform,UOMx/edx-platform,JCBarahona/edX,apigee/edx-platform,caesar2164/edx-platform,gsehub/edx-platform,eduNEXT/edx-platform,ZLLab-Mooc/edx-platform,philanthropy-u/edx-platform,tiagochiavericosta/edx-platform,vasyarv/edx-platform,Stanford-Online/edx-platform,morenopc/edx-platform,hastexo/edx-platform,jjmiranda/edx-platform,shashank971/edx-platform,fly19890211/edx-platform,jzoldak/edx-platform,polimediaupv/edx-platform,shurihell/testasia,jbassen/edx-platform,chrisndodge/edx-platform,xingyepei/edx-platform,kamalx/edx-platform,edx-solutions/edx-platform,ahmedaljazzar/edx-platform,vismartltd/edx-platform,benpatterson/edx-platform,CredoReference/edx-platform,dsajkl/123,rationalAgent/edx-platform-custom,OmarIthawi/edx-platform,Edraak/circleci-edx-platform,EDUlib/edx-platform,leansoft/edx-platform,iivic/BoiseStateX,antonve/s4-project-mooc,carsongee/edx-platform,louyihua/edx-platform,jbzdak/edx-platform,
kursitet/edx-platform,philanthropy-u/edx-platform,PepperPD/edx-pepper-platform,zubair-arbi/edx-platform,praveen-pal/edx-platform,defance/edx-platform,rismalrv/edx-platform,ubc/edx-platform,mbareta/edx-platform-ft,WatanabeYasumasa/edx-platform,peterm-itr/edx-platform,arbrandes/edx-platform,ampax/edx-platform-backup,appliedx/edx-platform,morenopc/edx-platform,CredoReference/edx-platform,antonve/s4-project-mooc,fly19890211/edx-platform,rationalAgent/edx-platform-custom,jazkarta/edx-platform,eemirtekin/edx-platform,mjirayu/sit_academy,LICEF/edx-platform,analyseuc3m/ANALYSE-v1,cpennington/edx-platform,pdehaye/theming-edx-platform,analyseuc3m/ANALYSE-v1,ahmadiga/min_edx,chauhanhardik/populo_2,syjeon/new_edx,halvertoluke/edx-platform,Semi-global/edx-platform,bigdatauniversity/edx-platform,shurihell/testasia,fintech-circle/edx-platform,hkawasaki/kawasaki-aio8-0,kmoocdev/edx-platform,sudheerchintala/LearnEraPlatForm,msegado/edx-platform,jazztpt/edx-platform,EduPepperPD/pepper2013,4eek/edx-platform,dsajkl/reqiop,Endika/edx-platform,mcgachey/edx-platform,pku9104038/edx-platform,motion2015/a3,leansoft/edx-platform,shubhdev/openedx,SivilTaram/edx-platform,raccoongang/edx-platform,chrisndodge/edx-platform,mjg2203/edx-platform-seas,valtech-mooc/edx-platform,pdehaye/theming-edx-platform,jswope00/griffinx,ahmedaljazzar/edx-platform,atsolakid/edx-platform,edx/edx-platform,nikolas/edx-platform,ovnicraft/edx-platform,ZLLab-Mooc/edx-platform,peterm-itr/edx-platform,unicri/edx-platform,dkarakats/edx-platform,mitocw/edx-platform,10clouds/edx-platform,devs1991/test_edx_docmode,franosincic/edx-platform,mtlchun/edx,LearnEra/LearnEraPlaftform,analyseuc3m/ANALYSE-v1,jazkarta/edx-platform-for-isc,waheedahmed/edx-platform,edx/edx-platform,nanolearningllc/edx-platform-cypress-2,defance/edx-platform,Edraak/circleci-edx-platform,mushtaqak/edx-platform,nanolearningllc/edx-platform-cypress-2,tiagochiavericosta/edx-platform,polimediaupv/edx-platform,Shrhawk/edx-platform,dcosentino/edx-platform,IITBinterns13/edx-platform-dev,EduPepperPD/pepper2013,jbzdak/edx-platform,don-github/edx-platform,torchingloom/edx-platform,gsehub/edx-platform,vasyarv/edx-platform,angelapper/edx-platform,ESOedX/edx-platform,kmoocdev/edx-platform,simbs/edx-platform,LearnEra/LearnEraPlaftform,antonve/s4-project-mooc,itsjeyd/edx-platform,Lektorium-LLC/edx-platform,marcore/edx-platform,appsembler/edx-platform,AkA84/edx-platform,fintech-circle/edx-platform,tiagochiavericosta/edx-platform,a-parhom/edx-platform,appsembler/edx-platform,cselis86/edx-platform,iivic/BoiseStateX,ahmedaljazzar/edx-platform,chauhanhardik/populo,hmcmooc/muddx-platform,halvertoluke/edx-platform,sudheerchintala/LearnEraPlatForm,pelikanchik/edx-platform,zofuthan/edx-platform,TeachAtTUM/edx-platform,tanmaykm/edx-platform,bdero/edx-platform,procangroup/edx-platform,hamzehd/edx-platform,appliedx/edx-platform,mitocw/edx-platform,etzhou/edx-platform,shurihell/testasia,vikas1885/test1,chauhanhardik/populo,IONISx/edx-platform,Edraak/circleci-edx-platform,rismalrv/edx-platform,shubhdev/edx-platform,dsajkl/reqiop,alexthered/kienhoc-platform,DNFcode/edx-platform,hastexo/edx-platform,doganov/edx-platform,cselis86/edx-platform,don-github/edx-platform,jbzdak/edx-platform,EduPepperPDTesting/pepper2013-testing,jonathan-beard/edx-platform,openfun/edx-platform,analyseuc3m/ANALYSE-v1,pomegranited/edx-platform,arifsetiawan/edx-platform,AkA84/edx-platform,miptliot/edx-platform,jswope00/griffinx,OmarIthawi/edx-platform,edry/edx-platform,auferack08/edx-platform,jamiefolsom/edx-platform,nttks/edx-platform,ja
mesblunt/edx-platform,xingyepei/edx-platform,bigdatauniversity/edx-platform,bitifirefly/edx-platform,Softmotions/edx-platform,bigdatauniversity/edx-platform,jbzdak/edx-platform,playm2mboy/edx-platform,rationalAgent/edx-platform-custom,mahendra-r/edx-platform,rationalAgent/edx-platform-custom,romain-li/edx-platform,bitifirefly/edx-platform,devs1991/test_edx_docmode,xuxiao19910803/edx-platform,shubhdev/openedx,zhenzhai/edx-platform,atsolakid/edx-platform,chauhanhardik/populo,sameetb-cuelogic/edx-platform-test,ahmadio/edx-platform,jazkarta/edx-platform-for-isc,naresh21/synergetics-edx-platform,solashirai/edx-platform,alexthered/kienhoc-platform,kamalx/edx-platform,apigee/edx-platform,DNFcode/edx-platform,nagyistoce/edx-platform,mcgachey/edx-platform,Lektorium-LLC/edx-platform,MakeHer/edx-platform,shubhdev/edxOnBaadal,nikolas/edx-platform,edry/edx-platform,zubair-arbi/edx-platform,morpheby/levelup-by,martynovp/edx-platform,motion2015/a3,devs1991/test_edx_docmode,hkawasaki/kawasaki-aio8-0,appliedx/edx-platform,wwj718/ANALYSE,nttks/edx-platform,zubair-arbi/edx-platform,ESOedX/edx-platform,bigdatauniversity/edx-platform,devs1991/test_edx_docmode,MSOpenTech/edx-platform,alexthered/kienhoc-platform,deepsrijit1105/edx-platform,morenopc/edx-platform,pepeportela/edx-platform,leansoft/edx-platform,nanolearningllc/edx-platform-cypress,cecep-edu/edx-platform,raccoongang/edx-platform,jamiefolsom/edx-platform,IITBinterns13/edx-platform-dev,openfun/edx-platform,Edraak/edx-platform,shurihell/testasia,shabab12/edx-platform,jonathan-beard/edx-platform,UXE/local-edx,andyzsf/edx,morenopc/edx-platform,doismellburning/edx-platform,jazkarta/edx-platform,ak2703/edx-platform,y12uc231/edx-platform,jelugbo/tundex,edx/edx-platform,cecep-edu/edx-platform,amir-qayyum-khan/edx-platform,ESOedX/edx-platform,TsinghuaX/edx-platform,WatanabeYasumasa/edx-platform,MakeHer/edx-platform,CourseTalk/edx-platform,eduNEXT/edunext-platform,Softmotions/edx-platform,10clouds/edx-platform,ampax/edx-platform-backup,Stanford-Online/edx-platform,devs1991/test_edx_docmode,wwj718/ANALYSE,nttks/jenkins-test,atsolakid/edx-platform,edx-solutions/edx-platform,dsajkl/123,arifsetiawan/edx-platform,hkawasaki/kawasaki-aio8-1,bdero/edx-platform,jamesblunt/edx-platform,SravanthiSinha/edx-platform,eemirtekin/edx-platform,amir-qayyum-khan/edx-platform,Shrhawk/edx-platform,alu042/edx-platform,chudaol/edx-platform,jjmiranda/edx-platform,Edraak/edraak-platform,pelikanchik/edx-platform,dcosentino/edx-platform,bitifirefly/edx-platform,xinjiguaike/edx-platform,Softmotions/edx-platform,adoosii/edx-platform,xuxiao19910803/edx,prarthitm/edxplatform,chand3040/cloud_that,DNFcode/edx-platform,cecep-edu/edx-platform,auferack08/edx-platform,edx-solutions/edx-platform,rhndg/openedx,benpatterson/edx-platform,IndonesiaX/edx-platform,wwj718/ANALYSE,jjmiranda/edx-platform,Softmotions/edx-platform,4eek/edx-platform,marcore/edx-platform,pdehaye/theming-edx-platform,ak2703/edx-platform,kalebhartje/schoolboost,eduNEXT/edunext-platform,gymnasium/edx-platform,ZLLab-Mooc/edx-platform,mcgachey/edx-platform,cpennington/edx-platform,fly19890211/edx-platform,xuxiao19910803/edx,angelapper/edx-platform,longmen21/edx-platform,inares/edx-platform,SivilTaram/edx-platform,antonve/s4-project-mooc,prarthitm/edxplatform,gymnasium/edx-platform,mtlchun/edx,nanolearning/edx-platform,doismellburning/edx-platform,ubc/edx-platform,louyihua/edx-platform,halvertoluke/edx-platform,AkA84/edx-platform,knehez/edx-platform,arbrandes/edx-platform,cyanna/edx-platform,apigee/edx-platform,ahmedaljazzar/edx-platf
orm,UXE/local-edx,andyzsf/edx,kursitet/edx-platform,nagyistoce/edx-platform,martynovp/edx-platform,ak2703/edx-platform,abdoosh00/edraak,SravanthiSinha/edx-platform,raccoongang/edx-platform,simbs/edx-platform,hmcmooc/muddx-platform,proversity-org/edx-platform,mjg2203/edx-platform-seas,antoviaque/edx-platform,WatanabeYasumasa/edx-platform,polimediaupv/edx-platform,xinjiguaike/edx-platform,vikas1885/test1,Edraak/edx-platform,a-parhom/edx-platform,mjirayu/sit_academy,doganov/edx-platform,EDUlib/edx-platform,UXE/local-edx,pepeportela/edx-platform,tanmaykm/edx-platform,yokose-ks/edx-platform,waheedahmed/edx-platform,wwj718/ANALYSE,EduPepperPDTesting/pepper2013-testing,halvertoluke/edx-platform,cyanna/edx-platform,kxliugang/edx-platform,shubhdev/openedx,procangroup/edx-platform,zerobatu/edx-platform,jelugbo/tundex,auferack08/edx-platform,proversity-org/edx-platform,abdoosh00/edx-rtl-final,teltek/edx-platform,nanolearningllc/edx-platform-cypress,msegado/edx-platform,chand3040/cloud_that,jamiefolsom/edx-platform,vikas1885/test1,nttks/edx-platform,carsongee/edx-platform,kmoocdev2/edx-platform,edry/edx-platform,EduPepperPDTesting/pepper2013-testing,franosincic/edx-platform,kmoocdev2/edx-platform,ak2703/edx-platform,marcore/edx-platform,chauhanhardik/populo_2,Edraak/edraak-platform,peterm-itr/edx-platform,dcosentino/edx-platform,adoosii/edx-platform,mcgachey/edx-platform,y12uc231/edx-platform,nanolearning/edx-platform,shubhdev/edx-platform,olexiim/edx-platform,SravanthiSinha/edx-platform,beni55/edx-platform,beacloudgenius/edx-platform,10clouds/edx-platform,IndonesiaX/edx-platform,ferabra/edx-platform,knehez/edx-platform,shabab12/edx-platform,caesar2164/edx-platform,jswope00/griffinx,shubhdev/edx-platform,mtlchun/edx,amir-qayyum-khan/edx-platform,CourseTalk/edx-platform,zadgroup/edx-platform,IndonesiaX/edx-platform,ampax/edx-platform,DefyVentures/edx-platform,pepeportela/edx-platform,jswope00/griffinx,jazkarta/edx-platform,rue89-tech/edx-platform,B-MOOC/edx-platform,iivic/BoiseStateX,kursitet/edx-platform,pepeportela/edx-platform,pabloborrego93/edx-platform,zerobatu/edx-platform,dkarakats/edx-platform,LearnEra/LearnEraPlaftform,longmen21/edx-platform,motion2015/edx-platform,iivic/BoiseStateX,iivic/BoiseStateX,louyihua/edx-platform,JCBarahona/edX,MakeHer/edx-platform,cognitiveclass/edx-platform,hkawasaki/kawasaki-aio8-2,jazztpt/edx-platform,synergeticsedx/deployment-wipro,rhndg/openedx,Livit/Livit.Learn.EdX,shubhdev/edx-platform,sameetb-cuelogic/edx-platform-test,angelapper/edx-platform,Ayub-Khan/edx-platform,caesar2164/edx-platform,motion2015/edx-platform,Ayub-Khan/edx-platform,motion2015/edx-platform,prarthitm/edxplatform,mtlchun/edx,shubhdev/edxOnBaadal,naresh21/synergetics-edx-platform,chand3040/cloud_that,naresh21/synergetics-edx-platform,adoosii/edx-platform,ahmadiga/min_edx,motion2015/edx-platform,amir-qayyum-khan/edx-platform,chrisndodge/edx-platform,appsembler/edx-platform,mcgachey/edx-platform,vikas1885/test1,kxliugang/edx-platform,y12uc231/edx-platform,morpheby/levelup-by,IndonesiaX/edx-platform,beni55/edx-platform,gsehub/edx-platform,morenopc/edx-platform,jruiperezv/ANALYSE,ahmadio/edx-platform,chudaol/edx-platform,edx/edx-platform,DefyVentures/edx-platform,louyihua/edx-platform,ahmadio/edx-platform,ferabra/edx-platform,Shrhawk/edx-platform,ubc/edx-platform,Ayub-Khan/edx-platform,jonathan-beard/edx-platform,jswope00/griffinx,rue89-tech/edx-platform,UOMx/edx-platform,Edraak/edx-platform,shubhdev/edxOnBaadal,Stanford-Online/edx-platform,eduNEXT/edx-platform,lduarte1991/edx-platform,Semi-global/
edx-platform,JCBarahona/edX,syjeon/new_edx,openfun/edx-platform,ferabra/edx-platform,pelikanchik/edx-platform,xuxiao19910803/edx,JCBarahona/edX,tanmaykm/edx-platform,rhndg/openedx,kmoocdev2/edx-platform,dsajkl/reqiop
|
python
|
## Code Before:
from django.test import TestCase
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.conf import settings
from mitxmako.shortcuts import marketing_link
from mock import patch
class ShortcutsTests(TestCase):
"""
Test the mitxmako shortcuts file
"""
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'about_edx'})
def test_marketing_link(self):
# test marketing site on
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': True}):
expected_link = 'dummy-root/about-us'
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
# test marketing site off
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': False}):
expected_link = reverse('about_edx')
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
## Instruction:
Fix test so that it works with both CMS and LMS settings
## Code After:
from django.test import TestCase
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.conf import settings
from mitxmako.shortcuts import marketing_link
from mock import patch
class ShortcutsTests(TestCase):
"""
Test the mitxmako shortcuts file
"""
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'login'})
def test_marketing_link(self):
# test marketing site on
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': True}):
expected_link = 'dummy-root/about-us'
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
# test marketing site off
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': False}):
# we are using login because it is common across both cms and lms
expected_link = reverse('login')
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
|
// ... existing code ...
"""
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'login'})
def test_marketing_link(self):
# test marketing site on
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': True}):
// ... modified code ...
self.assertEquals(link, expected_link)
# test marketing site off
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': False}):
# we are using login because it is common across both cms and lms
expected_link = reverse('login')
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
// ... rest of the code ...
|
85220f2830d355245803965ee57886e5c1268833
|
tests/unit/test_twitter.py
|
tests/unit/test_twitter.py
|
from unfurl import Unfurl
import unittest
class TestTwitter(unittest.TestCase):
def test_twitter(self):
""" Test a tyipcal and a unique Discord url """
# unit test for a unique Discord url.
test = Unfurl()
test.add_to_queue(data_type='url', key=None,
value='https://twitter.com/_RyanBenson/status/1098230906194546688')
test.parse_queue()
# test number of nodes
self.assertEqual(len(test.nodes.keys()), 13)
self.assertEqual(test.total_nodes, 13)
# is processing finished empty
self.assertTrue(test.queue.empty())
self.assertEqual(len(test.edges), 0)
if __name__ == '__main__':
unittest.main()
|
from unfurl import Unfurl
import unittest
class TestTwitter(unittest.TestCase):
def test_twitter(self):
""" Test a typical and a unique Twitter url """
test = Unfurl()
test.add_to_queue(
data_type='url', key=None,
value='https://twitter.com/_RyanBenson/status/1098230906194546688')
test.parse_queue()
# check the number of nodes
self.assertEqual(len(test.nodes.keys()), 13)
self.assertEqual(test.total_nodes, 13)
# confirm that snowflake was detected
self.assertIn('Twitter Snowflakes', test.nodes[9].hover)
# embedded timestamp parses correctly
self.assertEqual('2019-02-20 14:40:26.837', test.nodes[13].value)
# make sure the queue finished empty
self.assertTrue(test.queue.empty())
self.assertEqual(len(test.edges), 0)
if __name__ == '__main__':
unittest.main()
|
Update Twitter test to be more robust
|
Update Twitter test to be more robust
|
Python
|
apache-2.0
|
obsidianforensics/unfurl,obsidianforensics/unfurl
|
python
|
## Code Before:
from unfurl import Unfurl
import unittest
class TestTwitter(unittest.TestCase):
def test_twitter(self):
""" Test a tyipcal and a unique Discord url """
# unit test for a unique Discord url.
test = Unfurl()
test.add_to_queue(data_type='url', key=None,
value='https://twitter.com/_RyanBenson/status/1098230906194546688')
test.parse_queue()
# test number of nodes
self.assertEqual(len(test.nodes.keys()), 13)
self.assertEqual(test.total_nodes, 13)
# is processing finished empty
self.assertTrue(test.queue.empty())
self.assertEqual(len(test.edges), 0)
if __name__ == '__main__':
unittest.main()
## Instruction:
Update Twitter test to be more robust
## Code After:
from unfurl import Unfurl
import unittest
class TestTwitter(unittest.TestCase):
def test_twitter(self):
""" Test a typical and a unique Twitter url """
test = Unfurl()
test.add_to_queue(
data_type='url', key=None,
value='https://twitter.com/_RyanBenson/status/1098230906194546688')
test.parse_queue()
# check the number of nodes
self.assertEqual(len(test.nodes.keys()), 13)
self.assertEqual(test.total_nodes, 13)
# confirm that snowflake was detected
self.assertIn('Twitter Snowflakes', test.nodes[9].hover)
# embedded timestamp parses correctly
self.assertEqual('2019-02-20 14:40:26.837', test.nodes[13].value)
# make sure the queue finished empty
self.assertTrue(test.queue.empty())
self.assertEqual(len(test.edges), 0)
if __name__ == '__main__':
unittest.main()
|
# ... existing code ...
import unittest
class TestTwitter(unittest.TestCase):
def test_twitter(self):
""" Test a typical and a unique Twitter url """
test = Unfurl()
test.add_to_queue(
data_type='url', key=None,
value='https://twitter.com/_RyanBenson/status/1098230906194546688')
test.parse_queue()
# check the number of nodes
self.assertEqual(len(test.nodes.keys()), 13)
self.assertEqual(test.total_nodes, 13)
# confirm that snowflake was detected
self.assertIn('Twitter Snowflakes', test.nodes[9].hover)
# embedded timestamp parses correctly
self.assertEqual('2019-02-20 14:40:26.837', test.nodes[13].value)
# make sure the queue finished empty
self.assertTrue(test.queue.empty())
self.assertEqual(len(test.edges), 0)
if __name__ == '__main__':
unittest.main()
# ... rest of the code ...
|
ae593fd5de74a123b5064ef2e018b4955dc9e6c9
|
runserver.py
|
runserver.py
|
import sys
from datetime import date
from main import app
from upload_s3 import set_metadata
from flask_frozen import Freezer
# cron is called with 3 arguments, should only run in the first week of month
cron_condition = len(sys.argv) == 3 and date.today().day > 8
force_update = len(sys.argv) == 2 and sys.argv[1] == 'freeze'
if len(sys.argv) > 1: # if runserver is passed an argument
if cron_condition or force_update:
print "Cron succeeded"
#freezer = Freezer(app)
#freezer.freeze()
#set_metadata()
else:
app.run()
|
import sys
from datetime import date
from main import app
from upload_s3 import set_metadata
from flask_frozen import Freezer
# cron is called with 3 arguments, should only run in the first week of month
cron_condition = len(sys.argv) == 3 and date.today().day < 8
force_update = len(sys.argv) == 2 and sys.argv[1] == 'freeze'
if len(sys.argv) > 1: # if runserver is passed an argument
if cron_condition or force_update:
freezer = Freezer(app)
freezer.freeze()
set_metadata()
else:
app.run()
|
Complete testing, change back to production code
|
Complete testing, change back to production code
|
Python
|
apache-2.0
|
vprnet/dorothys-list,vprnet/dorothys-list,vprnet/dorothys-list
|
python
|
## Code Before:
import sys
from datetime import date
from main import app
from upload_s3 import set_metadata
from flask_frozen import Freezer
# cron is called with 3 arguments, should only run in the first week of month
cron_condition = len(sys.argv) == 3 and date.today().day > 8
force_update = len(sys.argv) == 2 and sys.argv[1] == 'freeze'
if len(sys.argv) > 1: # if runserver is passed an argument
if cron_condition or force_update:
print "Cron succeeded"
#freezer = Freezer(app)
#freezer.freeze()
#set_metadata()
else:
app.run()
## Instruction:
Complete testing, change back to production code
## Code After:
import sys
from datetime import date
from main import app
from upload_s3 import set_metadata
from flask_frozen import Freezer
# cron is called with 3 arguments, should only run in the first week of month
cron_condition = len(sys.argv) == 3 and date.today().day < 8
force_update = len(sys.argv) == 2 and sys.argv[1] == 'freeze'
if len(sys.argv) > 1: # if runserver is passed an argument
if cron_condition or force_update:
freezer = Freezer(app)
freezer.freeze()
set_metadata()
else:
app.run()
|
// ... existing code ...
from flask_frozen import Freezer
# cron is called with 3 arguments, should only run in the first week of month
cron_condition = len(sys.argv) == 3 and date.today().day < 8
force_update = len(sys.argv) == 2 and sys.argv[1] == 'freeze'
if len(sys.argv) > 1: # if runserver is passed an argument
if cron_condition or force_update:
freezer = Freezer(app)
freezer.freeze()
set_metadata()
else:
app.run()
// ... rest of the code ...
|
888cdf6797690fe202b03ac0fc2ba46d5df3c6d5
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='property-caching',
version='1.0.1',
description='Property caching',
author='Yola',
author_email='[email protected]',
license='MIT (Expat)',
url='https://github.com/yola/property-caching',
packages=['property_caching'],
test_suite='tests'
)
|
from setuptools import setup
setup(
name='property-caching',
version='1.0.1',
description='Property caching',
author='Yola',
author_email='[email protected]',
license='MIT (Expat)',
url='https://github.com/yola/property-caching',
packages=['property_caching'],
test_suite='tests',
classifiers=[
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
]
)
|
Add classifiers for python 2 and 3 support
|
Add classifiers for python 2 and 3 support
|
Python
|
mit
|
yola/property-caching
|
python
|
## Code Before:
from setuptools import setup
setup(
name='property-caching',
version='1.0.1',
description='Property caching',
author='Yola',
author_email='[email protected]',
license='MIT (Expat)',
url='https://github.com/yola/property-caching',
packages=['property_caching'],
test_suite='tests'
)
## Instruction:
Add classifiers for python 2 and 3 support
## Code After:
from setuptools import setup
setup(
name='property-caching',
version='1.0.1',
description='Property caching',
author='Yola',
author_email='[email protected]',
license='MIT (Expat)',
url='https://github.com/yola/property-caching',
packages=['property_caching'],
test_suite='tests',
classifiers=[
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
]
)
|
...
license='MIT (Expat)',
url='https://github.com/yola/property-caching',
packages=['property_caching'],
test_suite='tests',
classifiers=[
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
]
)
...
|
7bc736b9a31d532930e9824842f7adb84436a7b8
|
django_filters/rest_framework/filters.py
|
django_filters/rest_framework/filters.py
|
from ..filters import *
from ..widgets import BooleanWidget
class BooleanFilter(BooleanFilter):
def __init__(self, *args, **kwargs):
kwargs.setdefault('widget', BooleanWidget)
super().__init__(*args, **kwargs)
|
from ..filters import BooleanFilter as _BooleanFilter
from ..filters import *
from ..widgets import BooleanWidget
class BooleanFilter(_BooleanFilter):
def __init__(self, *args, **kwargs):
kwargs.setdefault('widget', BooleanWidget)
super().__init__(*args, **kwargs)
|
Fix mypy error: circular inheritance
|
Fix mypy error: circular inheritance
Closes #832, #833
|
Python
|
bsd-3-clause
|
alex/django-filter,alex/django-filter
|
python
|
## Code Before:
from ..filters import *
from ..widgets import BooleanWidget
class BooleanFilter(BooleanFilter):
def __init__(self, *args, **kwargs):
kwargs.setdefault('widget', BooleanWidget)
super().__init__(*args, **kwargs)
## Instruction:
Fix mypy error: circular inheritance
Closes #832, #833
## Code After:
from ..filters import BooleanFilter as _BooleanFilter
from ..filters import *
from ..widgets import BooleanWidget
class BooleanFilter(_BooleanFilter):
def __init__(self, *args, **kwargs):
kwargs.setdefault('widget', BooleanWidget)
super().__init__(*args, **kwargs)
|
# ... existing code ...
from ..filters import BooleanFilter as _BooleanFilter
from ..filters import *
from ..widgets import BooleanWidget
class BooleanFilter(_BooleanFilter):
def __init__(self, *args, **kwargs):
kwargs.setdefault('widget', BooleanWidget)
# ... rest of the code ...
|
57a920583a48678f35a0f2e0bb61adcc561243b6
|
src/main/java/org/realityforge/replicant/shared/json/TransportConstants.java
|
src/main/java/org/realityforge/replicant/shared/json/TransportConstants.java
|
package org.realityforge.replicant.shared.json;
/**
* Constants used to build up JSON payload transmitted to the client.
*/
public interface TransportConstants
{
String LAST_CHANGE_SET_ID = "last_id";
String REQUEST_ID = "request_id";
String ETAG = "etag";
String CHANGES = "changes";
String ENTITY_ID = "id";
String TYPE_ID = "type";
String DATA = "data";
String CHANNEL_ACTIONS = "channel_actions";
String ACTION = "action";
String ACTION_ADD = "add";
String ACTION_REMOVE = "remove";
String CHANNELS = "channels";
String CHANNEL_ID = "cid";
String SUBCHANNEL_ID = "scid";
}
|
package org.realityforge.replicant.shared.json;
/**
* Constants used to build up JSON payload transmitted to the client.
*/
public interface TransportConstants
{
String LAST_CHANGE_SET_ID = "last_id";
String REQUEST_ID = "request_id";
String ETAG = "etag";
String CHANGES = "changes";
String ENTITY_ID = "id";
String TYPE_ID = "type";
String DATA = "data";
String CHANNEL_ACTIONS = "channel_actions";
String ACTION = "action";
String ACTION_ADD = "add";
String ACTION_REMOVE = "remove";
String ACTION_UPDATE = "update";
String CHANNELS = "channels";
String CHANNEL_ID = "cid";
String SUBCHANNEL_ID = "scid";
String CHANNEL_FILTER = "filter";
}
|
Add constants used to transport "channel" update messages, including the new filter
|
Add constants used to transport "channel" update messages, including the new filter
|
Java
|
apache-2.0
|
realityforge/replicant,realityforge/replicant
|
java
|
## Code Before:
package org.realityforge.replicant.shared.json;
/**
* Constants used to build up JSON payload transmitted to the client.
*/
public interface TransportConstants
{
String LAST_CHANGE_SET_ID = "last_id";
String REQUEST_ID = "request_id";
String ETAG = "etag";
String CHANGES = "changes";
String ENTITY_ID = "id";
String TYPE_ID = "type";
String DATA = "data";
String CHANNEL_ACTIONS = "channel_actions";
String ACTION = "action";
String ACTION_ADD = "add";
String ACTION_REMOVE = "remove";
String CHANNELS = "channels";
String CHANNEL_ID = "cid";
String SUBCHANNEL_ID = "scid";
}
## Instruction:
Add constants used to transport "channel" update messages, including the new filter
## Code After:
package org.realityforge.replicant.shared.json;
/**
* Constants used to build up JSON payload transmitted to the client.
*/
public interface TransportConstants
{
String LAST_CHANGE_SET_ID = "last_id";
String REQUEST_ID = "request_id";
String ETAG = "etag";
String CHANGES = "changes";
String ENTITY_ID = "id";
String TYPE_ID = "type";
String DATA = "data";
String CHANNEL_ACTIONS = "channel_actions";
String ACTION = "action";
String ACTION_ADD = "add";
String ACTION_REMOVE = "remove";
String ACTION_UPDATE = "update";
String CHANNELS = "channels";
String CHANNEL_ID = "cid";
String SUBCHANNEL_ID = "scid";
String CHANNEL_FILTER = "filter";
}
|
...
String ACTION = "action";
String ACTION_ADD = "add";
String ACTION_REMOVE = "remove";
String ACTION_UPDATE = "update";
String CHANNELS = "channels";
String CHANNEL_ID = "cid";
String SUBCHANNEL_ID = "scid";
String CHANNEL_FILTER = "filter";
}
...
|
01f43d80fd4324f596904e22409c0b76bcb1b015
|
totalsum/templatetags/totalsum.py
|
totalsum/templatetags/totalsum.py
|
from django.template import Library, loader, Context
from django.contrib.admin.templatetags.admin_list import result_headers, result_hidden_fields, results, admin_actions
register = Library()
admin_actions = admin_actions
@register.simple_tag(takes_context=True)
def totalsum_result_list(context, cl, totals, unit_of_measure, template_name="totalsum_change_list_results.html"):
pagination_required = (not cl.show_all or not cl.can_show_all) and cl.multi_page
headers = list(result_headers(cl))
num_sorted_fields = 0
for h in headers:
if h['sortable'] and h['sorted']:
num_sorted_fields += 1
c = {
'cl': cl,
'totals': totals,
'unit_of_measure': unit_of_measure,
'result_hidden_fields': list(result_hidden_fields(cl)),
'result_headers': headers,
'num_sorted_fields': num_sorted_fields,
'results': list(results(cl)),
'pagination_required': pagination_required
}
t = loader.get_template(template_name)
return t.render(Context(c))
@register.filter
def get_total(totals, column):
if column in totals.keys():
return totals[column]
return ''
|
from django.template import Library, loader
from django.contrib.admin.templatetags.admin_list import result_headers, result_hidden_fields, results, admin_actions
register = Library()
admin_actions = admin_actions
@register.simple_tag(takes_context=True)
def totalsum_result_list(context, cl, totals, unit_of_measure, template_name="totalsum_change_list_results.html"):
pagination_required = (not cl.show_all or not cl.can_show_all) and cl.multi_page
headers = list(result_headers(cl))
num_sorted_fields = 0
for h in headers:
if h['sortable'] and h['sorted']:
num_sorted_fields += 1
c = {
'cl': cl,
'totals': totals,
'unit_of_measure': unit_of_measure,
'result_hidden_fields': list(result_hidden_fields(cl)),
'result_headers': headers,
'num_sorted_fields': num_sorted_fields,
'results': list(results(cl)),
'pagination_required': pagination_required
}
t = loader.get_template(template_name)
return t.render(c)
@register.filter
def get_total(totals, column):
if column in totals.keys():
return totals[column]
return ''
|
Update for Django version 1.11
|
Update for Django version 1.11
|
Python
|
mit
|
20tab/twentytab-totalsum-admin,20tab/twentytab-totalsum-admin
|
python
|
## Code Before:
from django.template import Library, loader, Context
from django.contrib.admin.templatetags.admin_list import result_headers, result_hidden_fields, results, admin_actions
register = Library()
admin_actions = admin_actions
@register.simple_tag(takes_context=True)
def totalsum_result_list(context, cl, totals, unit_of_measure, template_name="totalsum_change_list_results.html"):
pagination_required = (not cl.show_all or not cl.can_show_all) and cl.multi_page
headers = list(result_headers(cl))
num_sorted_fields = 0
for h in headers:
if h['sortable'] and h['sorted']:
num_sorted_fields += 1
c = {
'cl': cl,
'totals': totals,
'unit_of_measure': unit_of_measure,
'result_hidden_fields': list(result_hidden_fields(cl)),
'result_headers': headers,
'num_sorted_fields': num_sorted_fields,
'results': list(results(cl)),
'pagination_required': pagination_required
}
t = loader.get_template(template_name)
return t.render(Context(c))
@register.filter
def get_total(totals, column):
if column in totals.keys():
return totals[column]
return ''
## Instruction:
Update for Django version 1.11
## Code After:
from django.template import Library, loader
from django.contrib.admin.templatetags.admin_list import result_headers, result_hidden_fields, results, admin_actions
register = Library()
admin_actions = admin_actions
@register.simple_tag(takes_context=True)
def totalsum_result_list(context, cl, totals, unit_of_measure, template_name="totalsum_change_list_results.html"):
pagination_required = (not cl.show_all or not cl.can_show_all) and cl.multi_page
headers = list(result_headers(cl))
num_sorted_fields = 0
for h in headers:
if h['sortable'] and h['sorted']:
num_sorted_fields += 1
c = {
'cl': cl,
'totals': totals,
'unit_of_measure': unit_of_measure,
'result_hidden_fields': list(result_hidden_fields(cl)),
'result_headers': headers,
'num_sorted_fields': num_sorted_fields,
'results': list(results(cl)),
'pagination_required': pagination_required
}
t = loader.get_template(template_name)
return t.render(c)
@register.filter
def get_total(totals, column):
if column in totals.keys():
return totals[column]
return ''
|
// ... existing code ...
from django.template import Library, loader
from django.contrib.admin.templatetags.admin_list import result_headers, result_hidden_fields, results, admin_actions
// ... modified code ...
}
t = loader.get_template(template_name)
return t.render(c)
@register.filter
// ... rest of the code ...
|
ef0a6968dedad74ddd40bd4ae81595be6092f24f
|
wrapper/__init__.py
|
wrapper/__init__.py
|
__version__ = '2.2.0'
from libsbol import *
import unit_tests
|
from __future__ import absolute_import
__version__ = '2.2.0'
from sbol.libsbol import *
import sbol.unit_tests
|
Fix import issue with Python 3.6/Support future Python by forcing absolute import
|
Fix import issue with Python 3.6/Support future Python by forcing absolute import
|
Python
|
apache-2.0
|
SynBioDex/libSBOL,SynBioDex/libSBOL,SynBioDex/libSBOL,SynBioDex/libSBOL
|
python
|
## Code Before:
__version__ = '2.2.0'
from libsbol import *
import unit_tests
## Instruction:
Fix import issue with Python 3.6/Support future Python by forcing absolute import
## Code After:
from __future__ import absolute_import
__version__ = '2.2.0'
from sbol.libsbol import *
import sbol.unit_tests
|
...
from __future__ import absolute_import
__version__ = '2.2.0'
from sbol.libsbol import *
import sbol.unit_tests
...
|
6e4abfbfcc6ae88b96890689ff7d2955cbc8b3d4
|
src/test/java/test/issues/strava/Issue67.java
|
src/test/java/test/issues/strava/Issue67.java
|
/**
*
*/
package test.issues.strava;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import java.util.Arrays;
import java.util.List;
import javastrava.api.v3.model.StravaComment;
import javastrava.api.v3.rest.API;
import javastrava.api.v3.rest.ActivityAPI;
import org.junit.Test;
import test.api.service.impl.util.ActivityServiceUtils;
import test.utils.RateLimitedTestRunner;
import test.utils.TestUtils;
/**
* <p>
* Issue test for issue javastrava-api #67 - tests will PASS if the issue is still a problem
* </p>
*
* @author Dan Shannon
* @see <a href="https://github.com/danshannon/javastravav3api/issues/67>https://github.com/danshannon/javastravav3api/issues/67</a>
*/
public class Issue67 {
@Test
public void testIssue() throws Exception {
RateLimitedTestRunner.run(() -> {
final ActivityAPI api = API.instance(ActivityAPI.class, TestUtils.getValidToken());
final StravaComment comment = ActivityServiceUtils.createPrivateActivityWithComment();
final List<StravaComment> comments = Arrays.asList(api.listActivityComments(comment.getActivityId(), null, null, null));
assertNotNull(comments);
assertFalse(comments.isEmpty());
});
}
}
|
/**
*
*/
package test.issues.strava;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import java.util.Arrays;
import java.util.List;
import javastrava.api.v3.model.StravaComment;
import javastrava.api.v3.rest.API;
import javastrava.api.v3.rest.ActivityAPI;
import org.junit.Test;
import test.api.service.impl.util.ActivityServiceUtils;
import test.utils.RateLimitedTestRunner;
import test.utils.TestUtils;
/**
* <p>
* Issue test for issue javastrava-api #67 - tests will PASS if the issue is still a problem
* </p>
*
* @author Dan Shannon
* @see <a href="https://github.com/danshannon/javastravav3api/issues/67>https://github.com/danshannon/javastravav3api/issues/67</a>
*/
public class Issue67 {
@Test
public void testIssue() throws Exception {
RateLimitedTestRunner.run(() -> {
final ActivityAPI api = API.instance(ActivityAPI.class, TestUtils.getValidToken());
final StravaComment comment = ActivityServiceUtils.createPrivateActivityWithComment("Issue67.testIssue()");
final List<StravaComment> comments = Arrays.asList(api.listActivityComments(comment.getActivityId(), null, null, null));
assertNotNull(comments);
assertFalse(comments.isEmpty());
});
}
}
|
Enforce name conventions for test-created activities
|
Enforce name conventions for test-created activities
|
Java
|
apache-2.0
|
danshannon/javastrava-test
|
java
|
## Code Before:
/**
*
*/
package test.issues.strava;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import java.util.Arrays;
import java.util.List;
import javastrava.api.v3.model.StravaComment;
import javastrava.api.v3.rest.API;
import javastrava.api.v3.rest.ActivityAPI;
import org.junit.Test;
import test.api.service.impl.util.ActivityServiceUtils;
import test.utils.RateLimitedTestRunner;
import test.utils.TestUtils;
/**
* <p>
* Issue test for issue javastrava-api #67 - tests will PASS if the issue is still a problem
* </p>
*
* @author Dan Shannon
* @see <a href="https://github.com/danshannon/javastravav3api/issues/67>https://github.com/danshannon/javastravav3api/issues/67</a>
*/
public class Issue67 {
@Test
public void testIssue() throws Exception {
RateLimitedTestRunner.run(() -> {
final ActivityAPI api = API.instance(ActivityAPI.class, TestUtils.getValidToken());
final StravaComment comment = ActivityServiceUtils.createPrivateActivityWithComment();
final List<StravaComment> comments = Arrays.asList(api.listActivityComments(comment.getActivityId(), null, null, null));
assertNotNull(comments);
assertFalse(comments.isEmpty());
});
}
}
## Instruction:
Enforce name conventions for test-created activities
## Code After:
/**
*
*/
package test.issues.strava;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import java.util.Arrays;
import java.util.List;
import javastrava.api.v3.model.StravaComment;
import javastrava.api.v3.rest.API;
import javastrava.api.v3.rest.ActivityAPI;
import org.junit.Test;
import test.api.service.impl.util.ActivityServiceUtils;
import test.utils.RateLimitedTestRunner;
import test.utils.TestUtils;
/**
* <p>
* Issue test for issue javastrava-api #67 - tests will PASS if the issue is still a problem
* </p>
*
* @author Dan Shannon
* @see <a href="https://github.com/danshannon/javastravav3api/issues/67>https://github.com/danshannon/javastravav3api/issues/67</a>
*/
public class Issue67 {
@Test
public void testIssue() throws Exception {
RateLimitedTestRunner.run(() -> {
final ActivityAPI api = API.instance(ActivityAPI.class, TestUtils.getValidToken());
final StravaComment comment = ActivityServiceUtils.createPrivateActivityWithComment("Issue67.testIssue()");
final List<StravaComment> comments = Arrays.asList(api.listActivityComments(comment.getActivityId(), null, null, null));
assertNotNull(comments);
assertFalse(comments.isEmpty());
});
}
}
|
# ... existing code ...
public void testIssue() throws Exception {
RateLimitedTestRunner.run(() -> {
final ActivityAPI api = API.instance(ActivityAPI.class, TestUtils.getValidToken());
final StravaComment comment = ActivityServiceUtils.createPrivateActivityWithComment("Issue67.testIssue()");
final List<StravaComment> comments = Arrays.asList(api.listActivityComments(comment.getActivityId(), null, null, null));
assertNotNull(comments);
assertFalse(comments.isEmpty());
# ... rest of the code ...
|
85db39e36c99e800e1008605213d1c25108b035d
|
angr/paths.py
|
angr/paths.py
|
import logging
l = logging.getLogger('angr.states')
class PathGenerator(object):
def __init__(self, project):
self._project = project
def blank_path(self, state=None, *args, **kwargs):
'''
blank_point - Returns a start path, representing a clean start of symbolic execution.
'''
s = self._project.state_generator.blank_state(*args, **kwargs) if state is None else state
return Path(self._project, s)
def entry_point(self, state=None, *args, **kwargs):
'''
entry_point - Returns a path reflecting the processor when execution
reaches the binary's entry point.
'''
s = self._project.state_generator.entry_point(*args, **kwargs) if state is None else state
return Path(self._project, s)
from .path import Path
|
import logging
l = logging.getLogger('angr.states')
class PathGenerator(object):
def __init__(self, project):
self._project = project
def blank_path(self, state=None, jumpkind='Ijk_Boring', *args, **kwargs):
'''
blank_point - Returns a start path, representing a clean start of symbolic execution.
'''
s = self._project.state_generator.blank_state(*args, **kwargs) if state is None else state
return Path(self._project, s, jumpkind=jumpkind)
def entry_point(self, state=None, *args, **kwargs):
'''
entry_point - Returns a path reflecting the processor when execution
reaches the binary's entry point.
'''
s = self._project.state_generator.entry_point(*args, **kwargs) if state is None else state
return Path(self._project, s)
from .path import Path
|
Allow specifying jumpkind when creating a Path via PathGenerator.blank_path()
|
Allow specifying jumpkind when creating a Path via PathGenerator.blank_path()
|
Python
|
bsd-2-clause
|
angr/angr,GuardianRG/angr,iamahuman/angr,cureHsu/angr,tyb0807/angr,mingderwang/angr,fjferrer/angr,angr/angr,zhuyue1314/angr,axt/angr,cureHsu/angr,chubbymaggie/angr,schieb/angr,lowks/angr,fjferrer/angr,zhuyue1314/angr,schieb/angr,chubbymaggie/angr,GuardianRG/angr,axt/angr,mingderwang/angr,avain/angr,schieb/angr,angr/angr,lowks/angr,haylesr/angr,iamahuman/angr,axt/angr,iamahuman/angr,f-prettyland/angr,xurantju/angr,chubbymaggie/angr,tyb0807/angr,xurantju/angr,avain/angr,f-prettyland/angr,haylesr/angr,terry2012/angr,tyb0807/angr,terry2012/angr,f-prettyland/angr
|
python
|
## Code Before:
import logging
l = logging.getLogger('angr.states')
class PathGenerator(object):
def __init__(self, project):
self._project = project
def blank_path(self, state=None, *args, **kwargs):
'''
blank_point - Returns a start path, representing a clean start of symbolic execution.
'''
s = self._project.state_generator.blank_state(*args, **kwargs) if state is None else state
return Path(self._project, s)
def entry_point(self, state=None, *args, **kwargs):
'''
entry_point - Returns a path reflecting the processor when execution
reaches the binary's entry point.
'''
s = self._project.state_generator.entry_point(*args, **kwargs) if state is None else state
return Path(self._project, s)
from .path import Path
## Instruction:
Allow specifying jumpkind when creating a Path via PathGenerator.blank_path()
## Code After:
import logging
l = logging.getLogger('angr.states')
class PathGenerator(object):
def __init__(self, project):
self._project = project
def blank_path(self, state=None, jumpkind='Ijk_Boring', *args, **kwargs):
'''
blank_point - Returns a start path, representing a clean start of symbolic execution.
'''
s = self._project.state_generator.blank_state(*args, **kwargs) if state is None else state
return Path(self._project, s, jumpkind=jumpkind)
def entry_point(self, state=None, *args, **kwargs):
'''
entry_point - Returns a path reflecting the processor when execution
reaches the binary's entry point.
'''
s = self._project.state_generator.entry_point(*args, **kwargs) if state is None else state
return Path(self._project, s)
from .path import Path
|
...
def __init__(self, project):
self._project = project
def blank_path(self, state=None, jumpkind='Ijk_Boring', *args, **kwargs):
'''
blank_point - Returns a start path, representing a clean start of symbolic execution.
'''
s = self._project.state_generator.blank_state(*args, **kwargs) if state is None else state
return Path(self._project, s, jumpkind=jumpkind)
def entry_point(self, state=None, *args, **kwargs):
'''
...
|
36663add9f53da925f1d29c8c567ab30a1f33139
|
tests/api_resources/checkout/test_session.py
|
tests/api_resources/checkout/test_session.py
|
from __future__ import absolute_import, division, print_function
import stripe
TEST_RESOURCE_ID = "loc_123"
class TestSession(object):
def test_is_creatable(self, request_mock):
resource = stripe.checkout.Session.create(
cancel_url="https://stripe.com/cancel",
client_reference_id="1234",
line_items=[
{
"amount": 123,
"currency": "usd",
"description": "item 1",
"images": ["https://stripe.com/img1"],
"name": "name",
"quantity": 2,
}
],
payment_intent_data={"receipt_email": "[email protected]"},
payment_method_types=["card"],
success_url="https://stripe.com/success",
)
request_mock.assert_requested("post", "/v1/checkout/sessions")
assert isinstance(resource, stripe.checkout.Session)
|
from __future__ import absolute_import, division, print_function
import stripe
TEST_RESOURCE_ID = "cs_123"
class TestSession(object):
def test_is_creatable(self, request_mock):
resource = stripe.checkout.Session.create(
cancel_url="https://stripe.com/cancel",
client_reference_id="1234",
line_items=[
{
"amount": 123,
"currency": "usd",
"description": "item 1",
"images": ["https://stripe.com/img1"],
"name": "name",
"quantity": 2,
}
],
payment_intent_data={"receipt_email": "[email protected]"},
payment_method_types=["card"],
success_url="https://stripe.com/success",
)
request_mock.assert_requested("post", "/v1/checkout/sessions")
assert isinstance(resource, stripe.checkout.Session)
def test_is_retrievable(self, request_mock):
resource = stripe.checkout.Session.retrieve(TEST_RESOURCE_ID)
request_mock.assert_requested(
"get", "/v1/checkout/sessions/%s" % TEST_RESOURCE_ID
)
assert isinstance(resource, stripe.checkout.Session)
|
Add support for retrieving a Checkout Session
|
Add support for retrieving a Checkout Session
|
Python
|
mit
|
stripe/stripe-python
|
python
|
## Code Before:
from __future__ import absolute_import, division, print_function
import stripe
TEST_RESOURCE_ID = "loc_123"
class TestSession(object):
def test_is_creatable(self, request_mock):
resource = stripe.checkout.Session.create(
cancel_url="https://stripe.com/cancel",
client_reference_id="1234",
line_items=[
{
"amount": 123,
"currency": "usd",
"description": "item 1",
"images": ["https://stripe.com/img1"],
"name": "name",
"quantity": 2,
}
],
payment_intent_data={"receipt_email": "[email protected]"},
payment_method_types=["card"],
success_url="https://stripe.com/success",
)
request_mock.assert_requested("post", "/v1/checkout/sessions")
assert isinstance(resource, stripe.checkout.Session)
## Instruction:
Add support for retrieving a Checkout Session
## Code After:
from __future__ import absolute_import, division, print_function
import stripe
TEST_RESOURCE_ID = "cs_123"
class TestSession(object):
def test_is_creatable(self, request_mock):
resource = stripe.checkout.Session.create(
cancel_url="https://stripe.com/cancel",
client_reference_id="1234",
line_items=[
{
"amount": 123,
"currency": "usd",
"description": "item 1",
"images": ["https://stripe.com/img1"],
"name": "name",
"quantity": 2,
}
],
payment_intent_data={"receipt_email": "[email protected]"},
payment_method_types=["card"],
success_url="https://stripe.com/success",
)
request_mock.assert_requested("post", "/v1/checkout/sessions")
assert isinstance(resource, stripe.checkout.Session)
def test_is_retrievable(self, request_mock):
resource = stripe.checkout.Session.retrieve(TEST_RESOURCE_ID)
request_mock.assert_requested(
"get", "/v1/checkout/sessions/%s" % TEST_RESOURCE_ID
)
assert isinstance(resource, stripe.checkout.Session)
|
// ... existing code ...
import stripe
TEST_RESOURCE_ID = "cs_123"
class TestSession(object):
// ... modified code ...
)
request_mock.assert_requested("post", "/v1/checkout/sessions")
assert isinstance(resource, stripe.checkout.Session)
def test_is_retrievable(self, request_mock):
resource = stripe.checkout.Session.retrieve(TEST_RESOURCE_ID)
request_mock.assert_requested(
"get", "/v1/checkout/sessions/%s" % TEST_RESOURCE_ID
)
assert isinstance(resource, stripe.checkout.Session)
// ... rest of the code ...
|
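A hedged usage sketch of the retrieval support added in the record above; the API key and session id are placeholders, and a real test-mode key would be required for the call to succeed.

```python
import stripe

stripe.api_key = "sk_test_..."                         # placeholder test-mode key
session = stripe.checkout.Session.retrieve("cs_123")   # placeholder session id
print(session.id)
```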
78b1d21aebf4c0c4a122d3a6d14f41ae64529f10
|
src/main/java/jannotate/beans/AnnotationEjb.java
|
src/main/java/jannotate/beans/AnnotationEjb.java
|
package jannotate.beans;
import jannotate.domain.Annotation;
import java.util.List;
import javax.annotation.Resource;
import javax.inject.Named;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Transient;
import javax.ejb.*;
@Stateless
public class AnnotationEjb {
@PersistenceContext
EntityManager em;
public List<Annotation> getAnnotations() {
return (List<Annotation>) em.createQuery(
"SELECT a FROM Annotation a order by a.api, a.name").getResultList();
}
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public String addEntry(Annotation instance) {
System.out.println("AnnotationEjb.addEntry():" + instance);
em.persist(instance);
return "index";
}
}
|
package jannotate.beans;
import jannotate.domain.Annotation;
import java.util.List;
import javax.annotation.Resource;
import javax.inject.Named;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Transient;
import javax.ejb.*;
@Stateless
public class AnnotationEjb {
@PersistenceContext
EntityManager em;
@TransactionAttribute()
public List<Annotation> getAnnotations() {
return (List<Annotation>) em.createQuery(
"SELECT a FROM Annotation a order by a.api, a.name").getResultList();
}
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public String addEntry(Annotation instance) {
System.out.println("AnnotationEjb.addEntry():" + instance);
em.persist(instance);
return "index";
}
}
|
Add dummy @TransactionAttribute on read method.
|
Add dummy @TransactionAttribute on read method.
|
Java
|
bsd-2-clause
|
IanDarwin/jannotate
|
java
|
## Code Before:
package jannotate.beans;
import jannotate.domain.Annotation;
import java.util.List;
import javax.annotation.Resource;
import javax.inject.Named;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Transient;
import javax.ejb.*;
@Stateless
public class AnnotationEjb {
@PersistenceContext
EntityManager em;
public List<Annotation> getAnnotations() {
return (List<Annotation>) em.createQuery(
"SELECT a FROM Annotation a order by a.api, a.name").getResultList();
}
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public String addEntry(Annotation instance) {
System.out.println("AnnotationEjb.addEntry():" + instance);
em.persist(instance);
return "index";
}
}
## Instruction:
Add dummy @TransactionAttribute on read method.
## Code After:
package jannotate.beans;
import jannotate.domain.Annotation;
import java.util.List;
import javax.annotation.Resource;
import javax.inject.Named;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Transient;
import javax.ejb.*;
@Stateless
public class AnnotationEjb {
@PersistenceContext
EntityManager em;
@TransactionAttribute()
public List<Annotation> getAnnotations() {
return (List<Annotation>) em.createQuery(
"SELECT a FROM Annotation a order by a.api, a.name").getResultList();
}
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public String addEntry(Annotation instance) {
System.out.println("AnnotationEjb.addEntry():" + instance);
em.persist(instance);
return "index";
}
}
|
// ... existing code ...
@PersistenceContext
EntityManager em;
@TransactionAttribute()
public List<Annotation> getAnnotations() {
return (List<Annotation>) em.createQuery(
"SELECT a FROM Annotation a order by a.api, a.name").getResultList();
// ... rest of the code ...
|
7f6167ef9f62b9b79e3c30b358c796caae69a2e6
|
PyWXSB/exceptions_.py
|
PyWXSB/exceptions_.py
|
import exceptions
class PyWXSBException (exceptions.Exception):
"""Base class for exceptions that indicate a problem that the user should fix."""
pass
class SchemaValidationError (PyWXSBException):
"""Raised when the XML hierarchy does not appear to be valid for an XML schema."""
pass
class BadTypeValueError (PyWXSBException):
"""Raised when a value in an XML attribute does not conform to the simple type."""
pass
class NotInNamespaceError (PyWXSBException):
'''Raised when a name is referenced that is not defined in the appropriate namespace.'''
__namespace = None
__ncName = None
class BadPropertyError (PyWXSBException):
"""Raised when a schema component property is accessed on a component instance that does not define that property."""
pass
class PyWXSBError (exceptions.Exception):
"""Base class for exceptions that indicate a problem that the user probably can't fix."""
pass
class LogicError (PyWXSBError):
"""Raised when the code detects an implementation problem."""
class IncompleteImplementationError (LogicError):
"""Raised when a code branch is taken that has not yet been implemented."""
|
import exceptions
class PyWXSBException (exceptions.Exception):
"""Base class for exceptions that indicate a problem that the user should fix."""
pass
class SchemaValidationError (PyWXSBException):
"""Raised when the XML hierarchy does not appear to be valid for an XML schema."""
pass
class BadTypeValueError (PyWXSBException):
"""Raised when a value in an XML attribute does not conform to the simple type."""
pass
class NotInNamespaceError (PyWXSBException):
'''Raised when a name is referenced that is not defined in the appropriate namespace.'''
__namespace = None
__ncName = None
class BadPropertyError (PyWXSBException):
"""Raised when a schema component property is accessed on a component instance that does not define that property."""
pass
class BadDocumentError (PyWXSBException):
"""Raised when processing document content and an error is encountered."""
pass
class PyWXSBError (exceptions.Exception):
"""Base class for exceptions that indicate a problem that the user probably can't fix."""
pass
class LogicError (PyWXSBError):
"""Raised when the code detects an implementation problem."""
class IncompleteImplementationError (LogicError):
"""Raised when a code branch is taken that has not yet been implemented."""
|
Add an exception to throw when a document does not have the expected structure
|
Add an exception to throw when a document does not have the expected structure
|
Python
|
apache-2.0
|
jonfoster/pyxb-upstream-mirror,jonfoster/pyxb-upstream-mirror,balanced/PyXB,jonfoster/pyxb2,pabigot/pyxb,CantemoInternal/pyxb,jonfoster/pyxb2,balanced/PyXB,CantemoInternal/pyxb,jonfoster/pyxb1,pabigot/pyxb,jonfoster/pyxb2,CantemoInternal/pyxb,jonfoster/pyxb-upstream-mirror,jonfoster/pyxb1,balanced/PyXB
|
python
|
## Code Before:
import exceptions
class PyWXSBException (exceptions.Exception):
"""Base class for exceptions that indicate a problem that the user should fix."""
pass
class SchemaValidationError (PyWXSBException):
"""Raised when the XML hierarchy does not appear to be valid for an XML schema."""
pass
class BadTypeValueError (PyWXSBException):
"""Raised when a value in an XML attribute does not conform to the simple type."""
pass
class NotInNamespaceError (PyWXSBException):
'''Raised when a name is referenced that is not defined in the appropriate namespace.'''
__namespace = None
__ncName = None
class BadPropertyError (PyWXSBException):
"""Raised when a schema component property is accessed on a component instance that does not define that property."""
pass
class PyWXSBError (exceptions.Exception):
"""Base class for exceptions that indicate a problem that the user probably can't fix."""
pass
class LogicError (PyWXSBError):
"""Raised when the code detects an implementation problem."""
class IncompleteImplementationError (LogicError):
"""Raised when a code branch is taken that has not yet been implemented."""
## Instruction:
Add an exception to throw when a document does not have the expected structure
## Code After:
import exceptions
class PyWXSBException (exceptions.Exception):
"""Base class for exceptions that indicate a problem that the user should fix."""
pass
class SchemaValidationError (PyWXSBException):
"""Raised when the XML hierarchy does not appear to be valid for an XML schema."""
pass
class BadTypeValueError (PyWXSBException):
"""Raised when a value in an XML attribute does not conform to the simple type."""
pass
class NotInNamespaceError (PyWXSBException):
'''Raised when a name is referenced that is not defined in the appropriate namespace.'''
__namespace = None
__ncName = None
class BadPropertyError (PyWXSBException):
"""Raised when a schema component property is accessed on a component instance that does not define that property."""
pass
class BadDocumentError (PyWXSBException):
"""Raised when processing document content and an error is encountered."""
pass
class PyWXSBError (exceptions.Exception):
"""Base class for exceptions that indicate a problem that the user probably can't fix."""
pass
class LogicError (PyWXSBError):
"""Raised when the code detects an implementation problem."""
class IncompleteImplementationError (LogicError):
"""Raised when a code branch is taken that has not yet been implemented."""
|
# ... existing code ...
"""Raised when a schema component property is accessed on a component instance that does not define that property."""
pass
class BadDocumentError (PyWXSBException):
"""Raised when processing document content and an error is encountered."""
pass
class PyWXSBError (exceptions.Exception):
"""Base class for exceptions that indicate a problem that the user probably can't fix."""
pass
# ... rest of the code ...
|
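A minimal sketch of how the new BadDocumentError might be used while checking document structure; the helper and the DOM-style attribute access are illustrative, not taken from the PyWXSB codebase.

```python
from PyWXSB.exceptions_ import BadDocumentError

def expect_element(node, expected_tag):
    """Raise BadDocumentError when a DOM node is not the element we expect."""
    if node is None or node.nodeName != expected_tag:
        found = node.nodeName if node is not None else 'nothing'
        raise BadDocumentError('expected <%s> element, found %s' % (expected_tag, found))
    return node
```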
335abda444cbd5651af0d9a298570144627c7022
|
passwordless/utils.py
|
passwordless/utils.py
|
import os
import random
import uuid
from django.contrib.auth.hashers import make_password,is_password_usable
from django.utils import timezone
from datetime import timedelta
WORDLIST_FILE = os.path.join(os.path.dirname(__file__), 'wordlist.txt')
def make_token():
"""
Generate a random token suitable for activation/confirmation via email
A hex-encoded random UUID has plenty of entropy to be secure enough for our
needs.
"""
return uuid.uuid4().hex
def expiration_date():
"""
AuthToken objects expire 1 hour after creation by default
"""
return timezone.now() + timedelta(hours=1)
def new_app_password(size=6):
f = open(WORDLIST_FILE, 'r')
words = []
for i in range(size):
words.append(next(f).strip())
for num,line in enumerate(f):
j = random.randrange(size+num)
if j < size:
words[j] = line.strip()
return words
|
import os
import random
import uuid
from django.utils import timezone
from datetime import timedelta
WORDLIST_FILE = os.path.join(os.path.dirname(__file__), 'wordlist.txt')
def make_token():
"""
Generate a random token suitable for activation/confirmation via email
A hex-encoded random UUID has plenty of entropy to be secure enough for our
needs.
"""
return uuid.uuid4().hex
def expiration_date():
"""
AuthToken objects expire 1 hour after creation by default
"""
return timezone.now() + timedelta(hours=1)
def new_app_password(size=6):
f = open(WORDLIST_FILE, 'r')
words = []
for i in range(size):
words.append(next(f).strip())
for num,line in enumerate(f):
j = random.randrange(size+num)
if j < size:
words[j] = line.strip()
return ' '.join(words)
|
Return app passwords as string
|
Return app passwords as string
|
Python
|
mit
|
Kromey/fbxnano,Kromey/akwriters,Kromey/fbxnano,Kromey/fbxnano,Kromey/akwriters,Kromey/fbxnano,Kromey/akwriters,Kromey/akwriters
|
python
|
## Code Before:
import os
import random
import uuid
from django.contrib.auth.hashers import make_password,is_password_usable
from django.utils import timezone
from datetime import timedelta
WORDLIST_FILE = os.path.join(os.path.dirname(__file__), 'wordlist.txt')
def make_token():
"""
Generate a random token suitable for activation/confirmation via email
A hex-encoded random UUID has plenty of entropy to be secure enough for our
needs.
"""
return uuid.uuid4().hex
def expiration_date():
"""
AuthToken objects expire 1 hour after creation by default
"""
return timezone.now() + timedelta(hours=1)
def new_app_password(size=6):
f = open(WORDLIST_FILE, 'r')
words = []
for i in range(size):
words.append(next(f).strip())
for num,line in enumerate(f):
j = random.randrange(size+num)
if j < size:
words[j] = line.strip()
return words
## Instruction:
Return app passwords as string
## Code After:
import os
import random
import uuid
from django.utils import timezone
from datetime import timedelta
WORDLIST_FILE = os.path.join(os.path.dirname(__file__), 'wordlist.txt')
def make_token():
"""
Generate a random token suitable for activation/confirmation via email
A hex-encoded random UUID has plenty of entropy to be secure enough for our
needs.
"""
return uuid.uuid4().hex
def expiration_date():
"""
AuthToken objects expire 1 hour after creation by default
"""
return timezone.now() + timedelta(hours=1)
def new_app_password(size=6):
f = open(WORDLIST_FILE, 'r')
words = []
for i in range(size):
words.append(next(f).strip())
for num,line in enumerate(f):
j = random.randrange(size+num)
if j < size:
words[j] = line.strip()
return ' '.join(words)
|
// ... existing code ...
import uuid
from django.utils import timezone
from datetime import timedelta
// ... modified code ...
if j < size:
words[j] = line.strip()
return ' '.join(words)
// ... rest of the code ...
|
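The loop in new_app_password above is a variant of reservoir sampling: keep the first `size` lines, then let each later line displace a kept one with decreasing probability. A self-contained sketch of the standard form of the algorithm, separate from the project's code:

```python
import random

def reservoir_sample(iterable, size):
    """Pick `size` items uniformly at random from an iterable of unknown length."""
    sample = []
    for seen, item in enumerate(iterable, start=1):
        if len(sample) < size:
            sample.append(item)           # fill the reservoir first
        else:
            j = random.randrange(seen)    # 0 <= j < number of items seen so far
            if j < size:
                sample[j] = item          # replace with probability size/seen
    return sample

# e.g. ' '.join(w.strip() for w in reservoir_sample(open('wordlist.txt'), 6))
```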
3efd847f8569a30b018925b39d1552a4aead6e8f
|
destroyer/destroyer.py
|
destroyer/destroyer.py
|
import click
from .services.twitter import TwitterDestroyer
@click.group()
def cli():
pass
@click.command()
@click.option('--unfollow_nonfollowers', default=False, type=click.BOOL)
def twitter(unfollow_nonfollowers):
twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers)
twitter_destroyer.destroy()
def main():
cli.add_command(twitter)
cli()
|
import click
from .services.twitter import TwitterDestroyer
from .services.facebook import FacebookDestroyer
@click.group()
def cli():
pass
@click.command()
@click.option('--unfollow_nonfollowers', default=False, type=click.BOOL)
def twitter(unfollow_nonfollowers):
twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers)
twitter_destroyer.destroy()
@click.command()
def facebook():
facebook_destroyer = FacebookDestroyer()
facebook_destroyer.destroy()
def main():
cli.add_command(twitter)
cli.add_command(facebook)
cli()
|
Update main module with facebook integration
|
Update main module with facebook integration
|
Python
|
mit
|
jaredmichaelsmith/destroyer
|
python
|
## Code Before:
import click
from .services.twitter import TwitterDestroyer
@click.group()
def cli():
pass
@click.command()
@click.option('--unfollow_nonfollowers', default=False, type=click.BOOL)
def twitter(unfollow_nonfollowers):
twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers)
twitter_destroyer.destroy()
def main():
cli.add_command(twitter)
cli()
## Instruction:
Update main module with facebook integration
## Code After:
import click
from .services.twitter import TwitterDestroyer
from .services.facebook import FacebookDestroyer
@click.group()
def cli():
pass
@click.command()
@click.option('--unfollow_nonfollowers', default=False, type=click.BOOL)
def twitter(unfollow_nonfollowers):
twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers)
twitter_destroyer.destroy()
@click.command()
def facebook():
facebook_destroyer = FacebookDestroyer()
facebook_destroyer.destroy()
def main():
cli.add_command(twitter)
cli.add_command(facebook)
cli()
|
// ... existing code ...
import click
from .services.twitter import TwitterDestroyer
from .services.facebook import FacebookDestroyer
@click.group()
// ... modified code ...
twitter_destroyer.destroy()
@click.command()
def facebook():
facebook_destroyer = FacebookDestroyer()
facebook_destroyer.destroy()
def main():
cli.add_command(twitter)
cli.add_command(facebook)
cli()
// ... rest of the code ...
|
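A hedged sketch of exercising the new facebook command in-process with click's test runner; the import path follows the file in the record, and FacebookDestroyer is assumed to be constructible in a test environment.

```python
from click.testing import CliRunner
from destroyer.destroyer import cli, facebook

cli.add_command(facebook)                  # normally wired up in main()
result = CliRunner().invoke(cli, ['facebook'])
print(result.exit_code, result.output)
```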
774ece47574466f6661de469ef0f43ecf97d66f0
|
molly/utils/misc.py
|
molly/utils/misc.py
|
import urllib2, sys, os.path, imp
class AnyMethodRequest(urllib2.Request):
def __init__(self, url, data=None, headers={}, origin_req_host=None, unverifiable=None, method=None):
self.method = method and method.upper() or None
urllib2.Request.__init__(self, url, data, headers, origin_req_host, unverifiable)
def get_method(self):
if not self.method is None:
return self.method
elif self.has_data():
return "POST"
else:
return "GET"
def get_norm_sys_path():
"""
Returns a normalised path that can be used for PYTHONPATH to recreate the
path used for this invocation.
"""
sys_path = sys.path[:]
# Find the path to the first package containing the settings module.
# Once we have it, normalise it and add it to our sys_path if it isn't
# already there.
project_path = imp.find_module(os.environ['DJANGO_SETTINGS_MODULE'].split('.')[0])[1]
sys_path.insert(0, os.path.join(project_path, '..'))
sys_path = [os.path.normpath(p) for p in sys_path if p != '']
# Remove duplicates. This is O(n^2), but efficiency isn't too much of an
# issue when n is small.
sys_path = [p for i,p in enumerate(sys_path) if p not in sys_path[:i]]
return sys_path
|
import urllib2, sys, os.path, imp
class AnyMethodRequest(urllib2.Request):
def __init__(self, url, data=None, headers={}, origin_req_host=None, unverifiable=None, method=None):
self.method = method and method.upper() or None
urllib2.Request.__init__(self, url, data, headers, origin_req_host, unverifiable)
def get_method(self):
if not self.method is None:
return self.method
elif self.has_data():
return "POST"
else:
return "GET"
def get_norm_sys_path():
"""
Returns a normalised path that can be used for PYTHONPATH to recreate the
path used for this invocation.
"""
sys_path = sys.path[:]
# Find the path to the first package containing the settings module.
# Once we have it, normalise it and add it to our sys_path if it isn't
# already there.
try:
project_path = imp.find_module(os.environ['DJANGO_SETTINGS_MODULE'].split('.')[0])[1]
except ImportError:
project_path = imp.find_module('settings')[1]
sys_path.insert(0, os.path.join(project_path, '..'))
sys_path = [os.path.normpath(p) for p in sys_path if p != '']
# Remove duplicates. This is O(n^2), but efficiency isn't too much of an
# issue when n is small.
sys_path = [p for i,p in enumerate(sys_path) if p not in sys_path[:i]]
return sys_path
|
Make create_crontab fall back to default settings search before explicitly searching for DJANGO_SETTINGS_MODULE
|
Make create_crontab fall back to default settings search before explicitly searching for DJANGO_SETTINGS_MODULE
|
Python
|
apache-2.0
|
mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject
|
python
|
## Code Before:
import urllib2, sys, os.path, imp
class AnyMethodRequest(urllib2.Request):
def __init__(self, url, data=None, headers={}, origin_req_host=None, unverifiable=None, method=None):
self.method = method and method.upper() or None
urllib2.Request.__init__(self, url, data, headers, origin_req_host, unverifiable)
def get_method(self):
if not self.method is None:
return self.method
elif self.has_data():
return "POST"
else:
return "GET"
def get_norm_sys_path():
"""
Returns a normalised path that can be used for PYTHONPATH to recreate the
path used for this invocation.
"""
sys_path = sys.path[:]
# Find the path to the first package containing the settings module.
# Once we have it, normalise it and add it to our sys_path if it isn't
# already there.
project_path = imp.find_module(os.environ['DJANGO_SETTINGS_MODULE'].split('.')[0])[1]
sys_path.insert(0, os.path.join(project_path, '..'))
sys_path = [os.path.normpath(p) for p in sys_path if p != '']
# Remove duplicates. This is O(n^2), but efficiency isn't too much of an
# issue when n is small.
sys_path = [p for i,p in enumerate(sys_path) if p not in sys_path[:i]]
return sys_path
## Instruction:
Make create_crontab fall back to default settings search before explicitly searching for DJANGO_SETTINGS_MODULE
## Code After:
import urllib2, sys, os.path, imp
class AnyMethodRequest(urllib2.Request):
def __init__(self, url, data=None, headers={}, origin_req_host=None, unverifiable=None, method=None):
self.method = method and method.upper() or None
urllib2.Request.__init__(self, url, data, headers, origin_req_host, unverifiable)
def get_method(self):
if not self.method is None:
return self.method
elif self.has_data():
return "POST"
else:
return "GET"
def get_norm_sys_path():
"""
Returns a normalised path that can be used for PYTHONPATH to recreate the
path used for this invocation.
"""
sys_path = sys.path[:]
# Find the path to the first package containing the settings module.
# Once we have it, normalise it and add it to our sys_path if it isn't
# already there.
try:
project_path = imp.find_module(os.environ['DJANGO_SETTINGS_MODULE'].split('.')[0])[1]
except ImportError:
project_path = imp.find_module('settings')[1]
sys_path.insert(0, os.path.join(project_path, '..'))
sys_path = [os.path.normpath(p) for p in sys_path if p != '']
# Remove duplicates. This is O(n^2), but efficiency isn't too much of an
# issue when n is small.
sys_path = [p for i,p in enumerate(sys_path) if p not in sys_path[:i]]
return sys_path
|
# ... existing code ...
# Find the path to the first package containing the settings module.
# Once we have it, normalise it and add it to our sys_path if it isn't
# already there.
try:
project_path = imp.find_module(os.environ['DJANGO_SETTINGS_MODULE'].split('.')[0])[1]
except ImportError:
project_path = imp.find_module('settings')[1]
sys_path.insert(0, os.path.join(project_path, '..'))
sys_path = [os.path.normpath(p) for p in sys_path if p != '']
# ... rest of the code ...
|
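A short sketch of how the normalised path is typically consumed when emitting a crontab line; the command itself is illustrative.

```python
import os
from molly.utils.misc import get_norm_sys_path

# Reproduce the current interpreter's import path for a cron job.
pythonpath = os.pathsep.join(get_norm_sys_path())
cron_line = 'PYTHONPATH=%s python manage.py run_batch' % pythonpath   # illustrative command
print(cron_line)
```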
e4cb8bbffd6f60002cff8f37d42f576ba6f1891c
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name = "jpb",
version = "0.0.1",
packages = find_packages(),
#package_dir = {'':"lib"},
zip_safe = False,
entry_points = {
'console_scripts': [
'jpb_generate_source_package = jpb.cli:generate_source_package',
'jpb_build_source_package = jpb.cli:build_source_package',
'jpb_provide_package = jpb.cli:provide_package'
],
},
author = "Bernhard Miklautz",
author_email = "[email protected]",
license = "MIT",
#keywords=
#url=
)
# vim:foldmethod=marker ts=2 ft=python ai sw=2
|
from setuptools import setup, find_packages
setup(
name = "jpb",
version = "0.0.1",
packages = find_packages(),
#package_dir = {'':"lib"},
zip_safe = False,
entry_points = {
'console_scripts': [
'jpb_generate_source_package = jpb.cli:generate_source_package',
'jpb_generate_binary_package = jpb.cli:generate_binary_package',
'jpb_provide_package = jpb.cli:provide_package'
],
},
author = "Bernhard Miklautz",
author_email = "[email protected]",
license = "MIT",
#keywords=
#url=
)
# vim:foldmethod=marker ts=2 ft=python ai sw=2
|
Rename binary back to jpb_generate_binary_package
|
Rename binary back to jpb_generate_binary_package
Binary jpb_generate_binary_package was named as jpb_build_source_package
This seems to be an accidental change made in commit df61be3d
|
Python
|
mit
|
bmiklautz/jenkins-package-builder
|
python
|
## Code Before:
from setuptools import setup, find_packages
setup(
name = "jpb",
version = "0.0.1",
packages = find_packages(),
#package_dir = {'':"lib"},
zip_safe = False,
entry_points = {
'console_scripts': [
'jpb_generate_source_package = jpb.cli:generate_source_package',
'jpb_build_source_package = jpb.cli:build_source_package',
'jpb_provide_package = jpb.cli:provide_package'
],
},
author = "Bernhard Miklautz",
author_email = "[email protected]",
license = "MIT",
#keywords=
#url=
)
# vim:foldmethod=marker ts=2 ft=python ai sw=2
## Instruction:
Rename binary back to jpb_generate_binary_package
Binary jpb_generate_binary_package was named jpb_build_source_package
This seems to be an accidental change made in commit df61be3d
## Code After:
from setuptools import setup, find_packages
setup(
name = "jpb",
version = "0.0.1",
packages = find_packages(),
#package_dir = {'':"lib"},
zip_safe = False,
entry_points = {
'console_scripts': [
'jpb_generate_source_package = jpb.cli:generate_source_package',
'jpb_generate_binary_package = jpb.cli:generate_binary_package',
'jpb_provide_package = jpb.cli:provide_package'
],
},
author = "Bernhard Miklautz",
author_email = "[email protected]",
license = "MIT",
#keywords=
#url=
)
# vim:foldmethod=marker ts=2 ft=python ai sw=2
|
// ... existing code ...
entry_points = {
'console_scripts': [
'jpb_generate_source_package = jpb.cli:generate_source_package',
'jpb_generate_binary_package = jpb.cli:generate_binary_package',
'jpb_provide_package = jpb.cli:provide_package'
],
},
// ... rest of the code ...
|
5352e164b38099cbc7fe4eba87c00bc1c1d30d44
|
bluezero/eddystone.py
|
bluezero/eddystone.py
|
from bluezero import tools
from bluezero import broadcaster
class EddystoneURL:
def __init__(self, url):
service_data = tools.url_to_advert(url, 0x10, 0x00)
url_beacon = broadcaster.Beacon()
url_beacon.add_service_data('FEAA', service_data)
url_beacon.start_beacon()
|
from bluezero import tools
from bluezero import broadcaster
class EddystoneURL:
def __init__(self, url, tx_power=0x08):
"""
The Eddystone-URL frame broadcasts a URL using a compressed encoding
format in order to fit more within the limited advertisement packet.
Example:
>>> from bluezero import eddystone
>>> eddystone.EddystoneURL('https://github.com/ukBaz')
:param url: String containing URL e.g. ('http://camjam.me')
:param tx_power:
"""
service_data = tools.url_to_advert(url, 0x10, tx_power)
if len(service_data) > 17:
raise Exception('URL too long')
url_beacon = broadcaster.Beacon()
url_beacon.add_service_data('FEAA', service_data)
url_beacon.start_beacon()
|
Test for URL length error
|
Test for URL length error
|
Python
|
mit
|
ukBaz/python-bluezero,ukBaz/python-bluezero
|
python
|
## Code Before:
from bluezero import tools
from bluezero import broadcaster
class EddystoneURL:
def __init__(self, url):
service_data = tools.url_to_advert(url, 0x10, 0x00)
url_beacon = broadcaster.Beacon()
url_beacon.add_service_data('FEAA', service_data)
url_beacon.start_beacon()
## Instruction:
Test for URL length error
## Code After:
from bluezero import tools
from bluezero import broadcaster
class EddystoneURL:
def __init__(self, url, tx_power=0x08):
"""
The Eddystone-URL frame broadcasts a URL using a compressed encoding
format in order to fit more within the limited advertisement packet.
Example:
>>> from bluezero import eddystone
>>> eddystone.EddystoneURL('https://github.com/ukBaz')
:param url: String containing URL e.g. ('http://camjam.me')
:param tx_power:
"""
service_data = tools.url_to_advert(url, 0x10, tx_power)
if len(service_data) > 17:
raise Exception('URL too long')
url_beacon = broadcaster.Beacon()
url_beacon.add_service_data('FEAA', service_data)
url_beacon.start_beacon()
|
# ... existing code ...
class EddystoneURL:
def __init__(self, url, tx_power=0x08):
"""
The Eddystone-URL frame broadcasts a URL using a compressed encoding
format in order to fit more within the limited advertisement packet.
Example:
>>> from bluezero import eddystone
>>> eddystone.EddystoneURL('https://github.com/ukBaz')
:param url: String containing URL e.g. ('http://camjam.me')
:param tx_power:
"""
service_data = tools.url_to_advert(url, 0x10, tx_power)
if len(service_data) > 17:
raise Exception('URL too long')
url_beacon = broadcaster.Beacon()
url_beacon.add_service_data('FEAA', service_data)
url_beacon.start_beacon()
# ... rest of the code ...
|
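A hedged usage sketch of the new length guard; actually starting a beacon needs a BlueZ/D-Bus environment, and the URL below is simply meant to exceed the encodable length.

```python
from bluezero import eddystone

try:
    eddystone.EddystoneURL('https://example.com/a/path/far/too/long/to/fit/in/an/advert')
except Exception as exc:   # the guard currently raises a plain Exception('URL too long')
    print('Rejected before advertising:', exc)
```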
47c0feaf96969d65e8f3e3652903cc20b353103d
|
vtwt/util.py
|
vtwt/util.py
|
import re
from htmlentitydefs import name2codepoint
# From http://wiki.python.org/moin/EscapingHtml
_HTMLENT_CODEPOINT_RE = re.compile('&({0}|#\d+);'.format(
'|'.join(name2codepoint.keys())))
def recodeText(text):
def _entToUnichr(match):
ent = match.group(1)
try:
if ent.startswith("#"):
char = unichr(int(ent[1:]))
else:
char = unichr(name2codepoint[ent])
except:
char = match.group(0)
return char
return _HTMLENT_CODEPOINT_RE.sub(_entToUnichr, text)
|
import re
from htmlentitydefs import name2codepoint
# From http://wiki.python.org/moin/EscapingHtml
_HTMLENT_CODEPOINT_RE = re.compile('&({0}|#\d+);'.format(
'|'.join(name2codepoint.keys())))
def recodeText(text):
"""Parses things like & and ὔ into real characters."""
def _entToUnichr(match):
ent = match.group(1)
try:
if ent.startswith("#"):
char = unichr(int(ent[1:]))
else:
char = unichr(name2codepoint[ent])
except:
char = match.group(0)
return char
return _HTMLENT_CODEPOINT_RE.sub(_entToUnichr, text)
|
Add a comment for robin
|
Add a comment for robin
|
Python
|
bsd-3-clause
|
olix0r/vtwt
|
python
|
## Code Before:
import re
from htmlentitydefs import name2codepoint
# From http://wiki.python.org/moin/EscapingHtml
_HTMLENT_CODEPOINT_RE = re.compile('&({0}|#\d+);'.format(
'|'.join(name2codepoint.keys())))
def recodeText(text):
def _entToUnichr(match):
ent = match.group(1)
try:
if ent.startswith("#"):
char = unichr(int(ent[1:]))
else:
char = unichr(name2codepoint[ent])
except:
char = match.group(0)
return char
return _HTMLENT_CODEPOINT_RE.sub(_entToUnichr, text)
## Instruction:
Add a comment for robin
## Code After:
import re
from htmlentitydefs import name2codepoint
# From http://wiki.python.org/moin/EscapingHtml
_HTMLENT_CODEPOINT_RE = re.compile('&({0}|#\d+);'.format(
'|'.join(name2codepoint.keys())))
def recodeText(text):
"""Parses things like & and ὔ into real characters."""
def _entToUnichr(match):
ent = match.group(1)
try:
if ent.startswith("#"):
char = unichr(int(ent[1:]))
else:
char = unichr(name2codepoint[ent])
except:
char = match.group(0)
return char
return _HTMLENT_CODEPOINT_RE.sub(_entToUnichr, text)
|
// ... existing code ...
'|'.join(name2codepoint.keys())))
def recodeText(text):
"""Parses things like & and ὔ into real characters."""
def _entToUnichr(match):
ent = match.group(1)
try:
// ... rest of the code ...
|
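A quick usage sketch of the helper described by the new comment; the module targets Python 2, where unichr and htmlentitydefs exist.

```python
# -*- coding: utf-8 -*-
from vtwt.util import recodeText

print(recodeText("Fish &amp; chips <b>&upsilon;</b>"))
# -> Fish & chips <b>υ</b>   (named and numeric entities decoded to characters)
```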
6da3b2a09914546bbc29caa9f807e05f0ee66b0d
|
cybox/objects/port_object.py
|
cybox/objects/port_object.py
|
import cybox.utils as utils
import cybox.bindings.port_object as port_binding
from cybox.common import ObjectProperties, String, PositiveInteger
class Port(ObjectProperties):
_namespace = 'http://cybox.mitre.org/objects#PortObject-2'
_XSI_NS = "PortObj"
_XSI_TYPE = "PortObjectType"
def __init__(self):
super(Port, self).__init__()
self.port_value = None
self.layer4_protocol = None
def to_obj(self):
port_obj = port_binding.PortObjectType()
super(Port, self).to_obj(port_obj)
if self.port_value is not None:
port_obj.set_Port_Value(self.port_value.to_obj())
if self.layer4_protocol is not None:
port_obj.set_Layer4_Protocol(self.layer4_protocol.to_obj())
return port_obj
def to_dict(self):
port_dict = {}
super(Port, self).to_dict(port_dict)
if self.port_value is not None:
port_dict['port_value'] = self.port_value.to_dict()
if self.layer4_protocol is not None:
port_dict['layer4_protocol'] = self.layer4_protocol.to_dict()
return port_dict
@staticmethod
def from_dict(port_dict):
if not port_dict:
return None
port = Port()
ObjectProperties.from_dict(port_dict, port)
port.port_value = PositiveInteger.from_dict(port_dict.get('port_value'))
port.layer4_protocol = String.from_dict(port_dict.get('layer4_protocol'))
return port
@staticmethod
def from_obj(port_obj):
if not port_obj:
return None
port = Port()
ObjectProperties.from_obj(port_obj, port)
port.port_value = PositiveInteger.from_obj(port_obj.get_Port_Value())
port.layer4_protocol = String.from_obj(port_obj.get_Layer4_Protocol())
return port
|
import cybox
import cybox.bindings.port_object as port_binding
from cybox.common import ObjectProperties, String, PositiveInteger
class Port(ObjectProperties):
_binding = port_binding
_binding_class = port_binding.PortObjectType
_namespace = 'http://cybox.mitre.org/objects#PortObject-2'
_XSI_NS = "PortObj"
_XSI_TYPE = "PortObjectType"
port_value = cybox.TypedField("Port_Value", PositiveInteger)
layer4_protocol = cybox.TypedField("Layer4_Protocol", String)
|
Convert Port object to simpler representation
|
Convert Port object to simpler representation
|
Python
|
bsd-3-clause
|
CybOXProject/python-cybox
|
python
|
## Code Before:
import cybox.utils as utils
import cybox.bindings.port_object as port_binding
from cybox.common import ObjectProperties, String, PositiveInteger
class Port(ObjectProperties):
_namespace = 'http://cybox.mitre.org/objects#PortObject-2'
_XSI_NS = "PortObj"
_XSI_TYPE = "PortObjectType"
def __init__(self):
super(Port, self).__init__()
self.port_value = None
self.layer4_protocol = None
def to_obj(self):
port_obj = port_binding.PortObjectType()
super(Port, self).to_obj(port_obj)
if self.port_value is not None:
port_obj.set_Port_Value(self.port_value.to_obj())
if self.layer4_protocol is not None:
port_obj.set_Layer4_Protocol(self.layer4_protocol.to_obj())
return port_obj
def to_dict(self):
port_dict = {}
super(Port, self).to_dict(port_dict)
if self.port_value is not None:
port_dict['port_value'] = self.port_value.to_dict()
if self.layer4_protocol is not None:
port_dict['layer4_protocol'] = self.layer4_protocol.to_dict()
return port_dict
@staticmethod
def from_dict(port_dict):
if not port_dict:
return None
port = Port()
ObjectProperties.from_dict(port_dict, port)
port.port_value = PositiveInteger.from_dict(port_dict.get('port_value'))
port.layer4_protocol = String.from_dict(port_dict.get('layer4_protocol'))
return port
@staticmethod
def from_obj(port_obj):
if not port_obj:
return None
port = Port()
ObjectProperties.from_obj(port_obj, port)
port.port_value = PositiveInteger.from_obj(port_obj.get_Port_Value())
port.layer4_protocol = String.from_obj(port_obj.get_Layer4_Protocol())
return port
## Instruction:
Convert Port object to simpler representation
## Code After:
import cybox
import cybox.bindings.port_object as port_binding
from cybox.common import ObjectProperties, String, PositiveInteger
class Port(ObjectProperties):
_binding = port_binding
_binding_class = port_binding.PortObjectType
_namespace = 'http://cybox.mitre.org/objects#PortObject-2'
_XSI_NS = "PortObj"
_XSI_TYPE = "PortObjectType"
port_value = cybox.TypedField("Port_Value", PositiveInteger)
layer4_protocol = cybox.TypedField("Layer4_Protocol", String)
|
...
import cybox
import cybox.bindings.port_object as port_binding
from cybox.common import ObjectProperties, String, PositiveInteger
class Port(ObjectProperties):
_binding = port_binding
_binding_class = port_binding.PortObjectType
_namespace = 'http://cybox.mitre.org/objects#PortObject-2'
_XSI_NS = "PortObj"
_XSI_TYPE = "PortObjectType"
port_value = cybox.TypedField("Port_Value", PositiveInteger)
layer4_protocol = cybox.TypedField("Layer4_Protocol", String)
...
|
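The declarative form above leans on cybox.TypedField, which is a descriptor. Below is a minimal sketch of the idea only; the real implementation also drives the XML bindings and dict round-tripping.

```python
class TypedFieldSketch(object):
    """Store a value under a private attribute and coerce it to the declared type."""

    def __init__(self, name, type_=None):
        self.name = name
        self.type_ = type_
        self.attr = '_' + name.lower()

    def __get__(self, instance, owner):
        if instance is None:
            return self
        return getattr(instance, self.attr, None)

    def __set__(self, instance, value):
        if value is not None and self.type_ is not None and not isinstance(value, self.type_):
            value = self.type_(value)        # e.g. 80 -> PositiveInteger(80)
        setattr(instance, self.attr, value)
```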
85897c2bf4e4e9c89db6111894879d18fef577dd
|
app.tmpl/__init__.py
|
app.tmpl/__init__.py
|
from flask import Flask
app = Flask(__name__)
# Import anything that depended on `app`
from {{PROJECTNAME}}.views import *
from {{PROJECTNAME}}.models import *
|
from flask import Flask
from flask_login import LoginManager
app = Flask(__name__)
app.secret_key = 'default-secret-key'
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
# Import anything that depended on `app`
from {{PROJECTNAME}}.views import *
from {{PROJECTNAME}}.models import *
|
Use the login manager and set a default app secret key
|
Use the login manager and set a default app secret key
|
Python
|
mit
|
0xquad/flask-app-template,0xquad/flask-app-template,0xquad/flask-app-template
|
python
|
## Code Before:
from flask import Flask
app = Flask(__name__)
# Import anything that depended on `app`
from {{PROJECTNAME}}.views import *
from {{PROJECTNAME}}.models import *
## Instruction:
Use the login manager and set a default app secret key
## Code After:
from flask import Flask
from flask_login import LoginManager
app = Flask(__name__)
app.secret_key = 'default-secret-key'
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
# Import anything that depended on `app`
from {{PROJECTNAME}}.views import *
from {{PROJECTNAME}}.models import *
|
# ... existing code ...
from flask import Flask
from flask_login import LoginManager
app = Flask(__name__)
app.secret_key = 'default-secret-key'
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
# Import anything that depended on `app`
from {{PROJECTNAME}}.views import *
# ... rest of the code ...
|
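flask-login also needs a user loader callback so it can reload the user object from the session id; a hedged sketch of what the generated project would add, where the User model and its query style are assumptions.

```python
from {{PROJECTNAME}} import login_manager
from {{PROJECTNAME}}.models import User   # hypothetical model

@login_manager.user_loader
def load_user(user_id):
    # flask-login hands the id back as a unicode string
    return User.query.get(int(user_id))
```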
789b33f8c6d4ddad4c46e7a3815d9f9543485caa
|
usb/blueprints/api.py
|
usb/blueprints/api.py
|
from flask import Blueprint, jsonify, request
from usb.models import db, Redirect, DeviceType
from usb.shortener import get_short_id, get_short_url
api = Blueprint('api', __name__)
@api.route('/links')
def get_links():
return jsonify({}), 200
@api.route('/links', methods=['POST'])
def shorten_url():
short_id = get_short_id()
long_url = request.json['url']
for device_type in DeviceType:
db.session.add(Redirect(short_id, device_type, long_url))
db.session.commit()
short_url = get_short_url(short_id)
return jsonify(url=short_url), 200
|
from flask import Blueprint, jsonify, request
from usb.models import db, Redirect, DeviceType
from usb.shortener import get_short_id, get_short_url
api = Blueprint('api', __name__)
@api.route('/links')
def get_links():
return jsonify({}), 200
@api.route('/links', methods=['POST'])
def shorten_url():
short_id = get_short_id()
long_url = request.json['url']
redirect = Redirect.query.filter_by(url=long_url).first()
if redirect:
short_url = get_short_url(redirect.short)
return jsonify(url=short_url), 409
for device_type in DeviceType:
db.session.add(Redirect(short_id, device_type, long_url))
db.session.commit()
short_url = get_short_url(short_id)
return jsonify(url=short_url), 200
|
Return short URL if it already exists
|
Return short URL if it already exists
|
Python
|
mit
|
dizpers/usb
|
python
|
## Code Before:
from flask import Blueprint, jsonify, request
from usb.models import db, Redirect, DeviceType
from usb.shortener import get_short_id, get_short_url
api = Blueprint('api', __name__)
@api.route('/links')
def get_links():
return jsonify({}), 200
@api.route('/links', methods=['POST'])
def shorten_url():
short_id = get_short_id()
long_url = request.json['url']
for device_type in DeviceType:
db.session.add(Redirect(short_id, device_type, long_url))
db.session.commit()
short_url = get_short_url(short_id)
return jsonify(url=short_url), 200
## Instruction:
Return short URL if it already exists
## Code After:
from flask import Blueprint, jsonify, request
from usb.models import db, Redirect, DeviceType
from usb.shortener import get_short_id, get_short_url
api = Blueprint('api', __name__)
@api.route('/links')
def get_links():
return jsonify({}), 200
@api.route('/links', methods=['POST'])
def shorten_url():
short_id = get_short_id()
long_url = request.json['url']
redirect = Redirect.query.filter_by(url=long_url).first()
if redirect:
short_url = get_short_url(redirect.short)
return jsonify(url=short_url), 409
for device_type in DeviceType:
db.session.add(Redirect(short_id, device_type, long_url))
db.session.commit()
short_url = get_short_url(short_id)
return jsonify(url=short_url), 200
|
// ... existing code ...
def shorten_url():
short_id = get_short_id()
long_url = request.json['url']
redirect = Redirect.query.filter_by(url=long_url).first()
if redirect:
short_url = get_short_url(redirect.short)
return jsonify(url=short_url), 409
for device_type in DeviceType:
db.session.add(Redirect(short_id, device_type, long_url))
db.session.commit()
// ... rest of the code ...
|
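A hedged end-to-end sketch of the new duplicate handling using Flask's test client; the application object and the blueprint's mount point are assumptions, since the record only shows the blueprint.

```python
from usb.app import app          # assumed location of the Flask application

client = app.test_client()
first = client.post('/links', json={'url': 'https://example.com'})
again = client.post('/links', json={'url': 'https://example.com'})

assert first.status_code == 200
assert again.status_code == 409                              # duplicate long URL
assert first.get_json()['url'] == again.get_json()['url']    # same short URL returned
```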
0479d89ce01f064f5cf6e4b743c304cb17bfb19e
|
diffuse/src/main/kotlin/com/jakewharton/diffuse/diff/AabDiff.kt
|
diffuse/src/main/kotlin/com/jakewharton/diffuse/diff/AabDiff.kt
|
package com.jakewharton.diffuse.diff
import com.jakewharton.diffuse.Aab
import com.jakewharton.diffuse.report.DiffReport
import com.jakewharton.diffuse.report.text.AabDiffTextReport
internal class AabDiff(
val oldAab: Aab,
val newAab: Aab
) : BinaryDiff {
inner class ModuleDiff(
val oldModule: Aab.Module,
val newModule: Aab.Module
) {
val archive = ArchiveFilesDiff(oldModule.files, newModule.files)
val dex = DexDiff(oldModule.dexes, oldAab.apiMapping, newModule.dexes, newAab.apiMapping)
val manifest = ManifestDiff(oldModule.manifest, newModule.manifest)
}
val baseModule = ModuleDiff(oldAab.baseModule, newAab.baseModule)
val addedFeatureModules = newAab.featureModules.filterKeys { it !in oldAab.featureModules }
val removedFeatureModules = oldAab.featureModules.filterKeys { it !in newAab.featureModules }
val changedFeatureModules = oldAab.featureModules.filterKeys { it in newAab.featureModules }
.mapValues { (name, oldModule) ->
ModuleDiff(oldModule, newAab.featureModules.getValue(name))
}
override fun toTextReport(): DiffReport = AabDiffTextReport(this)
}
|
package com.jakewharton.diffuse.diff
import com.jakewharton.diffuse.Aab
import com.jakewharton.diffuse.report.DiffReport
import com.jakewharton.diffuse.report.text.AabDiffTextReport
internal class AabDiff(
val oldAab: Aab,
val newAab: Aab
) : BinaryDiff {
inner class ModuleDiff(
val oldModule: Aab.Module,
val newModule: Aab.Module
) {
val archive = ArchiveFilesDiff(oldModule.files, newModule.files)
val dex = DexDiff(oldModule.dexes, oldAab.apiMapping, newModule.dexes, newAab.apiMapping)
val manifest = ManifestDiff(oldModule.manifest, newModule.manifest)
val changed = archive.changed || dex.changed || manifest.changed
}
val baseModule = ModuleDiff(oldAab.baseModule, newAab.baseModule)
val addedFeatureModules = newAab.featureModules.filterKeys { it !in oldAab.featureModules }
val removedFeatureModules = oldAab.featureModules.filterKeys { it !in newAab.featureModules }
val changedFeatureModules = oldAab.featureModules.filterKeys { it in newAab.featureModules }
.mapValues { (name, oldModule) ->
ModuleDiff(oldModule, newAab.featureModules.getValue(name))
}
override fun toTextReport(): DiffReport = AabDiffTextReport(this)
}
|
Add property to determine if a ModuleDiff has changes
|
Add property to determine if a ModuleDiff has changes
|
Kotlin
|
apache-2.0
|
JakeWharton/dex-method-list,JakeWharton/dex-method-list,JakeWharton/dex-method-list
|
kotlin
|
## Code Before:
package com.jakewharton.diffuse.diff
import com.jakewharton.diffuse.Aab
import com.jakewharton.diffuse.report.DiffReport
import com.jakewharton.diffuse.report.text.AabDiffTextReport
internal class AabDiff(
val oldAab: Aab,
val newAab: Aab
) : BinaryDiff {
inner class ModuleDiff(
val oldModule: Aab.Module,
val newModule: Aab.Module
) {
val archive = ArchiveFilesDiff(oldModule.files, newModule.files)
val dex = DexDiff(oldModule.dexes, oldAab.apiMapping, newModule.dexes, newAab.apiMapping)
val manifest = ManifestDiff(oldModule.manifest, newModule.manifest)
}
val baseModule = ModuleDiff(oldAab.baseModule, newAab.baseModule)
val addedFeatureModules = newAab.featureModules.filterKeys { it !in oldAab.featureModules }
val removedFeatureModules = oldAab.featureModules.filterKeys { it !in newAab.featureModules }
val changedFeatureModules = oldAab.featureModules.filterKeys { it in newAab.featureModules }
.mapValues { (name, oldModule) ->
ModuleDiff(oldModule, newAab.featureModules.getValue(name))
}
override fun toTextReport(): DiffReport = AabDiffTextReport(this)
}
## Instruction:
Add property to determine if a ModuleDiff has changes
## Code After:
package com.jakewharton.diffuse.diff
import com.jakewharton.diffuse.Aab
import com.jakewharton.diffuse.report.DiffReport
import com.jakewharton.diffuse.report.text.AabDiffTextReport
internal class AabDiff(
val oldAab: Aab,
val newAab: Aab
) : BinaryDiff {
inner class ModuleDiff(
val oldModule: Aab.Module,
val newModule: Aab.Module
) {
val archive = ArchiveFilesDiff(oldModule.files, newModule.files)
val dex = DexDiff(oldModule.dexes, oldAab.apiMapping, newModule.dexes, newAab.apiMapping)
val manifest = ManifestDiff(oldModule.manifest, newModule.manifest)
val changed = archive.changed || dex.changed || manifest.changed
}
val baseModule = ModuleDiff(oldAab.baseModule, newAab.baseModule)
val addedFeatureModules = newAab.featureModules.filterKeys { it !in oldAab.featureModules }
val removedFeatureModules = oldAab.featureModules.filterKeys { it !in newAab.featureModules }
val changedFeatureModules = oldAab.featureModules.filterKeys { it in newAab.featureModules }
.mapValues { (name, oldModule) ->
ModuleDiff(oldModule, newAab.featureModules.getValue(name))
}
override fun toTextReport(): DiffReport = AabDiffTextReport(this)
}
|
// ... existing code ...
val archive = ArchiveFilesDiff(oldModule.files, newModule.files)
val dex = DexDiff(oldModule.dexes, oldAab.apiMapping, newModule.dexes, newAab.apiMapping)
val manifest = ManifestDiff(oldModule.manifest, newModule.manifest)
val changed = archive.changed || dex.changed || manifest.changed
}
val baseModule = ModuleDiff(oldAab.baseModule, newAab.baseModule)
// ... rest of the code ...
|
7c0fe28d061b6316017683c31b2e027c2d2f017f
|
src/lib/src/network/persistent-cookie-jar.h
|
src/lib/src/network/persistent-cookie-jar.h
|
class QNetworkCookie;
class QObject;
class QUrl;
class PersistentCookieJar : public QNetworkCookieJar
{
Q_OBJECT
public:
explicit PersistentCookieJar(QString filename, QObject *parent = nullptr);
~PersistentCookieJar();
void clear();
bool insertCookies(const QList<QNetworkCookie> &cookies);
virtual QList<QNetworkCookie> cookiesForUrl(const QUrl &url) const override;
virtual bool setCookiesFromUrl(const QList<QNetworkCookie> &cookieList, const QUrl &url) override;
protected:
void save();
void load();
private:
QString m_filename;
mutable QMutex m_mutex;
};
#endif // PERSISTENT_COOKIE_JAR_H
|
class QNetworkCookie;
class QObject;
class QUrl;
/**
* Network cookie jar which loads and stores cookies on a persistent file on disk.
*/
class PersistentCookieJar : public QNetworkCookieJar
{
Q_OBJECT
public:
/**
* Create a new persistent cookie jar.
* @param filename The full path of the file to use to load and store cookies.
* @param parent The Qt parent object.
*/
explicit PersistentCookieJar(QString filename, QObject *parent = nullptr);
/**
* Saves the cookies before destroying the instance.
*/
~PersistentCookieJar() override;
/**
* Remove all cookies from the cookie jar.
*/
void clear();
/**
* Add new cookies to the cookie jar.
* @param cookies The list of cookies to add to the cookie jar.
* @return Whether all cookies were successfully added to the cookie jar.
*/
bool insertCookies(const QList<QNetworkCookie> &cookies);
QList<QNetworkCookie> cookiesForUrl(const QUrl &url) const override;
bool setCookiesFromUrl(const QList<QNetworkCookie> &cookieList, const QUrl &url) override;
protected:
/**
* Save the cookies to the file.
*/
void save();
/**
* Load the cookies from the file.
*/
void load();
private:
QString m_filename;
mutable QMutex m_mutex;
};
#endif // PERSISTENT_COOKIE_JAR_H
|
Add documentation comments for PersistentCookieJar
|
Add documentation comments for PersistentCookieJar
|
C
|
apache-2.0
|
Bionus/imgbrd-grabber,Bionus/imgbrd-grabber,Bionus/imgbrd-grabber,Bionus/imgbrd-grabber,Bionus/imgbrd-grabber,Bionus/imgbrd-grabber
|
c
|
## Code Before:
class QNetworkCookie;
class QObject;
class QUrl;
class PersistentCookieJar : public QNetworkCookieJar
{
Q_OBJECT
public:
explicit PersistentCookieJar(QString filename, QObject *parent = nullptr);
~PersistentCookieJar();
void clear();
bool insertCookies(const QList<QNetworkCookie> &cookies);
virtual QList<QNetworkCookie> cookiesForUrl(const QUrl &url) const override;
virtual bool setCookiesFromUrl(const QList<QNetworkCookie> &cookieList, const QUrl &url) override;
protected:
void save();
void load();
private:
QString m_filename;
mutable QMutex m_mutex;
};
#endif // PERSISTENT_COOKIE_JAR_H
## Instruction:
Add documentation comments for PersistentCookieJar
## Code After:
class QNetworkCookie;
class QObject;
class QUrl;
/**
* Network cookie jar which loads and stores cookies on a persistent file on disk.
*/
class PersistentCookieJar : public QNetworkCookieJar
{
Q_OBJECT
public:
/**
* Create a new persistent cookie jar.
* @param filename The full path of the file to use to load and store cookies.
* @param parent The Qt parent object.
*/
explicit PersistentCookieJar(QString filename, QObject *parent = nullptr);
/**
* Saves the cookies before destroying the instance.
*/
~PersistentCookieJar() override;
/**
* Remove all cookies from the cookie jar.
*/
void clear();
/**
* Add new cookies to the cookie jar.
* @param cookies The list of cookies to add to the cookie jar.
* @return Whether all cookies were successfully added to the cookie jar.
*/
bool insertCookies(const QList<QNetworkCookie> &cookies);
QList<QNetworkCookie> cookiesForUrl(const QUrl &url) const override;
bool setCookiesFromUrl(const QList<QNetworkCookie> &cookieList, const QUrl &url) override;
protected:
/**
* Save the cookies to the file.
*/
void save();
/**
* Load the cookies from the file.
*/
void load();
private:
QString m_filename;
mutable QMutex m_mutex;
};
#endif // PERSISTENT_COOKIE_JAR_H
|
# ... existing code ...
class QObject;
class QUrl;
/**
* Network cookie jar which loads and stores cookies on a persistent file on disk.
*/
class PersistentCookieJar : public QNetworkCookieJar
{
Q_OBJECT
public:
/**
* Create a new persistent cookie jar.
* @param filename The full path of the file to use to load and store cookies.
* @param parent The Qt parent object.
*/
explicit PersistentCookieJar(QString filename, QObject *parent = nullptr);
/**
* Saves the cookies before destroying the instance.
*/
~PersistentCookieJar() override;
/**
* Remove all cookies from the cookie jar.
*/
void clear();
/**
* Add new cookies to the cookie jar.
* @param cookies The list of cookies to add to the cookie jar.
* @return Whether all cookies were successfully added to the cookie jar.
*/
bool insertCookies(const QList<QNetworkCookie> &cookies);
QList<QNetworkCookie> cookiesForUrl(const QUrl &url) const override;
bool setCookiesFromUrl(const QList<QNetworkCookie> &cookieList, const QUrl &url) override;
protected:
/**
* Save the cookies to the file.
*/
void save();
/**
* Load the cookies from the file.
*/
void load();
private:
# ... rest of the code ...
|
b28c037d64ac7cee7e2c7d9d33b128d62aa4df8a
|
src/gallium/drivers/r300/r300_public.h
|
src/gallium/drivers/r300/r300_public.h
|
struct radeon_winsys;
struct pipe_screen* r300_screen_create(struct radeon_winsys *rws);
#endif
|
extern "C" {
#endif
struct radeon_winsys;
struct pipe_screen* r300_screen_create(struct radeon_winsys *rws);
#ifdef __cplusplus
} // extern "C"
#endif
#endif
|
Fix build, invalid extern "C" around header inclusion.
|
r300g: Fix build, invalid extern "C" around header inclusion.
A previous patch to fix header inclusion within extern "C" neglected
to fix the occurrences of this pattern in r300 files.
When the helper to detect this issue was pushed to master, it broke
the build for the r300 driver. This patch fixes the r300 build.
Bugzilla: https://bugs.freedesktop.org/show_bug.cgi?id=89477
Reviewed-by: Ilia Mirkin <[email protected]>
Reviewed-by: Jose Fonseca <[email protected]>
|
C
|
mit
|
metora/MesaGLSLCompiler,metora/MesaGLSLCompiler,metora/MesaGLSLCompiler
|
c
|
## Code Before:
struct radeon_winsys;
struct pipe_screen* r300_screen_create(struct radeon_winsys *rws);
#endif
## Instruction:
r300g: Fix build, invalid extern "C" around header inclusion.
A previous patch to fix header inclusion within extern "C" neglected
to fix the occurrences of this pattern in r300 files.
When the helper to detect this issue was pushed to master, it broke
the build for the r300 driver. This patch fixes the r300 build.
Bugzilla: https://bugs.freedesktop.org/show_bug.cgi?id=89477
Reviewed-by: Ilia Mirkin <[email protected]>
Reviewed-by: Jose Fonseca <[email protected]>
## Code After:
extern "C" {
#endif
struct radeon_winsys;
struct pipe_screen* r300_screen_create(struct radeon_winsys *rws);
#ifdef __cplusplus
} // extern "C"
#endif
#endif
|
...
extern "C" {
#endif
struct radeon_winsys;
struct pipe_screen* r300_screen_create(struct radeon_winsys *rws);
#ifdef __cplusplus
} // extern "C"
#endif
#endif
...
|
31c0863d088488da5dd85e2cbe3c01c6b01aa4a2
|
system_tests/test_default.py
|
system_tests/test_default.py
|
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
else:
assert project_id is None
verify_refresh(credentials)
|
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
verify_refresh(credentials)
|
Fix system tests when running on GCE
|
Fix system tests when running on GCE
The new project ID logic for Cloud SDK invokes Cloud SDK directly. Cloud SDK helpfully falls back to the GCE project ID if the project ID is unset in the configuration. This breaks one of our previous expectations.
|
Python
|
apache-2.0
|
googleapis/google-auth-library-python,googleapis/google-auth-library-python
|
python
|
## Code Before:
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
else:
assert project_id is None
verify_refresh(credentials)
## Instruction:
Fix system tests when running on GCE
The new project ID logic for Cloud SDK invokes Cloud SDK directly. Cloud SDK helpfully falls back to the GCE project ID if the project ID is unset in the configuration. This breaks one of our previous expectations.
## Code After:
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
verify_refresh(credentials)
|
...
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
verify_refresh(credentials)
...
|
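For context, a minimal sketch of what the verify_refresh fixture presumably does with the credentials returned by google.auth.default(); it uses the public transport API.

```python
import google.auth
import google.auth.transport.requests

credentials, project_id = google.auth.default()
request = google.auth.transport.requests.Request()
credentials.refresh(request)      # obtain an access token
assert credentials.valid
```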
09bc3137328fbefe41044b5124f3c6a7abaa8982
|
wqflask/tests/base/test_general_object.py
|
wqflask/tests/base/test_general_object.py
|
import unittest
from base.GeneralObject import GeneralObject
class TestGeneralObjectTests(unittest.TestCase):
"""
Test the GeneralObject base class
"""
def test_object_contents(self):
"""Test whether base contents are stored properly"""
test_obj = GeneralObject("a", "b", "c")
self.assertEqual("abc", ''.join(test_obj.contents))
self.assertEqual(len(test_obj), 0)
def test_object_dict(self):
"""Test whether the base class is printed properly"""
test_obj = GeneralObject("a", name="test", value=1)
self.assertEqual(str(test_obj), "value = 1\nname = test\n")
self.assertEqual(
repr(test_obj), "value = 1\nname = test\ncontents = ['a']\n")
self.assertEqual(len(test_obj), 2)
self.assertEqual(getattr(test_obj, "value"), 1)
self.assertEqual(test_obj["value"], 1)
test_obj["test"] = 1
self.assertEqual(test_obj["test"], 1)
|
import unittest

from base.GeneralObject import GeneralObject


class TestGeneralObjectTests(unittest.TestCase):
    """
    Test the GeneralObject base class
    """

    def test_object_contents(self):
        """Test whether base contents are stored properly"""
        test_obj = GeneralObject("a", "b", "c")
        self.assertEqual("abc", ''.join(test_obj.contents))
        self.assertEqual(len(test_obj), 0)

    def test_object_dict(self):
        """Test whether the base class is printed properly"""
        test_obj = GeneralObject("a", name="test", value=1)
        self.assertEqual(str(test_obj), "value = 1\nname = test\n")
        self.assertEqual(
            repr(test_obj), "value = 1\nname = test\ncontents = ['a']\n")
        self.assertEqual(len(test_obj), 2)
        self.assertEqual(test_obj["value"], 1)
        test_obj["test"] = 1
        self.assertEqual(test_obj["test"], 1)

    def test_get_attribute(self):
        "Test that getattr works"
        test_obj = GeneralObject("a", name="test", value=1)
        self.assertEqual(getattr(test_obj, "value", None), 1)
        self.assertEqual(getattr(test_obj, "non-existent", None), None)

    def test_object_comparisons(self):
        "Test that 2 objects of the same length are equal"
        test_obj1 = GeneralObject("a", name="test", value=1)
        test_obj2 = GeneralObject("b", name="test2", value=2)
        test_obj3 = GeneralObject("a", name="test", x=1, y=2)
        self.assertTrue(test_obj1 == test_obj2)
        self.assertFalse(test_obj1 == test_obj3)
|
Add more tests for general_object
|
Add more tests for general_object
* wqflask/tests/base/test_general_object.py: test getattr() and `==`
|
Python
|
agpl-3.0
|
genenetwork/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,zsloan/genenetwork2,zsloan/genenetwork2,zsloan/genenetwork2
|
python
|
## Code Before:
import unittest

from base.GeneralObject import GeneralObject


class TestGeneralObjectTests(unittest.TestCase):
    """
    Test the GeneralObject base class
    """

    def test_object_contents(self):
        """Test whether base contents are stored properly"""
        test_obj = GeneralObject("a", "b", "c")
        self.assertEqual("abc", ''.join(test_obj.contents))
        self.assertEqual(len(test_obj), 0)

    def test_object_dict(self):
        """Test whether the base class is printed properly"""
        test_obj = GeneralObject("a", name="test", value=1)
        self.assertEqual(str(test_obj), "value = 1\nname = test\n")
        self.assertEqual(
            repr(test_obj), "value = 1\nname = test\ncontents = ['a']\n")
        self.assertEqual(len(test_obj), 2)
        self.assertEqual(getattr(test_obj, "value"), 1)
        self.assertEqual(test_obj["value"], 1)
        test_obj["test"] = 1
        self.assertEqual(test_obj["test"], 1)
## Instruction:
Add more tests for general_object
* wqflask/tests/base/test_general_object.py: test getattr() and `==`
## Code After:
import unittest

from base.GeneralObject import GeneralObject


class TestGeneralObjectTests(unittest.TestCase):
    """
    Test the GeneralObject base class
    """

    def test_object_contents(self):
        """Test whether base contents are stored properly"""
        test_obj = GeneralObject("a", "b", "c")
        self.assertEqual("abc", ''.join(test_obj.contents))
        self.assertEqual(len(test_obj), 0)

    def test_object_dict(self):
        """Test whether the base class is printed properly"""
        test_obj = GeneralObject("a", name="test", value=1)
        self.assertEqual(str(test_obj), "value = 1\nname = test\n")
        self.assertEqual(
            repr(test_obj), "value = 1\nname = test\ncontents = ['a']\n")
        self.assertEqual(len(test_obj), 2)
        self.assertEqual(test_obj["value"], 1)
        test_obj["test"] = 1
        self.assertEqual(test_obj["test"], 1)

    def test_get_attribute(self):
        "Test that getattr works"
        test_obj = GeneralObject("a", name="test", value=1)
        self.assertEqual(getattr(test_obj, "value", None), 1)
        self.assertEqual(getattr(test_obj, "non-existent", None), None)

    def test_object_comparisons(self):
        "Test that 2 objects of the same length are equal"
        test_obj1 = GeneralObject("a", name="test", value=1)
        test_obj2 = GeneralObject("b", name="test2", value=2)
        test_obj3 = GeneralObject("a", name="test", x=1, y=2)
        self.assertTrue(test_obj1 == test_obj2)
        self.assertFalse(test_obj1 == test_obj3)
|
# ... existing code ...
        self.assertEqual(
            repr(test_obj), "value = 1\nname = test\ncontents = ['a']\n")
        self.assertEqual(len(test_obj), 2)
        self.assertEqual(test_obj["value"], 1)
        test_obj["test"] = 1
        self.assertEqual(test_obj["test"], 1)

    def test_get_attribute(self):
        "Test that getattr works"
        test_obj = GeneralObject("a", name="test", value=1)
        self.assertEqual(getattr(test_obj, "value", None), 1)
        self.assertEqual(getattr(test_obj, "non-existent", None), None)

    def test_object_comparisons(self):
        "Test that 2 objects of the same length are equal"
        test_obj1 = GeneralObject("a", name="test", value=1)
        test_obj2 = GeneralObject("b", name="test2", value=2)
        test_obj3 = GeneralObject("a", name="test", x=1, y=2)
        self.assertTrue(test_obj1 == test_obj2)
        self.assertFalse(test_obj1 == test_obj3)
# ... rest of the code ...
|
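The two new tests above exercise getattr() and equality on GeneralObject without showing the class itself. A minimal stand-in that would satisfy them — inferred from the assertions alone, not taken from the genenetwork source, and omitting the str()/repr() formatting checked elsewhere — might look like this:

class GeneralObject:
    """Bare-bones stand-in inferred from the tests; not the real class."""

    def __init__(self, *args, **kwargs):
        self.contents = list(args)    # positional args are stored verbatim
        self.__dict__.update(kwargs)  # keyword args become attributes

    def __setitem__(self, key, value):
        setattr(self, key, value)

    def __getitem__(self, key):
        return getattr(self, key)

    def __len__(self):
        # 'contents' itself is not counted, matching len(test_obj) == 0 above
        return len(self.__dict__) - 1

    def __eq__(self, other):
        # equality by attribute count only, as test_object_comparisons expects
        return len(self) == len(other)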
1f797788c1e8230e260ef394fbf0a98278816992
|
app/src/main/java/miwax/java_conf/gr/jp/frugalitycalc/model/Operation.java
|
app/src/main/java/miwax/java_conf/gr/jp/frugalitycalc/model/Operation.java
|
package miwax.java_conf.gr.jp.frugalitycalc.model;

import java.math.BigDecimal;

public enum Operation {
    PLUS { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.add(y);} },
    MINUS { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.subtract(y);} },
    MULTIPLE { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.multiply(y);} },
    DIVIDE { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.divide(y, 8, BigDecimal.ROUND_HALF_UP);} };

    abstract BigDecimal apply(BigDecimal x, BigDecimal y);
}
|
package miwax.java_conf.gr.jp.frugalitycalc.model;

import java.math.BigDecimal;
import java.math.MathContext;

public enum Operation {
    PLUS { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.add(y);} },
    MINUS { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.subtract(y);} },
    MULTIPLE { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.multiply(y);} },
    DIVIDE { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.divide(y, MathContext.DECIMAL128);} };

    abstract BigDecimal apply(BigDecimal x, BigDecimal y);
}
|
Change the accuracy of the division to the IEEE 754R Decimal128
|
Change the accuracy of the division to the IEEE 754R Decimal128
|
Java
|
mit
|
tomoya0x00/FrugalityCalc
|
java
|
## Code Before:
package miwax.java_conf.gr.jp.frugalitycalc.model;

import java.math.BigDecimal;

public enum Operation {
    PLUS { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.add(y);} },
    MINUS { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.subtract(y);} },
    MULTIPLE { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.multiply(y);} },
    DIVIDE { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.divide(y, 8, BigDecimal.ROUND_HALF_UP);} };

    abstract BigDecimal apply(BigDecimal x, BigDecimal y);
}
## Instruction:
Change the accuracy of the division to the IEEE 754R Decimal128
## Code After:
package miwax.java_conf.gr.jp.frugalitycalc.model;

import java.math.BigDecimal;
import java.math.MathContext;

public enum Operation {
    PLUS { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.add(y);} },
    MINUS { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.subtract(y);} },
    MULTIPLE { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.multiply(y);} },
    DIVIDE { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.divide(y, MathContext.DECIMAL128);} };

    abstract BigDecimal apply(BigDecimal x, BigDecimal y);
}
|
...
package miwax.java_conf.gr.jp.frugalitycalc.model;

import java.math.BigDecimal;
import java.math.MathContext;

public enum Operation {
    PLUS { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.add(y);} },
    MINUS { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.subtract(y);} },
    MULTIPLE { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.multiply(y);} },
    DIVIDE { BigDecimal apply(BigDecimal x, BigDecimal y) {return x.divide(y, MathContext.DECIMAL128);} };

    abstract BigDecimal apply(BigDecimal x, BigDecimal y);
}
...
|
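To see what the switch to MathContext.DECIMAL128 buys, compare the two division settings on a non-terminating quotient. The illustration below uses Python's decimal module, whose 34-digit precision corresponds to the IEEE 754 Decimal128 format; it is an analogy chosen for brevity, not code from the app.

from decimal import Decimal, Context, ROUND_HALF_UP

one, three = Decimal(1), Decimal(3)

# Old behaviour: quotient trimmed to 8 digits after the point, half-up.
print((one / three).quantize(Decimal("1e-8"), rounding=ROUND_HALF_UP))
# -> 0.33333333

# New behaviour: 34 significant digits, the Decimal128 precision.
print(Context(prec=34).divide(one, three))
# -> 0.3333... (thirty-four significant digits)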