| column | dtype | stats |
|---|---|---|
| commit | string | lengths 40–40 |
| old_file | string | lengths 4–234 |
| new_file | string | lengths 4–234 |
| old_contents | string | lengths 10–3.01k |
| new_contents | string | lengths 19–3.38k |
| subject | string | lengths 16–736 |
| message | string | lengths 17–2.63k |
| lang | string | 4 classes |
| license | string | 13 classes |
| repos | string | lengths 5–82.6k |
| config | string | 4 classes |
| content | string | lengths 134–4.41k |
| fuzzy_diff | string | lengths 29–3.44k |

2613ca001c483fa3cae37389724929704c127f47
|
finagle-core/src/main/java/com/twitter/finagle/service/RetryBudgets.java
|
finagle-core/src/main/java/com/twitter/finagle/service/RetryBudgets.java
|
package com.twitter.finagle.service;
import com.twitter.util.Duration;
import com.twitter.util.Stopwatch$;
/**
* Java APIs for {@link RetryBudget}.
*/
public final class RetryBudgets {
private RetryBudgets() {
throw new IllegalStateException();
}
/**
* See {@link RetryBudget$#Empty()}
*/
public static final RetryBudget EMPTY = RetryBudget$.MODULE$.Empty();
/**
* See {@link RetryBudget$#Infinite()}
*/
public static final RetryBudget INFINITE = RetryBudget$.MODULE$.Infinite();
/**
* See {@link RetryBudget$#apply()}
*/
public static RetryBudget newRetryBudget() {
return RetryBudget$.MODULE$.apply();
}
/**
* See {@link RetryBudget$#apply()}
*/
public static RetryBudget newRetryBudget(
Duration ttl,
int minRetriesPerSec,
double percentCanRetry
) {
return RetryBudget$.MODULE$.apply(
ttl,
minRetriesPerSec,
percentCanRetry,
Stopwatch$.MODULE$.systemMillis());
}
}
|
package com.twitter.finagle.service;
import com.twitter.util.Duration;
import com.twitter.util.Stopwatches;
/**
* Java APIs for {@link RetryBudget}.
*/
public final class RetryBudgets {
private RetryBudgets() {
throw new IllegalStateException();
}
/**
* See {@link RetryBudget$#Empty()}
*/
public static final RetryBudget EMPTY = RetryBudget$.MODULE$.Empty();
/**
* See {@link RetryBudget$#Infinite()}
*/
public static final RetryBudget INFINITE = RetryBudget$.MODULE$.Infinite();
/**
* See {@link RetryBudget$#apply()}
*/
public static RetryBudget newRetryBudget() {
return RetryBudget$.MODULE$.apply();
}
/**
* See {@link RetryBudget$#apply()}
*/
public static RetryBudget newRetryBudget(
Duration ttl,
int minRetriesPerSec,
double percentCanRetry
) {
return RetryBudget$.MODULE$.apply(
ttl,
minRetriesPerSec,
percentCanRetry,
Stopwatches.systemMillis());
}
}
|
Add Java friendly API for Stopwatch
|
util-core: Add Java friendly API for Stopwatch
Problem
The `c.t.u.Stopwatch` companion object does not have a Java friendly API.
Solution
Introduce `c.t.u.Stopwatches` for Java users.
Result
Less `MODULE$s`.
RB_ID=808474
TBR=true
|
Java
|
apache-2.0
|
sveinnfannar/finagle,mkhq/finagle,adriancole/finagle,spockz/finagle,sveinnfannar/finagle,adriancole/finagle,luciferous/finagle,lukiano/finagle,BuoyantIO/finagle,lukiano/finagle,koshelev/finagle,twitter/finagle,adriancole/finagle,spockz/finagle,BuoyantIO/finagle,koshelev/finagle,koshelev/finagle,spockz/finagle,luciferous/finagle,mkhq/finagle,adriancole/finagle,lukiano/finagle,luciferous/finagle,sveinnfannar/finagle,twitter/finagle,BuoyantIO/finagle,luciferous/finagle,koshelev/finagle,BuoyantIO/finagle,koshelev/finagle,BuoyantIO/finagle,twitter/finagle,luciferous/finagle,sveinnfannar/finagle,twitter/finagle,spockz/finagle,mkhq/finagle,lukiano/finagle,sveinnfannar/finagle,mkhq/finagle,adriancole/finagle,spockz/finagle,lukiano/finagle,luciferous/finagle,sveinnfannar/finagle,BuoyantIO/finagle,mkhq/finagle,spockz/finagle,adriancole/finagle,mkhq/finagle,lukiano/finagle,koshelev/finagle,twitter/finagle
|
java
|
## Code Before:
package com.twitter.finagle.service;
import com.twitter.util.Duration;
import com.twitter.util.Stopwatch$;
/**
* Java APIs for {@link RetryBudget}.
*/
public final class RetryBudgets {
private RetryBudgets() {
throw new IllegalStateException();
}
/**
* See {@link RetryBudget$#Empty()}
*/
public static final RetryBudget EMPTY = RetryBudget$.MODULE$.Empty();
/**
* See {@link RetryBudget$#Infinite()}
*/
public static final RetryBudget INFINITE = RetryBudget$.MODULE$.Infinite();
/**
* See {@link RetryBudget$#apply()}
*/
public static RetryBudget newRetryBudget() {
return RetryBudget$.MODULE$.apply();
}
/**
* See {@link RetryBudget$#apply()}
*/
public static RetryBudget newRetryBudget(
Duration ttl,
int minRetriesPerSec,
double percentCanRetry
) {
return RetryBudget$.MODULE$.apply(
ttl,
minRetriesPerSec,
percentCanRetry,
Stopwatch$.MODULE$.systemMillis());
}
}
## Instruction:
util-core: Add Java friendly API for Stopwatch
Problem
The `c.t.u.Stopwatch` companion object does not have a Java friendly API.
Solution
Introduce `c.t.u.Stopwatches` for Java users.
Result
Less `MODULE$s`.
RB_ID=808474
TBR=true
## Code After:
package com.twitter.finagle.service;
import com.twitter.util.Duration;
import com.twitter.util.Stopwatches;
/**
* Java APIs for {@link RetryBudget}.
*/
public final class RetryBudgets {
private RetryBudgets() {
throw new IllegalStateException();
}
/**
* See {@link RetryBudget$#Empty()}
*/
public static final RetryBudget EMPTY = RetryBudget$.MODULE$.Empty();
/**
* See {@link RetryBudget$#Infinite()}
*/
public static final RetryBudget INFINITE = RetryBudget$.MODULE$.Infinite();
/**
* See {@link RetryBudget$#apply()}
*/
public static RetryBudget newRetryBudget() {
return RetryBudget$.MODULE$.apply();
}
/**
* See {@link RetryBudget$#apply()}
*/
public static RetryBudget newRetryBudget(
Duration ttl,
int minRetriesPerSec,
double percentCanRetry
) {
return RetryBudget$.MODULE$.apply(
ttl,
minRetriesPerSec,
percentCanRetry,
Stopwatches.systemMillis());
}
}
|
...
package com.twitter.finagle.service;
import com.twitter.util.Duration;
import com.twitter.util.Stopwatches;
/**
* Java APIs for {@link RetryBudget}.
...
ttl,
minRetriesPerSec,
percentCanRetry,
Stopwatches.systemMillis());
}
}
...
|
2e63438deb6f733e7e905f4ea299aa0bdce88b3c
|
changes/api/author_build_index.py
|
changes/api/author_build_index.py
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project'),
joinedload('author'),
joinedload('source').joinedload('revision'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from uuid import UUID
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return None
return Author.query.filter_by(email=user.email).first()
try:
author_id = UUID(author_id)
except ValueError:
return None
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return '', 404
queryset = Build.query.options(
joinedload('project'),
joinedload('author'),
joinedload('source').joinedload('revision'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
Validate author_id and return 404 for missing data
|
Validate author_id and return 404 for missing data
|
Python
|
apache-2.0
|
wfxiang08/changes,dropbox/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes
|
python
|
## Code Before:
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project'),
joinedload('author'),
joinedload('source').joinedload('revision'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
## Instruction:
Validate author_id and return 404 for missing data
## Code After:
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from uuid import UUID
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return None
return Author.query.filter_by(email=user.email).first()
try:
author_id = UUID(author_id)
except ValueError:
return None
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return '', 404
queryset = Build.query.options(
joinedload('project'),
joinedload('author'),
joinedload('source').joinedload('revision'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
// ... existing code ...
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from uuid import UUID
from changes.api.base import APIView
from changes.api.auth import get_current_user
// ... modified code ...
if author_id == 'me':
user = get_current_user()
if user is None:
return None
return Author.query.filter_by(email=user.email).first()
try:
author_id = UUID(author_id)
except ValueError:
return None
return Author.query.get(author_id)
def get(self, author_id):
...
author = self._get_author(author_id)
if not author:
return '', 404
queryset = Build.query.options(
joinedload('project'),
// ... rest of the code ...
|
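A standalone sketch of the UUID validation pattern added in this record (standard library only, values are illustrative): an arbitrary string such as 'me' raises ValueError, while a well-formed hex string parses, which is what lets the view answer 404 instead of erroring on bad input.

```python
from uuid import UUID

def parse_author_id(author_id):
    """Mirror the validation above: a UUID on success, None otherwise."""
    try:
        return UUID(author_id)
    except ValueError:
        return None

print(parse_author_id("me"))                                # None
print(parse_author_id("12345678123456781234567812345678"))  # UUID('12345678-1234-5678-1234-567812345678')
```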
7dd228d7eaad6b1f37ff3c4d954aebe0ffa99170
|
tests/test_targets/test_targets.py
|
tests/test_targets/test_targets.py
|
import os
from unittest import TestCase
from project_generator_definitions.definitions import ProGenTargets
class TestAllTargets(TestCase):
"""test all targets"""
def setUp(self):
self.progen_target = ProGenTargets()
self.targets_list = self.progen_target.get_targets()
def test_targets_validity(self):
for target in self.targets_list:
record = self.progen_target.get_target_record(target)
assert record['target']['name'][0]
assert record['target']['mcu'][0]
def test_targets_mcu_validity(self):
for target in self.targets_list:
mcu = self.progen_target.get_mcu_record(target)
assert mcu['mcu']
assert mcu['mcu']['name']
assert mcu['mcu']['core']
|
from unittest import TestCase
from project_generator_definitions.definitions import ProGenTargets
class TestAllTargets(TestCase):
"""test all targets"""
def setUp(self):
self.progen_target = ProGenTargets()
self.targets_list = self.progen_target.get_targets()
def test_targets_validity(self):
# Cehck for required info for targets
for target in self.targets_list:
record = self.progen_target.get_target_record(target)
assert record['target']['name'][0]
assert record['target']['mcu'][0]
def test_targets_mcu_validity(self):
# Check for required info in mcu
for target in self.targets_list:
mcu = self.progen_target.get_mcu_record(target)
assert mcu['mcu'][0]
assert mcu['mcu']['name'][0]
assert mcu['mcu']['core'][0]
|
Test - targets test fix mcu validity indexes
|
Test - targets test fix mcu validity indexes
|
Python
|
apache-2.0
|
project-generator/project_generator_definitions,0xc0170/project_generator_definitions,ohagendorf/project_generator_definitions
|
python
|
## Code Before:
import os
from unittest import TestCase
from project_generator_definitions.definitions import ProGenTargets
class TestAllTargets(TestCase):
"""test all targets"""
def setUp(self):
self.progen_target = ProGenTargets()
self.targets_list = self.progen_target.get_targets()
def test_targets_validity(self):
for target in self.targets_list:
record = self.progen_target.get_target_record(target)
assert record['target']['name'][0]
assert record['target']['mcu'][0]
def test_targets_mcu_validity(self):
for target in self.targets_list:
mcu = self.progen_target.get_mcu_record(target)
assert mcu['mcu']
assert mcu['mcu']['name']
assert mcu['mcu']['core']
## Instruction:
Test - targets test fix mcu validity indexes
## Code After:
from unittest import TestCase
from project_generator_definitions.definitions import ProGenTargets
class TestAllTargets(TestCase):
"""test all targets"""
def setUp(self):
self.progen_target = ProGenTargets()
self.targets_list = self.progen_target.get_targets()
def test_targets_validity(self):
# Cehck for required info for targets
for target in self.targets_list:
record = self.progen_target.get_target_record(target)
assert record['target']['name'][0]
assert record['target']['mcu'][0]
def test_targets_mcu_validity(self):
# Check for required info in mcu
for target in self.targets_list:
mcu = self.progen_target.get_mcu_record(target)
assert mcu['mcu'][0]
assert mcu['mcu']['name'][0]
assert mcu['mcu']['core'][0]
|
// ... existing code ...
from unittest import TestCase
// ... modified code ...
self.targets_list = self.progen_target.get_targets()
def test_targets_validity(self):
# Cehck for required info for targets
for target in self.targets_list:
record = self.progen_target.get_target_record(target)
assert record['target']['name'][0]
...
assert record['target']['mcu'][0]
def test_targets_mcu_validity(self):
# Check for required info in mcu
for target in self.targets_list:
mcu = self.progen_target.get_mcu_record(target)
assert mcu['mcu'][0]
assert mcu['mcu']['name'][0]
assert mcu['mcu']['core'][0]
// ... rest of the code ...
|
7461666cde3c0206058d10f2341e0a57bf33e504
|
src/renderers/status.py
|
src/renderers/status.py
|
from flask import Blueprint
from models import db, Status
import json
status_renderer = Blueprint('status', __name__)
@status_renderer.route('/status/<int:user_id>')
def get_tweet(user_id):
status = db.session.query(Status).order_by(Status.id.desc()).one()
return json.dumps({'type' : 'text',
'status_text' : status.status_text,
'posted_by' : status.posted_by,
'image_url' : status.pic_url,
'profile_pic': status.profile_pic
})
|
from flask import Blueprint
from models import db, Status
import json
status_renderer = Blueprint('status', __name__)
@status_renderer.route('/status')
def get_status():
status = db.session.query(Status).order_by(Status.id.desc()).one()
return json.dumps({'type' : 'text',
'status_text' : status.status_text,
'posted_by' : status.posted_by,
'image_url' : status.pic_url,
'profile_pic': status.profile_pic
})
|
Remove user_id requirement for FB endpoint
|
Remove user_id requirement for FB endpoint
|
Python
|
mit
|
ndm25/notifyable
|
python
|
## Code Before:
from flask import Blueprint
from models import db, Status
import json
status_renderer = Blueprint('status', __name__)
@status_renderer.route('/status/<int:user_id>')
def get_tweet(user_id):
status = db.session.query(Status).order_by(Status.id.desc()).one()
return json.dumps({'type' : 'text',
'status_text' : status.status_text,
'posted_by' : status.posted_by,
'image_url' : status.pic_url,
'profile_pic': status.profile_pic
})
## Instruction:
Remove user_id requirement for FB endpoint
## Code After:
from flask import Blueprint
from models import db, Status
import json
status_renderer = Blueprint('status', __name__)
@status_renderer.route('/status')
def get_status():
status = db.session.query(Status).order_by(Status.id.desc()).one()
return json.dumps({'type' : 'text',
'status_text' : status.status_text,
'posted_by' : status.posted_by,
'image_url' : status.pic_url,
'profile_pic': status.profile_pic
})
|
...
status_renderer = Blueprint('status', __name__)
@status_renderer.route('/status')
def get_status():
status = db.session.query(Status).order_by(Status.id.desc()).one()
return json.dumps({'type' : 'text',
'status_text' : status.status_text,
...
|
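A hedged, self-contained sketch of exercising the now-parameterless route with Flask's test client; the payload is a stand-in rather than the project's Status model.

```python
from flask import Flask, Blueprint
import json

status_renderer = Blueprint('status', __name__)

@status_renderer.route('/status')
def get_status():
    # Stand-in payload; the real view serializes the latest Status row.
    return json.dumps({'type': 'text', 'status_text': 'hello'})

app = Flask(__name__)
app.register_blueprint(status_renderer)

with app.test_client() as client:
    resp = client.get('/status')    # no user_id segment in the URL any more
    print(resp.status_code, resp.get_data(as_text=True))
```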
320d9b7d54a720d4685b27d6d5b646f130265c5a
|
modules/views-dsl/build.gradle.kts
|
modules/views-dsl/build.gradle.kts
|
/*
* Copyright 2019-2021 Louis Cognault Ayeva Derman. Use of this source code is governed by the Apache 2.0 license.
*/
plugins {
id("com.android.library")
kotlin("multiplatform")
publish
}
android {
setDefaults(generateBuildConfig = true)
}
kotlin {
android()
configure(targets) { configureMavenPublication(publishReleaseVariantOnly = false) }
sourceSets {
commonMain.dependencies {
api(splitties("experimental"))
}
androidMain.dependencies {
api(splitties("views"))
api(AndroidX.annotation)
implementation(splitties("collections"))
implementation(splitties("exceptions"))
implementation(splitties("appctx"))
}
all {
languageSettings.apply {
optIn("kotlin.contracts.ExperimentalContracts")
optIn("splitties.experimental.InternalSplittiesApi")
}
}
}
}
tasks.withType<org.jetbrains.kotlin.gradle.tasks.KotlinCompile> {
kotlinOptions.useOldBackend = true //TODO: Remove when https://youtrack.jetbrains.com/issue/KT-44972 is addressed.
// See this comment on why it's needed: https://youtrack.jetbrains.com/issue/KT-44972#focus=Comments-27-5014161.0-0
}
|
/*
* Copyright 2019-2021 Louis Cognault Ayeva Derman. Use of this source code is governed by the Apache 2.0 license.
*/
plugins {
id("com.android.library")
kotlin("multiplatform")
publish
}
android {
setDefaults(generateBuildConfig = true)
}
kotlin {
android()
configure(targets) { configureMavenPublication(publishReleaseVariantOnly = false) }
sourceSets {
commonMain.dependencies {
api(splitties("experimental"))
}
androidMain.dependencies {
api(splitties("views"))
api(AndroidX.annotation)
implementation(splitties("collections"))
implementation(splitties("exceptions"))
implementation(splitties("appctx"))
}
all {
languageSettings.apply {
optIn("kotlin.contracts.ExperimentalContracts")
optIn("splitties.experimental.InternalSplittiesApi")
}
}
}
}
|
Remove no longer needed workaround that used old Kotlin backend
|
Remove no longer needed workaround that used old Kotlin backend
|
Kotlin
|
apache-2.0
|
LouisCAD/Splitties,LouisCAD/Splitties,LouisCAD/Splitties
|
kotlin
|
## Code Before:
/*
* Copyright 2019-2021 Louis Cognault Ayeva Derman. Use of this source code is governed by the Apache 2.0 license.
*/
plugins {
id("com.android.library")
kotlin("multiplatform")
publish
}
android {
setDefaults(generateBuildConfig = true)
}
kotlin {
android()
configure(targets) { configureMavenPublication(publishReleaseVariantOnly = false) }
sourceSets {
commonMain.dependencies {
api(splitties("experimental"))
}
androidMain.dependencies {
api(splitties("views"))
api(AndroidX.annotation)
implementation(splitties("collections"))
implementation(splitties("exceptions"))
implementation(splitties("appctx"))
}
all {
languageSettings.apply {
optIn("kotlin.contracts.ExperimentalContracts")
optIn("splitties.experimental.InternalSplittiesApi")
}
}
}
}
tasks.withType<org.jetbrains.kotlin.gradle.tasks.KotlinCompile> {
kotlinOptions.useOldBackend = true //TODO: Remove when https://youtrack.jetbrains.com/issue/KT-44972 is addressed.
// See this comment on why it's needed: https://youtrack.jetbrains.com/issue/KT-44972#focus=Comments-27-5014161.0-0
}
## Instruction:
Remove no longer needed workaround that used old Kotlin backend
## Code After:
/*
* Copyright 2019-2021 Louis Cognault Ayeva Derman. Use of this source code is governed by the Apache 2.0 license.
*/
plugins {
id("com.android.library")
kotlin("multiplatform")
publish
}
android {
setDefaults(generateBuildConfig = true)
}
kotlin {
android()
configure(targets) { configureMavenPublication(publishReleaseVariantOnly = false) }
sourceSets {
commonMain.dependencies {
api(splitties("experimental"))
}
androidMain.dependencies {
api(splitties("views"))
api(AndroidX.annotation)
implementation(splitties("collections"))
implementation(splitties("exceptions"))
implementation(splitties("appctx"))
}
all {
languageSettings.apply {
optIn("kotlin.contracts.ExperimentalContracts")
optIn("splitties.experimental.InternalSplittiesApi")
}
}
}
}
|
# ... existing code ...
}
}
}
# ... rest of the code ...
|
e07b2e24cddc8a2e2d1c8838e8509b2009344714
|
util/BaseModel.py
|
util/BaseModel.py
|
from google.appengine.ext import ndb
class BaseModel(ndb.Model):
date_created = ndb.DateTimeProperty(auto_now_add=True, required=True)
date_modified = ndb.DateTimeProperty(auto_now=True, required=True)
|
from google.appengine.ext import ndb
class BaseModel(ndb.Model):
date_created = ndb.DateTimeProperty(auto_now_add=True, required=True)
date_modified = ndb.DateTimeProperty(auto_now=True, required=True)
@classmethod
def from_urlsafe(cls, urlsafe):
key = ndb.Key(urlsafe=urlsafe)
obj = key.get()
if obj and isinstance(obj, cls):
return obj
|
Add a utility method to get instances from urlsafe key.
|
Add a utility method to get instances from urlsafe key.
|
Python
|
apache-2.0
|
kkinder/GAEStarterKit,kkinder/GAEStarterKit,kkinder/GAEStarterKit
|
python
|
## Code Before:
from google.appengine.ext import ndb
class BaseModel(ndb.Model):
date_created = ndb.DateTimeProperty(auto_now_add=True, required=True)
date_modified = ndb.DateTimeProperty(auto_now=True, required=True)
## Instruction:
Add a utility method to get instances from urlsafe key.
## Code After:
from google.appengine.ext import ndb
class BaseModel(ndb.Model):
date_created = ndb.DateTimeProperty(auto_now_add=True, required=True)
date_modified = ndb.DateTimeProperty(auto_now=True, required=True)
@classmethod
def from_urlsafe(cls, urlsafe):
key = ndb.Key(urlsafe=urlsafe)
obj = key.get()
if obj and isinstance(obj, cls):
return obj
|
...
class BaseModel(ndb.Model):
date_created = ndb.DateTimeProperty(auto_now_add=True, required=True)
date_modified = ndb.DateTimeProperty(auto_now=True, required=True)
@classmethod
def from_urlsafe(cls, urlsafe):
key = ndb.Key(urlsafe=urlsafe)
obj = key.get()
if obj and isinstance(obj, cls):
return obj
...
|
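A hedged usage sketch for the new helper; it assumes an App Engine runtime where `google.appengine.ext.ndb` is importable, and the import path and model below are illustrative rather than taken from the project.

```python
# Hypothetical usage; assumes an App Engine environment and that
# util/BaseModel.py is importable under this package path.
from google.appengine.ext import ndb
from util.BaseModel import BaseModel

class Article(BaseModel):
    title = ndb.StringProperty()

def show_article(urlsafe_key):
    # from_urlsafe returns None when the key resolves to nothing
    # or to an entity of a different model class.
    article = Article.from_urlsafe(urlsafe_key)
    if article is None:
        return 'not found', 404
    return article.title, 200
```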
68f25d536945e06fae814bdf19218bc148f6cc93
|
backend/scripts/updatedf.py
|
backend/scripts/updatedf.py
|
import os
def main():
for root, dirs, files in os.walk("/mcfs/data/materialscommons"):
for f in files:
print f
if __name__ == "__main__":
main()
|
import hashlib
import os
import rethinkdb as r
def main():
conn = r.connect('localhost', 28015, db='materialscommons')
for root, dirs, files in os.walk("/mcfs/data/materialscommons"):
for f in files:
path = os.path.join(root, f)
with open(path) as fd:
data = fd.read()
hash = hashlib.md5(data).hexdigest()
s = os.stat(path).st_size
r.table('datafiles').get(f).update({'size':s, 'checksum':hash}).run(conn)
print "%s:%s:%d" %(path, hash, s)
if __name__ == "__main__":
main()
|
Update script to write results to the database.
|
Update script to write results to the database.
|
Python
|
mit
|
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
|
python
|
## Code Before:
import os
def main():
for root, dirs, files in os.walk("/mcfs/data/materialscommons"):
for f in files:
print f
if __name__ == "__main__":
main()
## Instruction:
Update script to write results to the database.
## Code After:
import hashlib
import os
import rethinkdb as r
def main():
conn = r.connect('localhost', 28015, db='materialscommons')
for root, dirs, files in os.walk("/mcfs/data/materialscommons"):
for f in files:
path = os.path.join(root, f)
with open(path) as fd:
data = fd.read()
hash = hashlib.md5(data).hexdigest()
s = os.stat(path).st_size
r.table('datafiles').get(f).update({'size':s, 'checksum':hash}).run(conn)
print "%s:%s:%d" %(path, hash, s)
if __name__ == "__main__":
main()
|
...
import hashlib
import os
import rethinkdb as r
def main():
conn = r.connect('localhost', 28015, db='materialscommons')
for root, dirs, files in os.walk("/mcfs/data/materialscommons"):
for f in files:
path = os.path.join(root, f)
with open(path) as fd:
data = fd.read()
hash = hashlib.md5(data).hexdigest()
s = os.stat(path).st_size
r.table('datafiles').get(f).update({'size':s, 'checksum':hash}).run(conn)
print "%s:%s:%d" %(path, hash, s)
if __name__ == "__main__":
main()
...
|
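The checksum step on its own, as a hedged sketch that streams the file in chunks instead of reading it whole (a memory-friendlier variant of what the script above does); standard library only.

```python
import hashlib
import os

def file_checksum(path, chunk_size=1 << 20):
    """MD5 hex digest plus size, hashing the file chunk by chunk."""
    md5 = hashlib.md5()
    with open(path, 'rb') as fd:
        for chunk in iter(lambda: fd.read(chunk_size), b''):
            md5.update(chunk)
    return md5.hexdigest(), os.stat(path).st_size

checksum, size = file_checksum(__file__)
print("%s:%s:%d" % (__file__, checksum, size))
```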
1cfaf387af8e373d2bf3fdc8d6144f889489ba13
|
esis/cli.py
|
esis/cli.py
|
"""Elastic Search Index & Search."""
import argparse
def main():
"""Entry point for the esis.py script."""
args = parse_arguments()
print args
def parse_arguments():
"""Parse command line arguments.
:returns: Parsed arguments
:rtype: argparse.Namespace
"""
parser = argparse.ArgumentParser(description=__doc__)
subparsers = parser.add_subparsers(help='Subcommands')
index = subparsers.add_parser('index', help='Index SQLite database files')
index.add_argument('directory', help='Base directory')
search = subparsers.add_parser('search', help='Search indexed data')
search.add_argument('query', help='Search query')
args = parser.parse_args()
return args
if __name__ == '__main__':
main()
|
"""Elastic Search Index & Search."""
import argparse
import os
def main():
"""Entry point for the esis.py script."""
args = parse_arguments()
print args
def valid_directory(path):
"""Directory validation."""
if not os.path.isdir(path):
raise argparse.ArgumentTypeError(
'{!r} is not a valid directory'.format(path))
if not os.access(path, os.R_OK | os.X_OK):
raise argparse.ArgumentTypeError(
'not enough permissions to explore {!r}'.format(path))
return path
def parse_arguments():
"""Parse command line arguments.
:returns: Parsed arguments
:rtype: argparse.Namespace
"""
parser = argparse.ArgumentParser(description=__doc__)
subparsers = parser.add_subparsers(help='Subcommands')
index = subparsers.add_parser('index', help='Index SQLite database files')
index.add_argument('directory', type=valid_directory, help='Base directory')
search = subparsers.add_parser('search', help='Search indexed data')
search.add_argument('query', help='Search query')
args = parser.parse_args()
return args
if __name__ == '__main__':
main()
|
Add directory validation to argument parsing
|
Add directory validation to argument parsing
|
Python
|
mit
|
jcollado/esis
|
python
|
## Code Before:
"""Elastic Search Index & Search."""
import argparse
def main():
"""Entry point for the esis.py script."""
args = parse_arguments()
print args
def parse_arguments():
"""Parse command line arguments.
:returns: Parsed arguments
:rtype: argparse.Namespace
"""
parser = argparse.ArgumentParser(description=__doc__)
subparsers = parser.add_subparsers(help='Subcommands')
index = subparsers.add_parser('index', help='Index SQLite database files')
index.add_argument('directory', help='Base directory')
search = subparsers.add_parser('search', help='Search indexed data')
search.add_argument('query', help='Search query')
args = parser.parse_args()
return args
if __name__ == '__main__':
main()
## Instruction:
Add directory validation to argument parsing
## Code After:
"""Elastic Search Index & Search."""
import argparse
import os
def main():
"""Entry point for the esis.py script."""
args = parse_arguments()
print args
def valid_directory(path):
"""Directory validation."""
if not os.path.isdir(path):
raise argparse.ArgumentTypeError(
'{!r} is not a valid directory'.format(path))
if not os.access(path, os.R_OK | os.X_OK):
raise argparse.ArgumentTypeError(
'not enough permissions to explore {!r}'.format(path))
return path
def parse_arguments():
"""Parse command line arguments.
:returns: Parsed arguments
:rtype: argparse.Namespace
"""
parser = argparse.ArgumentParser(description=__doc__)
subparsers = parser.add_subparsers(help='Subcommands')
index = subparsers.add_parser('index', help='Index SQLite database files')
index.add_argument('directory', type=valid_directory, help='Base directory')
search = subparsers.add_parser('search', help='Search indexed data')
search.add_argument('query', help='Search query')
args = parser.parse_args()
return args
if __name__ == '__main__':
main()
|
// ... existing code ...
"""Elastic Search Index & Search."""
import argparse
import os
def main():
// ... modified code ...
"""Entry point for the esis.py script."""
args = parse_arguments()
print args
def valid_directory(path):
"""Directory validation."""
if not os.path.isdir(path):
raise argparse.ArgumentTypeError(
'{!r} is not a valid directory'.format(path))
if not os.access(path, os.R_OK | os.X_OK):
raise argparse.ArgumentTypeError(
'not enough permissions to explore {!r}'.format(path))
return path
def parse_arguments():
"""Parse command line arguments.
...
parser = argparse.ArgumentParser(description=__doc__)
subparsers = parser.add_subparsers(help='Subcommands')
index = subparsers.add_parser('index', help='Index SQLite database files')
index.add_argument('directory', type=valid_directory, help='Base directory')
search = subparsers.add_parser('search', help='Search indexed data')
search.add_argument('query', help='Search query')
args = parser.parse_args()
// ... rest of the code ...
|
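A standalone sketch of the argparse mechanism this change leans on: a callable passed as `type=` runs during `parse_args()`, and any ArgumentTypeError it raises becomes a normal usage error. The paths here are only examples.

```python
import argparse
import os

def valid_directory(path):
    if not os.path.isdir(path):
        raise argparse.ArgumentTypeError('{!r} is not a valid directory'.format(path))
    return path

parser = argparse.ArgumentParser()
parser.add_argument('directory', type=valid_directory)

print(parser.parse_args(['/tmp']).directory)   # '/tmp' on most Unix systems
# parser.parse_args(['/no/such/dir'])          # exits with: error: '/no/such/dir' is not a valid directory
```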
7665e2b0af042948dfc7a1814275cd3309f5f6cf
|
pages/tests/__init__.py
|
pages/tests/__init__.py
|
"""Django page CMS test suite module"""
from djangox.test.depth import alltests
def suite():
return alltests(__file__, __name__)
|
"""Django page CMS test suite module"""
import unittest
def suite():
suite = unittest.TestSuite()
from pages.tests.test_functionnal import FunctionnalTestCase
from pages.tests.test_unit import UnitTestCase
from pages.tests.test_regression import RegressionTestCase
from pages.tests.test_pages_link import LinkTestCase
from pages.tests.test_auto_render import AutoRenderTestCase
suite.addTest(unittest.makeSuite(FunctionnalTestCase))
suite.addTest(unittest.makeSuite(UnitTestCase))
suite.addTest(unittest.makeSuite(RegressionTestCase))
suite.addTest(unittest.makeSuite(LinkTestCase))
suite.addTest(unittest.makeSuite(AutoRenderTestCase))
return suite
|
Remove the dependency to django-unittest-depth
|
Remove the dependency to django-unittest-depth
|
Python
|
bsd-3-clause
|
remik/django-page-cms,oliciv/django-page-cms,remik/django-page-cms,remik/django-page-cms,pombredanne/django-page-cms-1,batiste/django-page-cms,batiste/django-page-cms,akaihola/django-page-cms,oliciv/django-page-cms,remik/django-page-cms,pombredanne/django-page-cms-1,akaihola/django-page-cms,akaihola/django-page-cms,batiste/django-page-cms,pombredanne/django-page-cms-1,oliciv/django-page-cms
|
python
|
## Code Before:
"""Django page CMS test suite module"""
from djangox.test.depth import alltests
def suite():
return alltests(__file__, __name__)
## Instruction:
Remove the dependency to django-unittest-depth
## Code After:
"""Django page CMS test suite module"""
import unittest
def suite():
suite = unittest.TestSuite()
from pages.tests.test_functionnal import FunctionnalTestCase
from pages.tests.test_unit import UnitTestCase
from pages.tests.test_regression import RegressionTestCase
from pages.tests.test_pages_link import LinkTestCase
from pages.tests.test_auto_render import AutoRenderTestCase
suite.addTest(unittest.makeSuite(FunctionnalTestCase))
suite.addTest(unittest.makeSuite(UnitTestCase))
suite.addTest(unittest.makeSuite(RegressionTestCase))
suite.addTest(unittest.makeSuite(LinkTestCase))
suite.addTest(unittest.makeSuite(AutoRenderTestCase))
return suite
|
// ... existing code ...
"""Django page CMS test suite module"""
import unittest
def suite():
suite = unittest.TestSuite()
from pages.tests.test_functionnal import FunctionnalTestCase
from pages.tests.test_unit import UnitTestCase
from pages.tests.test_regression import RegressionTestCase
from pages.tests.test_pages_link import LinkTestCase
from pages.tests.test_auto_render import AutoRenderTestCase
suite.addTest(unittest.makeSuite(FunctionnalTestCase))
suite.addTest(unittest.makeSuite(UnitTestCase))
suite.addTest(unittest.makeSuite(RegressionTestCase))
suite.addTest(unittest.makeSuite(LinkTestCase))
suite.addTest(unittest.makeSuite(AutoRenderTestCase))
return suite
// ... rest of the code ...
|
0bc83982e4df8c2eb0ec88952656a40504143c83
|
setup.py
|
setup.py
|
from __future__ import print_function, unicode_literals
import sys
import codecs
from setuptools import setup, find_packages
from nhentai import __version__, __author__, __email__
with open('requirements.txt') as f:
requirements = [l for l in f.read().splitlines() if l]
def long_description():
with codecs.open('README.md', 'rb') as f:
if sys.version_info >= (3, 0, 0):
return str(f.read())
setup(
name='nhentai',
version=__version__,
packages=find_packages(),
author=__author__,
author_email=__email__,
keywords='nhentai, doujinshi',
description='nhentai.net doujinshis downloader',
long_description=long_description(),
url='https://github.com/RicterZ/nhentai',
download_url='https://github.com/RicterZ/nhentai/tarball/master',
include_package_data=True,
zip_safe=False,
install_requires=requirements,
entry_points={
'console_scripts': [
'nhentai = nhentai.command:main',
]
},
license='MIT',
)
|
from __future__ import print_function, unicode_literals
import sys
import codecs
from setuptools import setup, find_packages
from nhentai import __version__, __author__, __email__
with open('requirements.txt') as f:
requirements = [l for l in f.read().splitlines() if l]
def long_description():
with codecs.open('README.md', 'rb') as f:
if sys.version_info >= (3, 0, 0):
return str(f.read())
setup(
name='nhentai',
version=__version__,
packages=find_packages(),
package_data={
'nhentai': ['viewer/**']
},
author=__author__,
author_email=__email__,
keywords='nhentai, doujinshi',
description='nhentai.net doujinshis downloader',
long_description=long_description(),
url='https://github.com/RicterZ/nhentai',
download_url='https://github.com/RicterZ/nhentai/tarball/master',
include_package_data=True,
zip_safe=False,
install_requires=requirements,
entry_points={
'console_scripts': [
'nhentai = nhentai.command:main',
]
},
license='MIT',
)
|
Add the viewer to the package_data entry
|
Add the viewer to the package_data entry
|
Python
|
mit
|
RicterZ/nhentai,RicterZ/nhentai,RicterZ/nhentai
|
python
|
## Code Before:
from __future__ import print_function, unicode_literals
import sys
import codecs
from setuptools import setup, find_packages
from nhentai import __version__, __author__, __email__
with open('requirements.txt') as f:
requirements = [l for l in f.read().splitlines() if l]
def long_description():
with codecs.open('README.md', 'rb') as f:
if sys.version_info >= (3, 0, 0):
return str(f.read())
setup(
name='nhentai',
version=__version__,
packages=find_packages(),
author=__author__,
author_email=__email__,
keywords='nhentai, doujinshi',
description='nhentai.net doujinshis downloader',
long_description=long_description(),
url='https://github.com/RicterZ/nhentai',
download_url='https://github.com/RicterZ/nhentai/tarball/master',
include_package_data=True,
zip_safe=False,
install_requires=requirements,
entry_points={
'console_scripts': [
'nhentai = nhentai.command:main',
]
},
license='MIT',
)
## Instruction:
Add the viewer to the package_data entry
## Code After:
from __future__ import print_function, unicode_literals
import sys
import codecs
from setuptools import setup, find_packages
from nhentai import __version__, __author__, __email__
with open('requirements.txt') as f:
requirements = [l for l in f.read().splitlines() if l]
def long_description():
with codecs.open('README.md', 'rb') as f:
if sys.version_info >= (3, 0, 0):
return str(f.read())
setup(
name='nhentai',
version=__version__,
packages=find_packages(),
package_data={
'nhentai': ['viewer/**']
},
author=__author__,
author_email=__email__,
keywords='nhentai, doujinshi',
description='nhentai.net doujinshis downloader',
long_description=long_description(),
url='https://github.com/RicterZ/nhentai',
download_url='https://github.com/RicterZ/nhentai/tarball/master',
include_package_data=True,
zip_safe=False,
install_requires=requirements,
entry_points={
'console_scripts': [
'nhentai = nhentai.command:main',
]
},
license='MIT',
)
|
# ... existing code ...
name='nhentai',
version=__version__,
packages=find_packages(),
package_data={
'nhentai': ['viewer/**']
},
author=__author__,
author_email=__email__,
# ... rest of the code ...
|
ce36dd825635c8487fcd9f83bd686a2dce7c318c
|
hello.py
|
hello.py
|
from flask import Flask
from flask import request
import os
from dogapi import dog_http_api as api
app = Flask(__name__)
api.api_key = os.environ.get('DD_API_KEY')
action_url = "/" + os.environ.get('BASE_URL') + "/"
@app.route(action_url, methods=['POST', 'GET'])
def hello():
api.metric('mailgun.event', (request.args.post('timestamp'), 1), tags=["event_name:" + request.args.post('event')])
return "200"
if __name__ == "__main__":
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
|
from flask import Flask
from flask import request
import os
from dogapi import dog_http_api as api
app = Flask(__name__)
api.api_key = os.environ.get('DD_API_KEY')
action_url = "/" + os.environ.get('BASE_URL') + "/"
@app.route(action_url, methods=['POST', 'GET'])
def hello():
api.metric('mailgun.event', (request.form('timestamp'), 1), tags=["event_name:" + request.form('event')])
return "200"
if __name__ == "__main__":
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
|
Use the right style of request.
|
Use the right style of request.
|
Python
|
apache-2.0
|
darron/mailgun_datadog
|
python
|
## Code Before:
from flask import Flask
from flask import request
import os
from dogapi import dog_http_api as api
app = Flask(__name__)
api.api_key = os.environ.get('DD_API_KEY')
action_url = "/" + os.environ.get('BASE_URL') + "/"
@app.route(action_url, methods=['POST', 'GET'])
def hello():
api.metric('mailgun.event', (request.args.post('timestamp'), 1), tags=["event_name:" + request.args.post('event')])
return "200"
if __name__ == "__main__":
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
## Instruction:
Use the right style of request.
## Code After:
from flask import Flask
from flask import request
import os
from dogapi import dog_http_api as api
app = Flask(__name__)
api.api_key = os.environ.get('DD_API_KEY')
action_url = "/" + os.environ.get('BASE_URL') + "/"
@app.route(action_url, methods=['POST', 'GET'])
def hello():
api.metric('mailgun.event', (request.form('timestamp'), 1), tags=["event_name:" + request.form('event')])
return "200"
if __name__ == "__main__":
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
|
...
@app.route(action_url, methods=['POST', 'GET'])
def hello():
api.metric('mailgun.event', (request.form('timestamp'), 1), tags=["event_name:" + request.form('event')])
return "200"
if __name__ == "__main__":
...
|
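For context, Werkzeug's `request.form` is a dict-like MultiDict rather than a callable, so the usual access pattern looks like this hedged sketch; the route and field names mirror the snippet above but are illustrative.

```python
from flask import Flask, request

app = Flask(__name__)

@app.route('/hook', methods=['POST'])
def hook():
    # request.form is an ImmutableMultiDict: index it or use .get(), don't call it.
    timestamp = request.form.get('timestamp')
    event = request.form.get('event')
    return 'ok: {} {}'.format(event, timestamp)

with app.test_client() as client:
    resp = client.post('/hook', data={'timestamp': '123', 'event': 'delivered'})
    print(resp.get_data(as_text=True))   # ok: delivered 123
```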
906505d85914287af3a031bf77f74dd79a4aaa32
|
pygraphc/preprocess/CreateGraphModel.py
|
pygraphc/preprocess/CreateGraphModel.py
|
from pygraphc.preprocess.ParallelPreprocess import ParallelPreprocess
from pygraphc.similarity.JaroWinkler import JaroWinkler
from pygraphc.pruning.TrianglePruning import TrianglePruning
import networkx as nx
class CreateGraphModel(object):
def __init__(self, log_file):
self.log_file = log_file
self.unique_events = []
self.unique_events_length = 0
self.distances = []
self.graph = nx.MultiGraph()
def __get_nodes(self):
pp = ParallelPreprocess(self.log_file)
self.unique_events = pp.get_unique_events()
self.unique_events_length = pp.unique_events_length
self.event_attributes = pp.event_attributes
def __get_distances(self):
jw = JaroWinkler(self.event_attributes, self.unique_events_length)
self.distances = jw.get_jarowinkler()
def create_graph(self):
self.__get_nodes()
self.__get_distances()
self.graph.add_nodes_from(self.unique_events)
self.graph.add_weighted_edges_from(self.distances)
tp = TrianglePruning(self.graph)
tp.get_triangle()
self.graph = tp.graph
return self.graph
|
from pygraphc.preprocess.ParallelPreprocess import ParallelPreprocess
from pygraphc.similarity.CosineSimilarity import ParallelCosineSimilarity
from pygraphc.pruning.TrianglePruning import TrianglePruning
import networkx as nx
class CreateGraphModel(object):
def __init__(self, log_file):
self.log_file = log_file
self.unique_events = []
self.unique_events_length = 0
self.distances = []
self.graph = nx.MultiGraph()
def __get_nodes(self):
pp = ParallelPreprocess(self.log_file)
self.unique_events = pp.get_unique_events()
self.unique_events_length = pp.unique_events_length
self.event_attributes = pp.event_attributes
def __get_distances(self):
pcs = ParallelCosineSimilarity(self.event_attributes, self.unique_events_length)
self.distances = pcs.get_parallel_cosine_similarity()
def create_graph(self):
self.__get_nodes()
self.__get_distances()
self.graph.add_nodes_from(self.unique_events)
self.graph.add_weighted_edges_from(self.distances)
tp = TrianglePruning(self.graph)
tp.get_triangle()
self.graph = tp.graph
return self.graph
|
Change jaro-winkler to cosine similarity
|
Change jaro-winkler to cosine similarity
|
Python
|
mit
|
studiawan/pygraphc
|
python
|
## Code Before:
from pygraphc.preprocess.ParallelPreprocess import ParallelPreprocess
from pygraphc.similarity.JaroWinkler import JaroWinkler
from pygraphc.pruning.TrianglePruning import TrianglePruning
import networkx as nx
class CreateGraphModel(object):
def __init__(self, log_file):
self.log_file = log_file
self.unique_events = []
self.unique_events_length = 0
self.distances = []
self.graph = nx.MultiGraph()
def __get_nodes(self):
pp = ParallelPreprocess(self.log_file)
self.unique_events = pp.get_unique_events()
self.unique_events_length = pp.unique_events_length
self.event_attributes = pp.event_attributes
def __get_distances(self):
jw = JaroWinkler(self.event_attributes, self.unique_events_length)
self.distances = jw.get_jarowinkler()
def create_graph(self):
self.__get_nodes()
self.__get_distances()
self.graph.add_nodes_from(self.unique_events)
self.graph.add_weighted_edges_from(self.distances)
tp = TrianglePruning(self.graph)
tp.get_triangle()
self.graph = tp.graph
return self.graph
## Instruction:
Change jaro-winkler to cosine similarity
## Code After:
from pygraphc.preprocess.ParallelPreprocess import ParallelPreprocess
from pygraphc.similarity.CosineSimilarity import ParallelCosineSimilarity
from pygraphc.pruning.TrianglePruning import TrianglePruning
import networkx as nx
class CreateGraphModel(object):
def __init__(self, log_file):
self.log_file = log_file
self.unique_events = []
self.unique_events_length = 0
self.distances = []
self.graph = nx.MultiGraph()
def __get_nodes(self):
pp = ParallelPreprocess(self.log_file)
self.unique_events = pp.get_unique_events()
self.unique_events_length = pp.unique_events_length
self.event_attributes = pp.event_attributes
def __get_distances(self):
pcs = ParallelCosineSimilarity(self.event_attributes, self.unique_events_length)
self.distances = pcs.get_parallel_cosine_similarity()
def create_graph(self):
self.__get_nodes()
self.__get_distances()
self.graph.add_nodes_from(self.unique_events)
self.graph.add_weighted_edges_from(self.distances)
tp = TrianglePruning(self.graph)
tp.get_triangle()
self.graph = tp.graph
return self.graph
|
...
from pygraphc.preprocess.ParallelPreprocess import ParallelPreprocess
from pygraphc.similarity.CosineSimilarity import ParallelCosineSimilarity
from pygraphc.pruning.TrianglePruning import TrianglePruning
import networkx as nx
...
self.event_attributes = pp.event_attributes
def __get_distances(self):
pcs = ParallelCosineSimilarity(self.event_attributes, self.unique_events_length)
self.distances = pcs.get_parallel_cosine_similarity()
def create_graph(self):
self.__get_nodes()
...
|
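A tiny sketch of the networkx calls the class builds on: add nodes, add (u, v, weight) edges, read a weight back. Node labels and weights are made up.

```python
import networkx as nx

graph = nx.MultiGraph()
graph.add_nodes_from([0, 1, 2])
# (u, v, weight) triples, e.g. pairwise similarity scores between log events
graph.add_weighted_edges_from([(0, 1, 0.91), (1, 2, 0.42)])

print(graph.number_of_edges())    # 2
print(graph[0][1][0]['weight'])   # 0.91 (MultiGraph edges are keyed; 0 is the first key)
```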
5cb6e90714ffe91377e01743451ed4aefe4a1e24
|
greencard/greencard.py
|
greencard/greencard.py
|
"""Greencard implementation."""
from functools import wraps
TESTS = []
def greencard(func):
"""
A decorator for providing a unittesting function/method with every card in
a librarian card library database when it is called.
"""
@wraps(func)
def wrapped(*args, **kwargs):
"""Transparent wrapper."""
return func(*args, **kwargs)
TESTS.append(wrapped)
return wrapped
def descovery(testdir):
"""Descover and load greencard tests."""
import os
from glob import glob
import importlib
for testpath in glob(os.path.join(testdir, "*.py")):
importlib.import_module(testpath)
def main(clargs=None):
"""Command line entry point."""
from argparse import ArgumentParser
from librarian.library import Library
import sys
parser = ArgumentParser(
description="A test runner for each card in a librarian library.")
parser.add_argument("library", help="Library database")
parser.add_argument("-t", "--tests", default="./tests/",
help="Test directory")
args = parser.parse_args(clargs)
descovery(args.tests)
library = Library(args.library)
failures = 0
for card in library.retrieve_all():
for test in TESTS:
try:
test(card)
except AssertionError:
print("{0} failed {1}".format(card, test.__name__))
failures += 1
sys.exit(failures)
|
"""Greencard implementation."""
from functools import wraps
TESTS = []
def greencard(func):
"""
A decorator for providing a unittesting function/method with every card in
a librarian card library database when it is called.
"""
@wraps(func)
def wrapped(*args, **kwargs):
"""Transparent wrapper."""
return func(*args, **kwargs)
TESTS.append(wrapped)
return wrapped
def descovery(testdir):
"""Descover and load greencard tests."""
from os.path import splitext, basename, join, exists
if not exists(testdir):
return None
import sys
from glob import glob
import importlib
sys.path.append(testdir)
for testpath in glob(join(testdir, "*.py")):
name, _ = splitext(basename(testpath))
importlib.import_module(name)
def main(clargs=None):
"""Command line entry point."""
from argparse import ArgumentParser
from librarian.library import Library
import sys
parser = ArgumentParser(
description="A test runner for each card in a librarian library.")
parser.add_argument("library", help="Library database")
parser.add_argument("-t", "--tests", default="./tests/",
help="Test directory")
args = parser.parse_args(clargs)
descovery(args.tests)
library = Library(args.library)
failures = 0
for card in library.retrieve_all():
for test in TESTS:
try:
test(card)
except AssertionError:
print("{0} failed {1}".format(card, test.__name__))
failures += 1
sys.exit(failures)
|
Fix test descovery to correctly add test dir to path and import modules rather then files
|
Fix test descovery to correctly add test dir to path and import modules rather then files
|
Python
|
mit
|
Nekroze/greencard,Nekroze/greencard
|
python
|
## Code Before:
"""Greencard implementation."""
from functools import wraps
TESTS = []
def greencard(func):
"""
A decorator for providing a unittesting function/method with every card in
a librarian card library database when it is called.
"""
@wraps(func)
def wrapped(*args, **kwargs):
"""Transparent wrapper."""
return func(*args, **kwargs)
TESTS.append(wrapped)
return wrapped
def descovery(testdir):
"""Descover and load greencard tests."""
import os
from glob import glob
import importlib
for testpath in glob(os.path.join(testdir, "*.py")):
importlib.import_module(testpath)
def main(clargs=None):
"""Command line entry point."""
from argparse import ArgumentParser
from librarian.library import Library
import sys
parser = ArgumentParser(
description="A test runner for each card in a librarian library.")
parser.add_argument("library", help="Library database")
parser.add_argument("-t", "--tests", default="./tests/",
help="Test directory")
args = parser.parse_args(clargs)
descovery(args.tests)
library = Library(args.library)
failures = 0
for card in library.retrieve_all():
for test in TESTS:
try:
test(card)
except AssertionError:
print("{0} failed {1}".format(card, test.__name__))
failures += 1
sys.exit(failures)
## Instruction:
Fix test descovery to correctly add test dir to path and import modules rather then files
## Code After:
"""Greencard implementation."""
from functools import wraps
TESTS = []
def greencard(func):
"""
A decorator for providing a unittesting function/method with every card in
a librarian card library database when it is called.
"""
@wraps(func)
def wrapped(*args, **kwargs):
"""Transparent wrapper."""
return func(*args, **kwargs)
TESTS.append(wrapped)
return wrapped
def descovery(testdir):
"""Descover and load greencard tests."""
from os.path import splitext, basename, join, exists
if not exists(testdir):
return None
import sys
from glob import glob
import importlib
sys.path.append(testdir)
for testpath in glob(join(testdir, "*.py")):
name, _ = splitext(basename(testpath))
importlib.import_module(name)
def main(clargs=None):
"""Command line entry point."""
from argparse import ArgumentParser
from librarian.library import Library
import sys
parser = ArgumentParser(
description="A test runner for each card in a librarian library.")
parser.add_argument("library", help="Library database")
parser.add_argument("-t", "--tests", default="./tests/",
help="Test directory")
args = parser.parse_args(clargs)
descovery(args.tests)
library = Library(args.library)
failures = 0
for card in library.retrieve_all():
for test in TESTS:
try:
test(card)
except AssertionError:
print("{0} failed {1}".format(card, test.__name__))
failures += 1
sys.exit(failures)
|
...
def descovery(testdir):
"""Descover and load greencard tests."""
from os.path import splitext, basename, join, exists
if not exists(testdir):
return None
import sys
from glob import glob
import importlib
sys.path.append(testdir)
for testpath in glob(join(testdir, "*.py")):
name, _ = splitext(basename(testpath))
importlib.import_module(name)
def main(clargs=None):
...
|
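The discovery fix reduces to a reusable pattern: put the directory on sys.path, then import by module name rather than by file path. A hedged, standard-library-only sketch with throwaway names:

```python
import importlib
import sys
from glob import glob
from os.path import basename, join, splitext

def discover(testdir):
    """Import every .py file in testdir as a module (illustrative names)."""
    sys.path.append(testdir)
    modules = []
    for testpath in glob(join(testdir, "*.py")):
        name, _ = splitext(basename(testpath))   # 'tests/test_foo.py' -> 'test_foo'
        modules.append(importlib.import_module(name))
    return modules

# e.g. discover("./tests/") imports test_foo, test_bar, ... so that any
# decorated test functions they define get registered as a side effect.
```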
5702f86c31305d5029f8d9d8bff02bb6b727a06a
|
utils/Hash.h
|
utils/Hash.h
|
template <typename K, typename V>
using HashMap = std::unordered_map<K, V>;
template <typename T>
using HashSet = std::unordered_set<T>;
#endif // HASH_H
|
template <typename K, typename V>
using HashMap = std::unordered_map<K, V>;
template <typename T>
using HashSet = std::unordered_set<T>;
// These come from boost
// Copyright 2005-2014 Daniel James.
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
static inline uint32_t combineHashes(uint32_t seed, uint32_t value) {
seed ^= value + 0x9e3779b9 + (seed << 6) + (seed >> 2);
return seed;
}
static inline uint64_t combineHashes(uint64_t h, uint64_t k) {
const uint64_t m = 0xc6a4a7935bd1e995UL;
const int r = 47;
k *= m;
k ^= k >> r;
k *= m;
h ^= k;
h *= m;
// Completely arbitrary number, to prevent 0's
// from hashing to 0.
h += 0xe6546b64;
return h;
}
template <typename It>
size_t hashRange(It b, It e) {
size_t h = 0;
for (It it = b; it != e; it++) {
h = combineHashes(h, std::hash<typename It::value_type>()(*it));
}
return h;
}
#endif // HASH_H
|
Add some hashing utility functions
|
Add some hashing utility functions
|
C
|
mit
|
turol/smaaDemo,turol/smaaDemo,turol/smaaDemo,turol/smaaDemo,turol/smaaDemo,turol/smaaDemo,turol/smaaDemo,turol/smaaDemo,turol/smaaDemo,turol/smaaDemo,turol/smaaDemo
|
c
|
## Code Before:
template <typename K, typename V>
using HashMap = std::unordered_map<K, V>;
template <typename T>
using HashSet = std::unordered_set<T>;
#endif // HASH_H
## Instruction:
Add some hashing utility functions
## Code After:
template <typename K, typename V>
using HashMap = std::unordered_map<K, V>;
template <typename T>
using HashSet = std::unordered_set<T>;
// These come from boost
// Copyright 2005-2014 Daniel James.
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
static inline uint32_t combineHashes(uint32_t seed, uint32_t value) {
seed ^= value + 0x9e3779b9 + (seed << 6) + (seed >> 2);
return seed;
}
static inline uint64_t combineHashes(uint64_t h, uint64_t k) {
const uint64_t m = 0xc6a4a7935bd1e995UL;
const int r = 47;
k *= m;
k ^= k >> r;
k *= m;
h ^= k;
h *= m;
// Completely arbitrary number, to prevent 0's
// from hashing to 0.
h += 0xe6546b64;
return h;
}
template <typename It>
size_t hashRange(It b, It e) {
size_t h = 0;
for (It it = b; it != e; it++) {
h = combineHashes(h, std::hash<typename It::value_type>()(*it));
}
return h;
}
#endif // HASH_H
|
# ... existing code ...
using HashSet = std::unordered_set<T>;
// These come from boost
// Copyright 2005-2014 Daniel James.
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
static inline uint32_t combineHashes(uint32_t seed, uint32_t value) {
seed ^= value + 0x9e3779b9 + (seed << 6) + (seed >> 2);
return seed;
}
static inline uint64_t combineHashes(uint64_t h, uint64_t k) {
const uint64_t m = 0xc6a4a7935bd1e995UL;
const int r = 47;
k *= m;
k ^= k >> r;
k *= m;
h ^= k;
h *= m;
// Completely arbitrary number, to prevent 0's
// from hashing to 0.
h += 0xe6546b64;
return h;
}
template <typename It>
size_t hashRange(It b, It e) {
size_t h = 0;
for (It it = b; it != e; it++) {
h = combineHashes(h, std::hash<typename It::value_type>()(*it));
}
return h;
}
#endif // HASH_H
# ... rest of the code ...
|
c501bba28d4a77ba03f6f1277be13913307f04e1
|
clowder/utility/print_utilities.py
|
clowder/utility/print_utilities.py
|
"""Print utilities"""
import os
import emoji
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(project_output + ' ' + current_ref_output + ' ' + path_output)
def print_group(name):
name_output = colored(name, attrs=['bold'])
print(get_cat() + ' ' + name_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
|
"""Print utilities"""
import os
import emoji
from termcolor import colored, cprint
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
cprint(name, 'green')
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(project_output + ' ' + current_ref_output + ' ' + path_output)
def print_group(name):
name_output = colored(name, attrs=['bold'])
print(get_cat() + ' ' + name_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
|
Print project name even if it doesn't exist on disk
|
Print project name even if it doesn't exist on disk
|
Python
|
mit
|
JrGoodle/clowder,JrGoodle/clowder,JrGoodle/clowder
|
python
|
## Code Before:
"""Print utilities"""
import os
import emoji
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(project_output + ' ' + current_ref_output + ' ' + path_output)
def print_group(name):
name_output = colored(name, attrs=['bold'])
print(get_cat() + ' ' + name_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
## Instruction:
Print project name even if it doesn't exist on disk
## Code After:
"""Print utilities"""
import os
import emoji
from termcolor import colored, cprint
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
cprint(name, 'green')
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(project_output + ' ' + current_ref_output + ' ' + path_output)
def print_group(name):
name_output = colored(name, attrs=['bold'])
print(get_cat() + ' ' + name_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
|
// ... existing code ...
"""Print utilities"""
import os
import emoji
from termcolor import colored, cprint
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
// ... modified code ...
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
cprint(name, 'green')
return
if git_is_dirty(repo_path):
// ... rest of the code ...
|
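A brief illustration of the pattern in the clowder record above: the fix uses termcolor's cprint to print the project name in green when no clone exists on disk. The sketch below isolates that colored-output idea; the report_project function, the path and the project name are made-up illustrations, not part of clowder.

import os
from termcolor import colored, cprint

def report_project(repo_path, name):
    # Mirrors the pattern from the record: a green name when the clone is
    # absent, otherwise a bold name followed by the path in cyan.
    if not os.path.isdir(os.path.join(repo_path, '.git')):
        cprint(name, 'green')
        return
    print(colored(name, attrs=['bold']) + ' ' + colored(repo_path, 'cyan'))

report_project('/tmp/does-not-exist', 'example-project')  # prints only the green name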
c672bfae82ee2cb46f5c604a0ad7aa5fc028474a
|
src/main/java/org/javacs/Lib.java
|
src/main/java/org/javacs/Lib.java
|
package org.javacs;
import java.io.File;
import java.lang.System;
import java.util.Optional;
import java.nio.file.*;
class Lib {
static Optional<Path> srcZipPath() {
return Optional.ofNullable(System.getenv("JAVA_HOME"))
.flatMap(home -> Optional.of(Paths.get(home).resolve("lib/src.zip")))
.flatMap(path -> {
if (path.toFile().exists()) {
return Optional.of(path);
} else {
return Optional.empty();
}
});
}
static final Optional<Path> SRC_ZIP = srcZipPath();
}
|
package org.javacs;
import java.io.File;
import java.lang.System;
import java.util.Optional;
import java.util.Arrays;
import java.nio.file.*;
class Lib {
static Optional<Path> srcZipPath() {
return Optional.ofNullable(System.getenv("JAVA_HOME"))
.map(home -> {
return Arrays.asList(new Path[]{
Paths.get(home).resolve("lib/src.zip"),
Paths.get(home).resolve("src.zip"),
});
})
.flatMap(paths -> {
for (Path path : paths) {
if (path.toFile().exists()) {
return Optional.of(path);
}
}
return Optional.empty();
});
}
static final Optional<Path> SRC_ZIP = srcZipPath();
}
|
Support other locations for finding src.zip
|
Support other locations for finding src.zip
What
===
Support other locations for finding src.zip so that it is found if it
lives at either:
- JAVA_HOME/lib/src.zip
- JAVA_HOME/src.zip
Why
===
I've observed in testing that some installations of JVMs include src.zip
in the root, others in a lib/src.zip.
|
Java
|
mit
|
georgewfraser/vscode-javac,georgewfraser/vscode-javac,georgewfraser/vscode-javac
|
java
|
## Code Before:
package org.javacs;
import java.io.File;
import java.lang.System;
import java.util.Optional;
import java.nio.file.*;
class Lib {
static Optional<Path> srcZipPath() {
return Optional.ofNullable(System.getenv("JAVA_HOME"))
.flatMap(home -> Optional.of(Paths.get(home).resolve("lib/src.zip")))
.flatMap(path -> {
if (path.toFile().exists()) {
return Optional.of(path);
} else {
return Optional.empty();
}
});
}
static final Optional<Path> SRC_ZIP = srcZipPath();
}
## Instruction:
Support other locations for finding src.zip
What
===
Support other locations for finding src.zip so that it is found if it
lives at either:
- JAVA_HOME/lib/src.zip
- JAVA_HOME/src.zip
Why
===
I've observed in testing that some installations of JVMs include src.zip
in the root, others in a lib/src.zip.
## Code After:
package org.javacs;
import java.io.File;
import java.lang.System;
import java.util.Optional;
import java.util.Arrays;
import java.nio.file.*;
class Lib {
static Optional<Path> srcZipPath() {
return Optional.ofNullable(System.getenv("JAVA_HOME"))
.map(home -> {
return Arrays.asList(new Path[]{
Paths.get(home).resolve("lib/src.zip"),
Paths.get(home).resolve("src.zip"),
});
})
.flatMap(paths -> {
for (Path path : paths) {
if (path.toFile().exists()) {
return Optional.of(path);
}
}
return Optional.empty();
});
}
static final Optional<Path> SRC_ZIP = srcZipPath();
}
|
...
import java.io.File;
import java.lang.System;
import java.util.Optional;
import java.util.Arrays;
import java.nio.file.*;
class Lib {
static Optional<Path> srcZipPath() {
return Optional.ofNullable(System.getenv("JAVA_HOME"))
.map(home -> {
return Arrays.asList(new Path[]{
Paths.get(home).resolve("lib/src.zip"),
Paths.get(home).resolve("src.zip"),
});
})
.flatMap(paths -> {
for (Path path : paths) {
if (path.toFile().exists()) {
return Optional.of(path);
}
}
return Optional.empty();
});
}
...
|
fe4bc023d207f219e487badc668f81ce7485ba5a
|
sympy/utilities/source.py
|
sympy/utilities/source.py
|
from __future__ import print_function, division
import inspect
def source(object):
"""
Prints the source code of a given object.
"""
print('In file: %s' % inspect.getsourcefile(object))
print(inspect.getsource(object))
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
lookup_view = lookup_view
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(
__import__(mod_name, {}, {}, ['*']), func_name)
if not callable(lookup_view):
raise AttributeError(
"'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot + 1:]
|
from __future__ import print_function, division
import inspect
def source(object):
"""
Prints the source code of a given object.
"""
print('In file: %s' % inspect.getsourcefile(object))
print(inspect.getsource(object))
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(
__import__(mod_name, {}, {}, ['*']), func_name)
if not callable(lookup_view):
raise AttributeError(
"'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot + 1:]
|
Remove a redundant line from get_class
|
Remove a redundant line from get_class
|
Python
|
bsd-3-clause
|
emon10005/sympy,ahhda/sympy,kaichogami/sympy,mafiya69/sympy,Designist/sympy,aktech/sympy,Titan-C/sympy,jerli/sympy,Davidjohnwilson/sympy,sampadsaha5/sympy,hargup/sympy,drufat/sympy,Vishluck/sympy,maniteja123/sympy,wanglongqi/sympy,jaimahajan1997/sympy,yukoba/sympy,AkademieOlympia/sympy,ChristinaZografou/sympy,Titan-C/sympy,souravsingh/sympy,yashsharan/sympy,kaushik94/sympy,VaibhavAgarwalVA/sympy,mcdaniel67/sympy,kevalds51/sympy,emon10005/sympy,rahuldan/sympy,Davidjohnwilson/sympy,AkademieOlympia/sympy,pandeyadarsh/sympy,ga7g08/sympy,Vishluck/sympy,cswiercz/sympy,jaimahajan1997/sympy,kevalds51/sympy,Vishluck/sympy,drufat/sympy,Designist/sympy,moble/sympy,lindsayad/sympy,postvakje/sympy,cswiercz/sympy,debugger22/sympy,mafiya69/sympy,debugger22/sympy,saurabhjn76/sympy,iamutkarshtiwari/sympy,yashsharan/sympy,Curious72/sympy,farhaanbukhsh/sympy,mcdaniel67/sympy,yukoba/sympy,abhiii5459/sympy,sahmed95/sympy,skidzo/sympy,atreyv/sympy,Curious72/sympy,maniteja123/sympy,ga7g08/sympy,shikil/sympy,atreyv/sympy,maniteja123/sympy,saurabhjn76/sympy,skidzo/sympy,ahhda/sympy,Shaswat27/sympy,oliverlee/sympy,Arafatk/sympy,sahmed95/sympy,jbbskinny/sympy,shikil/sympy,abhiii5459/sympy,ahhda/sympy,Titan-C/sympy,rahuldan/sympy,madan96/sympy,debugger22/sympy,Shaswat27/sympy,iamutkarshtiwari/sympy,kaushik94/sympy,kaichogami/sympy,jerli/sympy,mafiya69/sympy,MechCoder/sympy,moble/sympy,pandeyadarsh/sympy,abhiii5459/sympy,Davidjohnwilson/sympy,emon10005/sympy,wanglongqi/sympy,oliverlee/sympy,ga7g08/sympy,kevalds51/sympy,postvakje/sympy,oliverlee/sympy,postvakje/sympy,farhaanbukhsh/sympy,lindsayad/sympy,jaimahajan1997/sympy,yukoba/sympy,kaushik94/sympy,sampadsaha5/sympy,rahuldan/sympy,VaibhavAgarwalVA/sympy,sahmed95/sympy,chaffra/sympy,yashsharan/sympy,Designist/sympy,shikil/sympy,cswiercz/sympy,hargup/sympy,iamutkarshtiwari/sympy,farhaanbukhsh/sympy,mcdaniel67/sympy,pandeyadarsh/sympy,kaichogami/sympy,chaffra/sympy,MechCoder/sympy,atreyv/sympy,drufat/sympy,Arafatk/sympy,Curious72/sympy,aktech/sympy,Arafatk/sympy,wanglongqi/sympy,saurabhjn76/sympy,jbbskinny/sympy,Shaswat27/sympy,ChristinaZografou/sympy,aktech/sympy,jerli/sympy,madan96/sympy,VaibhavAgarwalVA/sympy,madan96/sympy,moble/sympy,chaffra/sympy,hargup/sympy,souravsingh/sympy,AkademieOlympia/sympy,souravsingh/sympy,MechCoder/sympy,lindsayad/sympy,sampadsaha5/sympy,jbbskinny/sympy,ChristinaZografou/sympy,skidzo/sympy
|
python
|
## Code Before:
from __future__ import print_function, division
import inspect
def source(object):
"""
Prints the source code of a given object.
"""
print('In file: %s' % inspect.getsourcefile(object))
print(inspect.getsource(object))
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
lookup_view = lookup_view
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(
__import__(mod_name, {}, {}, ['*']), func_name)
if not callable(lookup_view):
raise AttributeError(
"'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot + 1:]
## Instruction:
Remove a redundant line from get_class
## Code After:
from __future__ import print_function, division
import inspect
def source(object):
"""
Prints the source code of a given object.
"""
print('In file: %s' % inspect.getsourcefile(object))
print(inspect.getsource(object))
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(
__import__(mod_name, {}, {}, ['*']), func_name)
if not callable(lookup_view):
raise AttributeError(
"'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot + 1:]
|
# ... existing code ...
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(
# ... rest of the code ...
|
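A brief illustration of the idea in the sympy record above: get_class resolves a dotted string by splitting off the final attribute name and importing the containing module. The standalone sketch below shows the same lookup using only standard-library names; load_object and the 'collections.OrderedDict' example are illustrative choices, not sympy API.

def load_object(dotted_path):
    # Expects a dotted path like 'package.module.Name': split off the final
    # attribute, import the module, then fetch the attribute from it.
    mod_name, _, attr = dotted_path.rpartition('.')
    module = __import__(mod_name, {}, {}, ['*'])
    obj = getattr(module, attr)
    if not callable(obj):
        raise AttributeError("'%s.%s' is not a callable." % (mod_name, attr))
    return obj

OrderedDict = load_object('collections.OrderedDict')
print(OrderedDict)  # <class 'collections.OrderedDict'>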
d0e7d3578fe79432ad2b2cc62be2203d4ff36014
|
examples/charts/file/cat_heatmap.py
|
examples/charts/file/cat_heatmap.py
|
from bokeh.charts import HeatMap, output_file, show
from bokeh.sampledata.unemployment1948 import data
# pandas magic
df = data[data.columns[:-2]]
df2 = df.set_index(df[df.columns[0]].astype(str))
df2.drop(df.columns[0], axis=1, inplace=True)
df3 = df2.transpose()
output_file("cat_heatmap.html")
hm = HeatMap(df3, title="categorical heatmap", width=800)
show(hm)
|
from bokeh.charts import HeatMap, output_file, show
from bokeh.palettes import YlOrRd9 as palette
from bokeh.sampledata.unemployment1948 import data
# pandas magic
df = data[data.columns[:-1]]
df2 = df.set_index(df[df.columns[0]].astype(str))
df2.drop(df.columns[0], axis=1, inplace=True)
df3 = df2.transpose()
output_file("cat_heatmap.html")
palette = palette[::-1] # Reverse the color order so dark red is highest unemployment
hm = HeatMap(df3, title="categorical heatmap", width=800, palette=palette)
show(hm)
|
Use all the months of the year and tweak palette.
|
Use all the months of the year and tweak palette.
- Picked a continuous palette and reversed as better for the data.
|
Python
|
bsd-3-clause
|
abele/bokeh,laurent-george/bokeh,azjps/bokeh,josherick/bokeh,rs2/bokeh,timsnyder/bokeh,draperjames/bokeh,matbra/bokeh,ptitjano/bokeh,DuCorey/bokeh,awanke/bokeh,akloster/bokeh,xguse/bokeh,stonebig/bokeh,KasperPRasmussen/bokeh,phobson/bokeh,stonebig/bokeh,CrazyGuo/bokeh,ericmjl/bokeh,rhiever/bokeh,caseyclements/bokeh,roxyboy/bokeh,maxalbert/bokeh,ericdill/bokeh,KasperPRasmussen/bokeh,maxalbert/bokeh,stuart-knock/bokeh,draperjames/bokeh,carlvlewis/bokeh,akloster/bokeh,srinathv/bokeh,timsnyder/bokeh,khkaminska/bokeh,gpfreitas/bokeh,bokeh/bokeh,dennisobrien/bokeh,rothnic/bokeh,Karel-van-de-Plassche/bokeh,deeplook/bokeh,PythonCharmers/bokeh,matbra/bokeh,ericmjl/bokeh,ahmadia/bokeh,justacec/bokeh,aiguofer/bokeh,muku42/bokeh,ChristosChristofidis/bokeh,matbra/bokeh,rothnic/bokeh,mindriot101/bokeh,phobson/bokeh,Karel-van-de-Plassche/bokeh,timothydmorton/bokeh,ChristosChristofidis/bokeh,philippjfr/bokeh,josherick/bokeh,rhiever/bokeh,roxyboy/bokeh,alan-unravel/bokeh,jakirkham/bokeh,timsnyder/bokeh,philippjfr/bokeh,ahmadia/bokeh,aavanian/bokeh,carlvlewis/bokeh,maxalbert/bokeh,muku42/bokeh,laurent-george/bokeh,justacec/bokeh,evidation-health/bokeh,laurent-george/bokeh,bsipocz/bokeh,DuCorey/bokeh,rs2/bokeh,saifrahmed/bokeh,quasiben/bokeh,justacec/bokeh,ptitjano/bokeh,saifrahmed/bokeh,jakirkham/bokeh,jplourenco/bokeh,ChinaQuants/bokeh,percyfal/bokeh,stonebig/bokeh,ericmjl/bokeh,daodaoliang/bokeh,eteq/bokeh,abele/bokeh,htygithub/bokeh,aiguofer/bokeh,khkaminska/bokeh,ahmadia/bokeh,stuart-knock/bokeh,htygithub/bokeh,paultcochrane/bokeh,jakirkham/bokeh,muku42/bokeh,srinathv/bokeh,alan-unravel/bokeh,philippjfr/bokeh,htygithub/bokeh,eteq/bokeh,bokeh/bokeh,ChinaQuants/bokeh,KasperPRasmussen/bokeh,daodaoliang/bokeh,eteq/bokeh,schoolie/bokeh,satishgoda/bokeh,aavanian/bokeh,carlvlewis/bokeh,PythonCharmers/bokeh,phobson/bokeh,josherick/bokeh,schoolie/bokeh,aiguofer/bokeh,daodaoliang/bokeh,jplourenco/bokeh,roxyboy/bokeh,Karel-van-de-Plassche/bokeh,jplourenco/bokeh,tacaswell/bokeh,azjps/bokeh,quasiben/bokeh,dennisobrien/bokeh,saifrahmed/bokeh,mindriot101/bokeh,azjps/bokeh,xguse/bokeh,bsipocz/bokeh,caseyclements/bokeh,clairetang6/bokeh,paultcochrane/bokeh,saifrahmed/bokeh,stuart-knock/bokeh,gpfreitas/bokeh,dennisobrien/bokeh,alan-unravel/bokeh,jakirkham/bokeh,KasperPRasmussen/bokeh,Karel-van-de-Plassche/bokeh,DuCorey/bokeh,xguse/bokeh,jplourenco/bokeh,PythonCharmers/bokeh,PythonCharmers/bokeh,schoolie/bokeh,philippjfr/bokeh,gpfreitas/bokeh,xguse/bokeh,timsnyder/bokeh,deeplook/bokeh,khkaminska/bokeh,evidation-health/bokeh,paultcochrane/bokeh,aiguofer/bokeh,aavanian/bokeh,DuCorey/bokeh,msarahan/bokeh,stonebig/bokeh,ChristosChristofidis/bokeh,abele/bokeh,rhiever/bokeh,draperjames/bokeh,bsipocz/bokeh,ericdill/bokeh,rhiever/bokeh,awanke/bokeh,azjps/bokeh,muku42/bokeh,srinathv/bokeh,timsnyder/bokeh,satishgoda/bokeh,satishgoda/bokeh,eteq/bokeh,msarahan/bokeh,daodaoliang/bokeh,rothnic/bokeh,awanke/bokeh,maxalbert/bokeh,evidation-health/bokeh,ericmjl/bokeh,jakirkham/bokeh,ericdill/bokeh,ChinaQuants/bokeh,satishgoda/bokeh,Karel-van-de-Plassche/bokeh,paultcochrane/bokeh,CrazyGuo/bokeh,deeplook/bokeh,akloster/bokeh,caseyclements/bokeh,draperjames/bokeh,aavanian/bokeh,deeplook/bokeh,ChinaQuants/bokeh,CrazyGuo/bokeh,rs2/bokeh,bokeh/bokeh,clairetang6/bokeh,dennisobrien/bokeh,timothydmorton/bokeh,rs2/bokeh,ahmadia/bokeh,dennisobrien/bokeh,matbra/bokeh,justacec/bokeh,mindriot101/bokeh,timothydmorton/bokeh,akloster/bokeh,clairetang6/bokeh,awanke/bokeh,tacaswell/bokeh,schoolie/bokeh,evidation-health/bokeh,ptitjano/bokeh,schoolie/bokeh,hty
github/bokeh,alan-unravel/bokeh,ptitjano/bokeh,bokeh/bokeh,stuart-knock/bokeh,srinathv/bokeh,rs2/bokeh,ChristosChristofidis/bokeh,carlvlewis/bokeh,mindriot101/bokeh,msarahan/bokeh,gpfreitas/bokeh,KasperPRasmussen/bokeh,roxyboy/bokeh,percyfal/bokeh,aavanian/bokeh,CrazyGuo/bokeh,percyfal/bokeh,caseyclements/bokeh,azjps/bokeh,msarahan/bokeh,ericdill/bokeh,ptitjano/bokeh,percyfal/bokeh,aiguofer/bokeh,clairetang6/bokeh,bokeh/bokeh,josherick/bokeh,bsipocz/bokeh,phobson/bokeh,abele/bokeh,timothydmorton/bokeh,percyfal/bokeh,DuCorey/bokeh,quasiben/bokeh,tacaswell/bokeh,khkaminska/bokeh,phobson/bokeh,tacaswell/bokeh,rothnic/bokeh,draperjames/bokeh,philippjfr/bokeh,ericmjl/bokeh,laurent-george/bokeh
|
python
|
## Code Before:
from bokeh.charts import HeatMap, output_file, show
from bokeh.sampledata.unemployment1948 import data
# pandas magic
df = data[data.columns[:-2]]
df2 = df.set_index(df[df.columns[0]].astype(str))
df2.drop(df.columns[0], axis=1, inplace=True)
df3 = df2.transpose()
output_file("cat_heatmap.html")
hm = HeatMap(df3, title="categorical heatmap", width=800)
show(hm)
## Instruction:
Use all the months of the year and tweak palette.
- Picked a continuous palette and reversed as better for the data.
## Code After:
from bokeh.charts import HeatMap, output_file, show
from bokeh.palettes import YlOrRd9 as palette
from bokeh.sampledata.unemployment1948 import data
# pandas magic
df = data[data.columns[:-1]]
df2 = df.set_index(df[df.columns[0]].astype(str))
df2.drop(df.columns[0], axis=1, inplace=True)
df3 = df2.transpose()
output_file("cat_heatmap.html")
palette = palette[::-1] # Reverse the color order so dark red is highest unemployment
hm = HeatMap(df3, title="categorical heatmap", width=800, palette=palette)
show(hm)
|
# ... existing code ...
from bokeh.charts import HeatMap, output_file, show
from bokeh.palettes import YlOrRd9 as palette
from bokeh.sampledata.unemployment1948 import data
# pandas magic
df = data[data.columns[:-1]]
df2 = df.set_index(df[df.columns[0]].astype(str))
df2.drop(df.columns[0], axis=1, inplace=True)
df3 = df2.transpose()
# ... modified code ...
output_file("cat_heatmap.html")
palette = palette[::-1] # Reverse the color order so dark red is highest unemployment
hm = HeatMap(df3, title="categorical heatmap", width=800, palette=palette)
show(hm)
# ... rest of the code ...
|
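A brief illustration of the one subtle step in the bokeh record above: the palette is reversed so the darkest colour lands on the highest unemployment values. The snippet below is plain list slicing and does not import bokeh; the hex values are stand-ins for illustration, not necessarily the real YlOrRd9 entries.

# A stand-in palette, ordered light to dark (hypothetical values).
palette = ['#ffffcc', '#fed976', '#fd8d3c', '#e31a1c', '#800026']

# palette[::-1] builds a reversed copy, so the darkest colour comes first
# and is assigned to the highest bucket by the chart.
reversed_palette = palette[::-1]
print(reversed_palette[0])  # '#800026'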
72a29a96d123990adef08b392d0efccb6c5ecd69
|
src/util.h
|
src/util.h
|
namespace pl0 {
#if defined(_MSC_VER) || defined(__GNUC__)
namespace polyfill {
template <typename T, typename... Args>
void fold_write_stream(std::ostringstream &oss, T value, Args... args) {
oss << value;
fold_write_stream(oss, args...);
}
template <typename T>
void fold_write_stream(std::ostringstream &oss, T value) {
oss << value;
}
}
#endif
class general_error {
std::string message_;
public:
template <typename... Args>
general_error(Args... args) {
std::ostringstream oss;
#if defined(_MSC_VER) || defined(__GNUC__)
// Visual Studio 2017 does not support fold expression now.
// We need to make a polyfill.
polyfill::fold_write_stream(oss, args...);
#else
oss << ... << args;
#endif
message_ = oss.str();
}
const std::string &what() const { return message_; }
};
}
#endif
|
namespace pl0 {
#if defined(_MSC_VER) || defined(__GNUC__)
namespace polyfill {
template <typename T>
void fold_write_stream(std::ostringstream &oss, T value) {
oss << value;
}
template <typename T, typename... Args>
void fold_write_stream(std::ostringstream &oss, T value, Args... args) {
oss << value;
fold_write_stream(oss, args...);
}
}
#endif
class general_error {
std::string message_;
public:
template <typename... Args>
general_error(Args... args) {
std::ostringstream oss;
#if defined(_MSC_VER) || defined(__GNUC__)
// Visual Studio 2017 does not support fold expression now.
// We need to make a polyfill.
polyfill::fold_write_stream(oss, args...);
#else
oss << ... << args;
#endif
message_ = oss.str();
}
const std::string &what() const { return message_; }
};
}
#endif
|
Swap two overloads to make g++ happy.
|
Swap two overloads to make g++ happy.
Hmmmm.
|
C
|
mit
|
chengluyu/PL0
|
c
|
## Code Before:
namespace pl0 {
#if defined(_MSC_VER) || defined(__GNUC__)
namespace polyfill {
template <typename T, typename... Args>
void fold_write_stream(std::ostringstream &oss, T value, Args... args) {
oss << value;
fold_write_stream(oss, args...);
}
template <typename T>
void fold_write_stream(std::ostringstream &oss, T value) {
oss << value;
}
}
#endif
class general_error {
std::string message_;
public:
template <typename... Args>
general_error(Args... args) {
std::ostringstream oss;
#if defined(_MSC_VER) || defined(__GNUC__)
// Visual Studio 2017 does not support fold expression now.
// We need to make a polyfill.
polyfill::fold_write_stream(oss, args...);
#else
oss << ... << args;
#endif
message_ = oss.str();
}
const std::string &what() const { return message_; }
};
}
#endif
## Instruction:
Swap two overloads to make g++ happy.
Hmmmm.
## Code After:
namespace pl0 {
#if defined(_MSC_VER) || defined(__GNUC__)
namespace polyfill {
template <typename T>
void fold_write_stream(std::ostringstream &oss, T value) {
oss << value;
}
template <typename T, typename... Args>
void fold_write_stream(std::ostringstream &oss, T value, Args... args) {
oss << value;
fold_write_stream(oss, args...);
}
}
#endif
class general_error {
std::string message_;
public:
template <typename... Args>
general_error(Args... args) {
std::ostringstream oss;
#if defined(_MSC_VER) || defined(__GNUC__)
// Visual Studio 2017 does not support fold expression now.
// We need to make a polyfill.
polyfill::fold_write_stream(oss, args...);
#else
oss << ... << args;
#endif
message_ = oss.str();
}
const std::string &what() const { return message_; }
};
}
#endif
|
// ... existing code ...
#if defined(_MSC_VER) || defined(__GNUC__)
namespace polyfill {
template <typename T>
void fold_write_stream(std::ostringstream &oss, T value) {
oss << value;
}
template <typename T, typename... Args>
void fold_write_stream(std::ostringstream &oss, T value, Args... args) {
oss << value;
fold_write_stream(oss, args...);
}
}
// ... rest of the code ...
|
f2d5de53a560731476d96423988d2e55cdbdef32
|
src/main/java/starpunk/AssetManager.java
|
src/main/java/starpunk/AssetManager.java
|
package starpunk;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.g2d.TextureAtlas;
import java.util.HashMap;
public final class AssetManager
{
private final HashMap<String, TextureAtlas.AtlasRegion> _sprites = new HashMap<String, TextureAtlas.AtlasRegion>();
protected void initialize()
{
final TextureAtlas textureAtlas = new TextureAtlas( Gdx.files.internal( "target/assets/game" ) );
for( final TextureAtlas.AtlasRegion r : textureAtlas.getRegions() )
{
_sprites.put( r.name, r );
}
}
protected void dispose()
{
_sprites.clear();
}
public TextureAtlas.AtlasRegion getSprite( final String name )
{
return _sprites.get( name );
}
}
|
package starpunk;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.g2d.TextureAtlas;
import java.util.HashMap;
import java.util.Map;
public final class AssetManager
{
private final HashMap<String, TextureAtlas.AtlasRegion> _sprites = new HashMap<String, TextureAtlas.AtlasRegion>();
private final HashMap<String, TextureAtlas> _textures = new HashMap<String, TextureAtlas>();
protected void initialize()
{
final String textureName = "target/assets/game";
final TextureAtlas textureAtlas = new TextureAtlas( Gdx.files.internal( textureName ) );
for( final TextureAtlas.AtlasRegion r : textureAtlas.getRegions() )
{
_sprites.put( r.name, r );
}
_textures.put( textureName, textureAtlas );
}
protected void dispose()
{
for( final Map.Entry<String, TextureAtlas> entry : _textures.entrySet() )
{
entry.getValue().dispose();
}
_textures.clear();
_sprites.clear();
}
public TextureAtlas.AtlasRegion getSprite( final String name )
{
return _sprites.get( name );
}
}
|
Improve the dispose by disposing the containing atlas
|
Improve the dispose by disposing the containing atlas
|
Java
|
apache-2.0
|
realityforge-experiments/star-punk,realityforge-experiments/star-punk,realityforge-experiments/star-punk
|
java
|
## Code Before:
package starpunk;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.g2d.TextureAtlas;
import java.util.HashMap;
public final class AssetManager
{
private final HashMap<String, TextureAtlas.AtlasRegion> _sprites = new HashMap<String, TextureAtlas.AtlasRegion>();
protected void initialize()
{
final TextureAtlas textureAtlas = new TextureAtlas( Gdx.files.internal( "target/assets/game" ) );
for( final TextureAtlas.AtlasRegion r : textureAtlas.getRegions() )
{
_sprites.put( r.name, r );
}
}
protected void dispose()
{
_sprites.clear();
}
public TextureAtlas.AtlasRegion getSprite( final String name )
{
return _sprites.get( name );
}
}
## Instruction:
Improve the dispose by disposing the containing atlas
## Code After:
package starpunk;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.g2d.TextureAtlas;
import java.util.HashMap;
import java.util.Map;
public final class AssetManager
{
private final HashMap<String, TextureAtlas.AtlasRegion> _sprites = new HashMap<String, TextureAtlas.AtlasRegion>();
private final HashMap<String, TextureAtlas> _textures = new HashMap<String, TextureAtlas>();
protected void initialize()
{
final String textureName = "target/assets/game";
final TextureAtlas textureAtlas = new TextureAtlas( Gdx.files.internal( textureName ) );
for( final TextureAtlas.AtlasRegion r : textureAtlas.getRegions() )
{
_sprites.put( r.name, r );
}
_textures.put( textureName, textureAtlas );
}
protected void dispose()
{
for( final Map.Entry<String, TextureAtlas> entry : _textures.entrySet() )
{
entry.getValue().dispose();
}
_textures.clear();
_sprites.clear();
}
public TextureAtlas.AtlasRegion getSprite( final String name )
{
return _sprites.get( name );
}
}
|
...
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.g2d.TextureAtlas;
import java.util.HashMap;
import java.util.Map;
public final class AssetManager
{
private final HashMap<String, TextureAtlas.AtlasRegion> _sprites = new HashMap<String, TextureAtlas.AtlasRegion>();
private final HashMap<String, TextureAtlas> _textures = new HashMap<String, TextureAtlas>();
protected void initialize()
{
final String textureName = "target/assets/game";
final TextureAtlas textureAtlas = new TextureAtlas( Gdx.files.internal( textureName ) );
for( final TextureAtlas.AtlasRegion r : textureAtlas.getRegions() )
{
_sprites.put( r.name, r );
}
_textures.put( textureName, textureAtlas );
}
protected void dispose()
{
for( final Map.Entry<String, TextureAtlas> entry : _textures.entrySet() )
{
entry.getValue().dispose();
}
_textures.clear();
_sprites.clear();
}
...
|
dbe57e9b76194b13d90834163ebe8bf924464dd0
|
src/mcedit2/util/lazyprop.py
|
src/mcedit2/util/lazyprop.py
|
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def lazyprop(fn):
"""
Lazily computed property wrapper.
>>> class Foo(object):
... @lazyprop
... def func(self):
... print("Big computation here!")
... return 42
>>> f = Foo()
>>> f.func
Big computation here!
42
>>> f.func
42
>>> del f.func
>>> f.func
Big computation here!
42
:type fn: __builtin__.function
:return:
:rtype:
"""
attr_name = '_lazy_' + fn.__name__
@property
def _lazyprop(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
@_lazyprop.deleter
def _lazyprop(self):
if hasattr(self, attr_name):
delattr(self, attr_name)
return _lazyprop
|
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import weakref
log = logging.getLogger(__name__)
def lazyprop(fn):
"""
Lazily computed property wrapper.
>>> class Foo(object):
... @lazyprop
... def func(self):
... print("Big computation here!")
... return 42
>>> f = Foo()
>>> f.func
Big computation here!
42
>>> f.func
42
>>> del f.func
>>> f.func
Big computation here!
42
:type fn: __builtin__.function
:return:
:rtype:
"""
attr_name = '_lazy_' + fn.__name__
@property
def _lazyprop(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
@_lazyprop.deleter
def _lazyprop(self):
if hasattr(self, attr_name):
delattr(self, attr_name)
return _lazyprop
class weakrefprop(object):
def __init__(self, name):
self.name = "__weakprop__" + name
def __get__(self, instance, owner):
ref = getattr(instance, self.name, None)
if ref is None:
return None
return ref()
def __set__(self, instance, value):
setattr(instance, self.name, weakref.ref(value))
|
Add a property descriptor for weakref'd members
|
Add a property descriptor for weakref'd members
|
Python
|
bsd-3-clause
|
vorburger/mcedit2,Rubisk/mcedit2,Rubisk/mcedit2,vorburger/mcedit2
|
python
|
## Code Before:
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def lazyprop(fn):
"""
Lazily computed property wrapper.
>>> class Foo(object):
... @lazyprop
... def func(self):
... print("Big computation here!")
... return 42
>>> f = Foo()
>>> f.func
Big computation here!
42
>>> f.func
42
>>> del f.func
>>> f.func
Big computation here!
42
:type fn: __builtin__.function
:return:
:rtype:
"""
attr_name = '_lazy_' + fn.__name__
@property
def _lazyprop(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
@_lazyprop.deleter
def _lazyprop(self):
if hasattr(self, attr_name):
delattr(self, attr_name)
return _lazyprop
## Instruction:
Add a property descriptor for weakref'd members
## Code After:
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import weakref
log = logging.getLogger(__name__)
def lazyprop(fn):
"""
Lazily computed property wrapper.
>>> class Foo(object):
... @lazyprop
... def func(self):
... print("Big computation here!")
... return 42
>>> f = Foo()
>>> f.func
Big computation here!
42
>>> f.func
42
>>> del f.func
>>> f.func
Big computation here!
42
:type fn: __builtin__.function
:return:
:rtype:
"""
attr_name = '_lazy_' + fn.__name__
@property
def _lazyprop(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
@_lazyprop.deleter
def _lazyprop(self):
if hasattr(self, attr_name):
delattr(self, attr_name)
return _lazyprop
class weakrefprop(object):
def __init__(self, name):
self.name = "__weakprop__" + name
def __get__(self, instance, owner):
ref = getattr(instance, self.name, None)
if ref is None:
return None
return ref()
def __set__(self, instance, value):
setattr(instance, self.name, weakref.ref(value))
|
...
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import weakref
log = logging.getLogger(__name__)
def lazyprop(fn):
...
return _lazyprop
class weakrefprop(object):
def __init__(self, name):
self.name = "__weakprop__" + name
def __get__(self, instance, owner):
ref = getattr(instance, self.name, None)
if ref is None:
return None
return ref()
def __set__(self, instance, value):
setattr(instance, self.name, weakref.ref(value))
...
|
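A brief illustration of the descriptor added in the mcedit2 record above: weakrefprop stores a weak reference on assignment and dereferences it on access. The sketch below copies the descriptor so the demo is self-contained; the Node and Payload classes are hypothetical and exist only to show the property returning None once the referent is gone (immediate collection assumes CPython's reference counting).

import weakref

class weakrefprop(object):
    # Copied from the record above so this demo runs on its own.
    def __init__(self, name):
        self.name = "__weakprop__" + name
    def __get__(self, instance, owner):
        ref = getattr(instance, self.name, None)
        if ref is None:
            return None
        return ref()
    def __set__(self, instance, value):
        setattr(instance, self.name, weakref.ref(value))

class Payload(object):
    pass

class Node(object):
    parent = weakrefprop("parent")

p = Payload()
n = Node()
n.parent = p
print(n.parent is p)   # True while the referent is alive
del p
print(n.parent)        # None after the referent is collected (CPython)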
15013c51f602786265b59c1d4a7e894eae090d90
|
tests/test_normalize.py
|
tests/test_normalize.py
|
from hypothesis import assume, given
from utils import isclose, vectors
@given(v=vectors())
def test_normalize_length(v):
"""v.normalize().length == 1 and v == v.length * v.normalize()"""
assume(v)
assert isclose(v.normalize().length, 1)
assert v.isclose(v.length * v.normalize())
|
from hypothesis import assume, given
from utils import isclose, vectors
@given(v=vectors())
def test_normalize_length(v):
"""v.normalize().length == 1 and v == v.length * v.normalize()"""
assume(v)
assert isclose(v.normalize().length, 1)
assert v.isclose(v.length * v.normalize())
@given(v=vectors())
def test_normalize_angle(v):
"""Normalization preserves direction."""
assume(v)
assert angle_isclose(v.normalize().angle(v), 0)
|
Test that direction is preserved
|
tests/normalize: Test that direction is preserved
|
Python
|
artistic-2.0
|
ppb/ppb-vector,ppb/ppb-vector
|
python
|
## Code Before:
from hypothesis import assume, given
from utils import isclose, vectors
@given(v=vectors())
def test_normalize_length(v):
"""v.normalize().length == 1 and v == v.length * v.normalize()"""
assume(v)
assert isclose(v.normalize().length, 1)
assert v.isclose(v.length * v.normalize())
## Instruction:
tests/normalize: Test that direction is preserved
## Code After:
from hypothesis import assume, given
from utils import isclose, vectors
@given(v=vectors())
def test_normalize_length(v):
"""v.normalize().length == 1 and v == v.length * v.normalize()"""
assume(v)
assert isclose(v.normalize().length, 1)
assert v.isclose(v.length * v.normalize())
@given(v=vectors())
def test_normalize_angle(v):
"""Normalization preserves direction."""
assume(v)
assert angle_isclose(v.normalize().angle(v), 0)
|
// ... existing code ...
assume(v)
assert isclose(v.normalize().length, 1)
assert v.isclose(v.length * v.normalize())
@given(v=vectors())
def test_normalize_angle(v):
"""Normalization preserves direction."""
assume(v)
assert angle_isclose(v.normalize().angle(v), 0)
// ... rest of the code ...
|
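A note on the ppb-vector record above: the added test calls angle_isclose, which is not among the names imported from utils in this file, so it presumably lives in the project's test helpers. One plausible, purely illustrative shape for such a helper, assuming angles in degrees, is sketched below.

def angle_isclose(a, b, epsilon=1e-7):
    # Compare two angles modulo a full turn so that 359.9999999 and 0 match.
    d = abs(a - b) % 360
    return min(d, 360 - d) < epsilon

print(angle_isclose(0, 360))          # True
print(angle_isclose(90, 90 + 1e-9))   # True
print(angle_isclose(10, 20))          # False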
9541fd723308d51f7c380649a81b4992074a1193
|
workout_manager/urls.py
|
workout_manager/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('manager.urls')),
url(r'exercise/', include('exercises.urls')),
url(r'weight/', include('weight.urls')),
url(r'nutrition/', include('nutrition.urls')),
url(r'^browserid/', include('django_browserid.urls')),
)
|
from django.conf.urls import patterns, include, url
from django.conf.urls.i18n import i18n_patterns
from django.contrib import admin
admin.autodiscover()
urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('manager.urls')),
url(r'exercise/', include('exercises.urls')),
url(r'weight/', include('weight.urls')),
url(r'nutrition/', include('nutrition.urls')),
url(r'^browserid/', include('django_browserid.urls')),
)
|
Append the name of the current language to the URLs
|
Append the name of the current language to the URLs
--HG--
branch : 1.1-dev
|
Python
|
agpl-3.0
|
DeveloperMal/wger,petervanderdoes/wger,DeveloperMal/wger,kjagoo/wger_stark,petervanderdoes/wger,wger-project/wger,kjagoo/wger_stark,rolandgeider/wger,wger-project/wger,DeveloperMal/wger,kjagoo/wger_stark,DeveloperMal/wger,petervanderdoes/wger,wger-project/wger,rolandgeider/wger,rolandgeider/wger,kjagoo/wger_stark,petervanderdoes/wger,rolandgeider/wger,wger-project/wger
|
python
|
## Code Before:
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('manager.urls')),
url(r'exercise/', include('exercises.urls')),
url(r'weight/', include('weight.urls')),
url(r'nutrition/', include('nutrition.urls')),
url(r'^browserid/', include('django_browserid.urls')),
)
## Instruction:
Append the name of the current language to the URLs
--HG--
branch : 1.1-dev
## Code After:
from django.conf.urls import patterns, include, url
from django.conf.urls.i18n import i18n_patterns
from django.contrib import admin
admin.autodiscover()
urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('manager.urls')),
url(r'exercise/', include('exercises.urls')),
url(r'weight/', include('weight.urls')),
url(r'nutrition/', include('nutrition.urls')),
url(r'^browserid/', include('django_browserid.urls')),
)
|
...
from django.conf.urls import patterns, include, url
from django.conf.urls.i18n import i18n_patterns
from django.contrib import admin
admin.autodiscover()
urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('manager.urls')),
url(r'exercise/', include('exercises.urls')),
...
|
774b64779b18ff0d8fba048ab4c4cae53662628a
|
ummeli/vlive/auth/middleware.py
|
ummeli/vlive/auth/middleware.py
|
from django.contrib.auth.middleware import RemoteUserMiddleware
class VodafoneLiveUserMiddleware(RemoteUserMiddleware):
header = 'HTTP_X_UP_CALLING_LINE_ID'
class VodafoneLiveInfo(object):
pass
class VodafoneLiveInfoMiddleware(object):
"""
Friendlier access to device / request info that Vodafone Live makes
available to us via HTTP Headers
"""
def process_request(self, request):
vlive = VodafoneLiveInfo()
vlive.msisdn = request.META.get('HTTP_X_UP_CALLING_LINE_ID', 'unknown')
vlive.area = request.META.get('HTTP_X_VODAFONE_AREA', 'unknown')
request.vlive = vlive
print request.META
|
from django.contrib.auth.middleware import RemoteUserMiddleware
class VodafoneLiveUserMiddleware(RemoteUserMiddleware):
header = 'HTTP_X_UP_CALLING_LINE_ID'
class VodafoneLiveInfo(object):
pass
class VodafoneLiveInfoMiddleware(object):
"""
Friendlier access to device / request info that Vodafone Live makes
available to us via HTTP Headers
"""
def process_request(self, request):
vlive = VodafoneLiveInfo()
vlive.msisdn = request.META.get('HTTP_X_UP_CALLING_LINE_ID', 'unknown')
vlive.area = request.META.get('HTTP_X_VODAFONE_AREA', 'unknown')
request.vlive = vlive
|
Revert "printing META for troubleshooting"
|
Revert "printing META for troubleshooting"
This reverts commit 42d15d528da14866f2f0479da6462c17a02d8c84.
|
Python
|
bsd-3-clause
|
praekelt/ummeli,praekelt/ummeli,praekelt/ummeli
|
python
|
## Code Before:
from django.contrib.auth.middleware import RemoteUserMiddleware
class VodafoneLiveUserMiddleware(RemoteUserMiddleware):
header = 'HTTP_X_UP_CALLING_LINE_ID'
class VodafoneLiveInfo(object):
pass
class VodafoneLiveInfoMiddleware(object):
"""
Friendlier access to device / request info that Vodafone Live makes
available to us via HTTP Headers
"""
def process_request(self, request):
vlive = VodafoneLiveInfo()
vlive.msisdn = request.META.get('HTTP_X_UP_CALLING_LINE_ID', 'unknown')
vlive.area = request.META.get('HTTP_X_VODAFONE_AREA', 'unknown')
request.vlive = vlive
print request.META
## Instruction:
Revert "printing META for troubleshooting"
This reverts commit 42d15d528da14866f2f0479da6462c17a02d8c84.
## Code After:
from django.contrib.auth.middleware import RemoteUserMiddleware
class VodafoneLiveUserMiddleware(RemoteUserMiddleware):
header = 'HTTP_X_UP_CALLING_LINE_ID'
class VodafoneLiveInfo(object):
pass
class VodafoneLiveInfoMiddleware(object):
"""
Friendlier access to device / request info that Vodafone Live makes
available to us via HTTP Headers
"""
def process_request(self, request):
vlive = VodafoneLiveInfo()
vlive.msisdn = request.META.get('HTTP_X_UP_CALLING_LINE_ID', 'unknown')
vlive.area = request.META.get('HTTP_X_VODAFONE_AREA', 'unknown')
request.vlive = vlive
|
// ... existing code ...
vlive.msisdn = request.META.get('HTTP_X_UP_CALLING_LINE_ID', 'unknown')
vlive.area = request.META.get('HTTP_X_VODAFONE_AREA', 'unknown')
request.vlive = vlive
// ... rest of the code ...
|
cb33141dc6882d032a3edb51d6bf03d26a396db1
|
src/main/java/com/worldcretornica/plotme_abstractgenerator/bukkit/BukkitBlockRepresentation.java
|
src/main/java/com/worldcretornica/plotme_abstractgenerator/bukkit/BukkitBlockRepresentation.java
|
package com.worldcretornica.plotme_abstractgenerator.bukkit;
public class BukkitBlockRepresentation {
private final short id;
private final byte data;
public BukkitBlockRepresentation(short id, byte value) {
this.id = id;
this.data = value;
}
public BukkitBlockRepresentation(String idValue) {
this(getBlockId(idValue), getBlockData(idValue));
}
public static byte getBlockId(String idValue) throws NumberFormatException {
if (idValue.indexOf(":") > 0) {
return Byte.parseByte(idValue.split(":")[0]);
} else {
return Byte.parseByte(idValue);
}
}
public static byte getBlockData(String idValue) throws NumberFormatException {
if (idValue.indexOf(":") > 0) {
return Byte.parseByte(idValue.split(":")[1]);
} else {
return 0;
}
}
public short getId() {
return id;
}
public byte getData() {
return data;
}
}
|
package com.worldcretornica.plotme_abstractgenerator.bukkit;
public class BukkitBlockRepresentation {
private final short id;
private final byte data;
public BukkitBlockRepresentation(short id, byte value) {
this.id = id;
this.data = value;
}
public BukkitBlockRepresentation(String idValue) {
this(getBlockId(idValue), getBlockData(idValue));
}
public static short getBlockId(String idValue) throws NumberFormatException {
if (idValue.indexOf(":") > 0) {
return Short.parseShort(idValue.split(":")[0]);
} else {
return Short.parseShort(idValue);
}
}
public static byte getBlockData(String idValue) throws NumberFormatException {
if (idValue.indexOf(":") > 0) {
return Byte.parseByte(idValue.split(":")[1]);
} else {
return 0;
}
}
public short getId() {
return id;
}
public byte getData() {
return data;
}
}
|
Use short instead of bytes for block id
|
Use short instead of bytes for block id
|
Java
|
mit
|
WorldCretornica/PlotMe-AbstractGenerator
|
java
|
## Code Before:
package com.worldcretornica.plotme_abstractgenerator.bukkit;
public class BukkitBlockRepresentation {
private final short id;
private final byte data;
public BukkitBlockRepresentation(short id, byte value) {
this.id = id;
this.data = value;
}
public BukkitBlockRepresentation(String idValue) {
this(getBlockId(idValue), getBlockData(idValue));
}
public static byte getBlockId(String idValue) throws NumberFormatException {
if (idValue.indexOf(":") > 0) {
return Byte.parseByte(idValue.split(":")[0]);
} else {
return Byte.parseByte(idValue);
}
}
public static byte getBlockData(String idValue) throws NumberFormatException {
if (idValue.indexOf(":") > 0) {
return Byte.parseByte(idValue.split(":")[1]);
} else {
return 0;
}
}
public short getId() {
return id;
}
public byte getData() {
return data;
}
}
## Instruction:
Use short instead of bytes for block id
## Code After:
package com.worldcretornica.plotme_abstractgenerator.bukkit;
public class BukkitBlockRepresentation {
private final short id;
private final byte data;
public BukkitBlockRepresentation(short id, byte value) {
this.id = id;
this.data = value;
}
public BukkitBlockRepresentation(String idValue) {
this(getBlockId(idValue), getBlockData(idValue));
}
public static short getBlockId(String idValue) throws NumberFormatException {
if (idValue.indexOf(":") > 0) {
return Short.parseShort(idValue.split(":")[0]);
} else {
return Short.parseShort(idValue);
}
}
public static byte getBlockData(String idValue) throws NumberFormatException {
if (idValue.indexOf(":") > 0) {
return Byte.parseByte(idValue.split(":")[1]);
} else {
return 0;
}
}
public short getId() {
return id;
}
public byte getData() {
return data;
}
}
|
// ... existing code ...
this(getBlockId(idValue), getBlockData(idValue));
}
public static short getBlockId(String idValue) throws NumberFormatException {
if (idValue.indexOf(":") > 0) {
return Short.parseShort(idValue.split(":")[0]);
} else {
return Short.parseShort(idValue);
}
}
// ... rest of the code ...
|
de348d8816151f2674410566f3eaff9d43d9dcde
|
src/markdoc/cli/main.py
|
src/markdoc/cli/main.py
|
import os
import argparse
from markdoc.cli import commands
from markdoc.cli.parser import parser
from markdoc.config import Config, ConfigNotFound
def main(cmd_args=None):
"""The main entry point for running the Markdoc CLI."""
if cmd_args is not None:
args = parser.parse_args(cmd_args)
else:
args = parser.parse_args()
if args.command != 'init':
try:
args.config = os.path.abspath(args.config)
if os.path.isdir(args.config):
config = Config.for_directory(args.config)
elif os.path.isfile(args.config):
config = Config.for_file(args.config)
else:
raise ConfigNotFound("Couldn't locate Markdoc config.")
except ConfigNotFound, exc:
parser.error(str(exc))
else:
config = None
command = getattr(commands, args.command.replace('-', '_'))
return command(config, args)
if __name__ == '__main__':
main()
|
import logging
import os
import argparse
from markdoc.cli import commands
from markdoc.cli.parser import parser
from markdoc.config import Config, ConfigNotFound
def main(cmd_args=None):
"""The main entry point for running the Markdoc CLI."""
if cmd_args is not None:
args = parser.parse_args(cmd_args)
else:
args = parser.parse_args()
if args.command != 'init':
try:
args.config = os.path.abspath(args.config)
if os.path.isdir(args.config):
config = Config.for_directory(args.config)
elif os.path.isfile(args.config):
config = Config.for_file(args.config)
else:
raise ConfigNotFound("Couldn't locate Markdoc config.")
except ConfigNotFound, exc:
parser.error(str(exc))
else:
config = None
if args.quiet:
logging.getLogger('markdoc').setLevel(logging.ERROR)
command = getattr(commands, args.command.replace('-', '_'))
return command(config, args)
if __name__ == '__main__':
main()
|
Use logging levels to suppress non-error output with --quiet on the CLI.
|
Use logging levels to suppress non-error output with --quiet on the CLI.
|
Python
|
unlicense
|
wlonk/markdoc,lrem/phdoc,lrem/phdoc,zacharyvoase/markdoc,snoozbuster/markdoc,wlonk/markdoc,snoozbuster/markdoc
|
python
|
## Code Before:
import os
import argparse
from markdoc.cli import commands
from markdoc.cli.parser import parser
from markdoc.config import Config, ConfigNotFound
def main(cmd_args=None):
"""The main entry point for running the Markdoc CLI."""
if cmd_args is not None:
args = parser.parse_args(cmd_args)
else:
args = parser.parse_args()
if args.command != 'init':
try:
args.config = os.path.abspath(args.config)
if os.path.isdir(args.config):
config = Config.for_directory(args.config)
elif os.path.isfile(args.config):
config = Config.for_file(args.config)
else:
raise ConfigNotFound("Couldn't locate Markdoc config.")
except ConfigNotFound, exc:
parser.error(str(exc))
else:
config = None
command = getattr(commands, args.command.replace('-', '_'))
return command(config, args)
if __name__ == '__main__':
main()
## Instruction:
Use logging levels to suppress non-error output with --quiet on the CLI.
## Code After:
import logging
import os
import argparse
from markdoc.cli import commands
from markdoc.cli.parser import parser
from markdoc.config import Config, ConfigNotFound
def main(cmd_args=None):
"""The main entry point for running the Markdoc CLI."""
if cmd_args is not None:
args = parser.parse_args(cmd_args)
else:
args = parser.parse_args()
if args.command != 'init':
try:
args.config = os.path.abspath(args.config)
if os.path.isdir(args.config):
config = Config.for_directory(args.config)
elif os.path.isfile(args.config):
config = Config.for_file(args.config)
else:
raise ConfigNotFound("Couldn't locate Markdoc config.")
except ConfigNotFound, exc:
parser.error(str(exc))
else:
config = None
if args.quiet:
logging.getLogger('markdoc').setLevel(logging.ERROR)
command = getattr(commands, args.command.replace('-', '_'))
return command(config, args)
if __name__ == '__main__':
main()
|
# ... existing code ...
import logging
import os
import argparse
# ... modified code ...
else:
config = None
if args.quiet:
logging.getLogger('markdoc').setLevel(logging.ERROR)
command = getattr(commands, args.command.replace('-', '_'))
return command(config, args)
# ... rest of the code ...
|
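A brief illustration of the mechanism in the markdoc record above: raising a named logger's level to ERROR silences everything below that level for that logger without touching the rest of the program. The standalone sketch below uses only the standard library; the 'markdoc' logger name comes from the record, the messages are made up.

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger('markdoc')

log.info('building wiki')        # emitted: INFO passes the default threshold

log.setLevel(logging.ERROR)      # what the --quiet branch does
log.info('building wiki')        # suppressed now
log.warning('stale template')    # suppressed too
log.error('template missing')    # still emitted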
7d4d1afc5a42edb88f5cb8eb1347b79fdc131272
|
src/actions/client.py
|
src/actions/client.py
|
from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
from twisted.application.internet import MulticastServer
from BeautifulSoup import BeautifulSoup, SoupStrainer
import requests
fileserver = ''
urls = []
def get_file_urls(self, url):
f = requests.get("http://" + url)
for link in BeautifulSoup(f, parseOnlyThese=SoupStrainer('a')):
self.urls.append(link)
print link
class MulticastClientUDP(DatagramProtocol):
def __init__(self):
self.host = '224.0.0.5'
def startProtocol(self):
# this could be placed in a config
self.transport.joinGroup(self.host)
def datagramReceived(self, datagram, address):
print "Received: " + repr(datagram)
fileserver = repr(datagram).replace("'", "")
# this will need more checking - it is killing the conn once it receives the address
self.transport.loseConnection()
reactor.stop()
# once you receive the message then add it to the
def main():
print "Listening"
reactor.listenMulticast(8005,
MulticastClientUDP(),
listenMultiple = True)
reactor.run()
# is the reactor done?
get_file_urls(fileserver)
if __name__ == '__main__':
main()
|
from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
from twisted.application.internet import MulticastServer
from BeautifulSoup import BeautifulSoup, SoupStrainer
import requests
fileserver = ''
urls = []
def get_file_urls(self, url):
f = requests.get("http://" + url)
for link in BeautifulSoup(f, parseOnlyThese=SoupStrainer('a')):
urls.append(link)
def get_files():
pass
class MulticastClientUDP(DatagramProtocol):
def __init__(self):
self.host = '224.0.0.5'
def startProtocol(self):
# this could be placed in a config
self.transport.joinGroup(self.host)
def datagramReceived(self, datagram, address):
print "Received: " + repr(datagram)
fileserver = repr(datagram).replace("'", "")
# this will need more checking - it is killing the conn once it receives the address
self.transport.loseConnection()
reactor.stop()
def main():
print "Listening"
reactor.listenMulticast(8005,
MulticastClientUDP(),
listenMultiple = True)
reactor.run()
# reactor is closed at this point.
get_file_urls(fileserver)
if __name__ == '__main__':
main()
|
Stop reactor and find files
|
Stop reactor and find files
|
Python
|
mit
|
derwolfe/teiler,derwolfe/teiler
|
python
|
## Code Before:
from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
from twisted.application.internet import MulticastServer
from BeautifulSoup import BeautifulSoup, SoupStrainer
import requests
fileserver = ''
urls = []
def get_file_urls(self, url):
f = requests.get("http://" + url)
for link in BeautifulSoup(f, parseOnlyThese=SoupStrainer('a')):
self.urls.append(link)
print link
class MulticastClientUDP(DatagramProtocol):
def __init__(self):
self.host = '224.0.0.5'
def startProtocol(self):
# this could be placed in a config
self.transport.joinGroup(self.host)
def datagramReceived(self, datagram, address):
print "Received: " + repr(datagram)
fileserver = repr(datagram).replace("'", "")
# this will need more checking - it is killing the conn once it receives the address
self.transport.loseConnection()
reactor.stop()
# once you receive the message then add it to the
def main():
print "Listening"
reactor.listenMulticast(8005,
MulticastClientUDP(),
listenMultiple = True)
reactor.run()
# is the reactor done?
get_file_urls(fileserver)
if __name__ == '__main__':
main()
## Instruction:
Stop reactor and find files
## Code After:
from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
from twisted.application.internet import MulticastServer
from BeautifulSoup import BeautifulSoup, SoupStrainer
import requests
fileserver = ''
urls = []
def get_file_urls(self, url):
f = requests.get("http://" + url)
for link in BeautifulSoup(f, parseOnlyThese=SoupStrainer('a')):
urls.append(link)
def get_files():
pass
class MulticastClientUDP(DatagramProtocol):
def __init__(self):
self.host = '224.0.0.5'
def startProtocol(self):
# this could be placed in a config
self.transport.joinGroup(self.host)
def datagramReceived(self, datagram, address):
print "Received: " + repr(datagram)
fileserver = repr(datagram).replace("'", "")
# this will need more checking - it is killing the conn once it receives the address
self.transport.loseConnection()
reactor.stop()
def main():
print "Listening"
reactor.listenMulticast(8005,
MulticastClientUDP(),
listenMultiple = True)
reactor.run()
# reactor is closed at this point.
get_file_urls(fileserver)
if __name__ == '__main__':
main()
|
// ... existing code ...
def get_file_urls(self, url):
f = requests.get("http://" + url)
for link in BeautifulSoup(f, parseOnlyThese=SoupStrainer('a')):
urls.append(link)
def get_files():
pass
class MulticastClientUDP(DatagramProtocol):
// ... modified code ...
self.transport.loseConnection()
reactor.stop()
def main():
print "Listening"
reactor.listenMulticast(8005,
...
listenMultiple = True)
reactor.run()
# reactor is closed at this point.
get_file_urls(fileserver)
// ... rest of the code ...
|
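A brief illustration of the parsing pattern in the teiler record above: parsing is restricted to anchor tags with a SoupStrainer before the links are collected. The record imports the old BeautifulSoup 3 package; the sketch below assumes the modern bs4 equivalent instead, and uses a made-up HTML snippet rather than a live requests call.

# Assumed bs4 spelling of the anchor-only parse; illustrative only.
from bs4 import BeautifulSoup, SoupStrainer

html = '<p>files</p><a href="a.txt">a.txt</a><a href="b.txt">b.txt</a>'
soup = BeautifulSoup(html, 'html.parser', parse_only=SoupStrainer('a'))

for link in soup.find_all('a'):
    print(link.get('href'))   # a.txt, then b.txt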
8b92bc6c4a782dbb83aadb1bbfc5951dc53f53e1
|
netbox/dcim/migrations/0145_site_remove_deprecated_fields.py
|
netbox/dcim/migrations/0145_site_remove_deprecated_fields.py
|
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('dcim', '0144_fix_cable_abs_length'),
]
operations = [
migrations.RemoveField(
model_name='site',
name='asn',
),
migrations.RemoveField(
model_name='site',
name='contact_email',
),
migrations.RemoveField(
model_name='site',
name='contact_name',
),
migrations.RemoveField(
model_name='site',
name='contact_phone',
),
]
|
from django.db import migrations
from django.db.utils import DataError
def check_legacy_data(apps, schema_editor):
"""
Abort the migration if any legacy site fields still contain data.
"""
Site = apps.get_model('dcim', 'Site')
if site_count := Site.objects.exclude(asn__isnull=True).count():
raise DataError(
f"Unable to proceed with deleting asn field from Site model: Found {site_count} sites with "
f"legacy ASN data. Please ensure all legacy site ASN data has been migrated to ASN objects "
f"before proceeding."
)
if site_count := Site.objects.exclude(contact_name='', contact_phone='', contact_email='').count():
raise DataError(
f"Unable to proceed with deleting contact fields from Site model: Found {site_count} sites "
f"with legacy contact data. Please ensure all legacy site contact data has been migrated to "
f"contact objects before proceeding."
)
class Migration(migrations.Migration):
dependencies = [
('dcim', '0144_fix_cable_abs_length'),
]
operations = [
migrations.RunPython(
code=check_legacy_data,
reverse_code=migrations.RunPython.noop
),
migrations.RemoveField(
model_name='site',
name='asn',
),
migrations.RemoveField(
model_name='site',
name='contact_email',
),
migrations.RemoveField(
model_name='site',
name='contact_name',
),
migrations.RemoveField(
model_name='site',
name='contact_phone',
),
]
|
Add migration safeguard to prevent accidental destruction of data
|
Add migration safeguard to prevent accidental destruction of data
|
Python
|
apache-2.0
|
digitalocean/netbox,digitalocean/netbox,digitalocean/netbox,digitalocean/netbox
|
python
|
## Code Before:
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('dcim', '0144_fix_cable_abs_length'),
]
operations = [
migrations.RemoveField(
model_name='site',
name='asn',
),
migrations.RemoveField(
model_name='site',
name='contact_email',
),
migrations.RemoveField(
model_name='site',
name='contact_name',
),
migrations.RemoveField(
model_name='site',
name='contact_phone',
),
]
## Instruction:
Add migration safeguard to prevent accidental destruction of data
## Code After:
from django.db import migrations
from django.db.utils import DataError
def check_legacy_data(apps, schema_editor):
"""
Abort the migration if any legacy site fields still contain data.
"""
Site = apps.get_model('dcim', 'Site')
if site_count := Site.objects.exclude(asn__isnull=True).count():
raise DataError(
f"Unable to proceed with deleting asn field from Site model: Found {site_count} sites with "
f"legacy ASN data. Please ensure all legacy site ASN data has been migrated to ASN objects "
f"before proceeding."
)
if site_count := Site.objects.exclude(contact_name='', contact_phone='', contact_email='').count():
raise DataError(
f"Unable to proceed with deleting contact fields from Site model: Found {site_count} sites "
f"with legacy contact data. Please ensure all legacy site contact data has been migrated to "
f"contact objects before proceeding."
)
class Migration(migrations.Migration):
dependencies = [
('dcim', '0144_fix_cable_abs_length'),
]
operations = [
migrations.RunPython(
code=check_legacy_data,
reverse_code=migrations.RunPython.noop
),
migrations.RemoveField(
model_name='site',
name='asn',
),
migrations.RemoveField(
model_name='site',
name='contact_email',
),
migrations.RemoveField(
model_name='site',
name='contact_name',
),
migrations.RemoveField(
model_name='site',
name='contact_phone',
),
]
|
// ... existing code ...
from django.db import migrations
from django.db.utils import DataError
def check_legacy_data(apps, schema_editor):
"""
Abort the migration if any legacy site fields still contain data.
"""
Site = apps.get_model('dcim', 'Site')
if site_count := Site.objects.exclude(asn__isnull=True).count():
raise DataError(
f"Unable to proceed with deleting asn field from Site model: Found {site_count} sites with "
f"legacy ASN data. Please ensure all legacy site ASN data has been migrated to ASN objects "
f"before proceeding."
)
if site_count := Site.objects.exclude(contact_name='', contact_phone='', contact_email='').count():
raise DataError(
f"Unable to proceed with deleting contact fields from Site model: Found {site_count} sites "
f"with legacy contact data. Please ensure all legacy site contact data has been migrated to "
f"contact objects before proceeding."
)
class Migration(migrations.Migration):
// ... modified code ...
]
operations = [
migrations.RunPython(
code=check_legacy_data,
reverse_code=migrations.RunPython.noop
),
migrations.RemoveField(
model_name='site',
name='asn',
// ... rest of the code ...
|
eb94c85937f353c103b9d610bf4a851b05d2e634
|
src/test/resources/shared/nonexecute/StackOverflow.java
|
src/test/resources/shared/nonexecute/StackOverflow.java
|
/**
* This test simply overflows the stack through recursion.
*
* @author Elvis Stansvik <[email protected]>
*/
class StackOverflow {
public static void main(String[] args) {
Overflower o;
o = new Overflower();
o.overflow();
}
}
class Overflower {
public int overflow() {
int a;
int b;
int c;
return this.overflow();
}
}
|
/**
* This test simply overflows the stack through recursion.
*
* @author Elvis Stansvik <[email protected]>
*/
class StackOverflow {
public static void main(String[] args) {
Overflower o;
int r;
o = new Overflower();
r = o.overflow();
}
}
class Overflower {
public int overflow() {
int a;
int b;
int c;
return this.overflow();
}
}
|
Make test case MiniJava (bah!).
|
Make test case MiniJava (bah!).
|
Java
|
mit
|
estan/mjc,estan/mjc,liuxinglanyue/mjc,estan/mjc,liuxinglanyue/mjc,liuxinglanyue/mjc
|
java
|
## Code Before:
/**
* This test simply overflows the stack through recursion.
*
* @author Elvis Stansvik <[email protected]>
*/
class StackOverflow {
public static void main(String[] args) {
Overflower o;
o = new Overflower();
o.overflow();
}
}
class Overflower {
public int overflow() {
int a;
int b;
int c;
return this.overflow();
}
}
## Instruction:
Make test case MiniJava (bah!).
## Code After:
/**
* This test simply overflows the stack through recursion.
*
* @author Elvis Stansvik <[email protected]>
*/
class StackOverflow {
public static void main(String[] args) {
Overflower o;
int r;
o = new Overflower();
r = o.overflow();
}
}
class Overflower {
public int overflow() {
int a;
int b;
int c;
return this.overflow();
}
}
|
...
class StackOverflow {
public static void main(String[] args) {
Overflower o;
int r;
o = new Overflower();
r = o.overflow();
}
}
...
|
b9aa93c127573b4b1d068290bb4ca2e3eaf4b82c
|
GerritCommon/src/main/java/com.holmsted/file/FileWriter.java
|
GerritCommon/src/main/java/com.holmsted/file/FileWriter.java
|
package com.holmsted.file;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
public class FileWriter {
public static void writeFile(String filename, String contents) {
File dataFile = new File(filename);
FileOutputStream outputStream;
try {
outputStream = new FileOutputStream(dataFile);
Writer writer = new BufferedWriter(new OutputStreamWriter(outputStream, "UTF-8"));
writer.write(contents);
writer.close();
} catch (IOException e) {
e.printStackTrace();
}
}
public static void writeFile(String filename, InputStream inputStream) {
File dataFile = new File(filename);
FileOutputStream outputStream;
try {
outputStream = new FileOutputStream(dataFile);
byte[] buffer = new byte[4096];
int readBytes;
while ((readBytes = inputStream.read(buffer)) > 0) {
outputStream.write(buffer, 0, readBytes);
}
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
|
package com.holmsted.file;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import javax.annotation.Nonnull;
public class FileWriter {
public static void writeFile(String filename, String contents) {
File dataFile = new File(filename);
mkdirsForFile(dataFile);
FileOutputStream outputStream;
try {
outputStream = new FileOutputStream(dataFile);
Writer writer = new BufferedWriter(new OutputStreamWriter(outputStream, "UTF-8"));
writer.write(contents);
writer.close();
} catch (IOException e) {
e.printStackTrace();
}
}
public static void writeFile(String filename, InputStream inputStream) {
File dataFile = new File(filename);
mkdirsForFile(dataFile);
FileOutputStream outputStream;
try {
outputStream = new FileOutputStream(dataFile);
byte[] buffer = new byte[4096];
int readBytes;
while ((readBytes = inputStream.read(buffer)) > 0) {
outputStream.write(buffer, 0, readBytes);
}
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private static void mkdirsForFile(@Nonnull File file) {
File path = new File(file.getParent());
if (!path.exists() && !path.mkdirs()) {
throw new IllegalArgumentException(
"Path did not exist and could not be created: '" + path.getAbsolutePath() + "'");
}
}
}
|
Create directories for output if they don't already exist
|
Create directories for output if they don't already exist
Especially the downloader should do this to allow for easier
scripting.
|
Java
|
mit
|
holmari/gerritstats,holmari/gerritstats,holmari/gerritstats,holmari/gerritstats
|
java
|
## Code Before:
package com.holmsted.file;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
public class FileWriter {
public static void writeFile(String filename, String contents) {
File dataFile = new File(filename);
FileOutputStream outputStream;
try {
outputStream = new FileOutputStream(dataFile);
Writer writer = new BufferedWriter(new OutputStreamWriter(outputStream, "UTF-8"));
writer.write(contents);
writer.close();
} catch (IOException e) {
e.printStackTrace();
}
}
public static void writeFile(String filename, InputStream inputStream) {
File dataFile = new File(filename);
FileOutputStream outputStream;
try {
outputStream = new FileOutputStream(dataFile);
byte[] buffer = new byte[4096];
int readBytes;
while ((readBytes = inputStream.read(buffer)) > 0) {
outputStream.write(buffer, 0, readBytes);
}
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
## Instruction:
Create directories for output if they don't already exist
Especially the downloader should do this to allow for easier
scripting.
## Code After:
package com.holmsted.file;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import javax.annotation.Nonnull;
public class FileWriter {
public static void writeFile(String filename, String contents) {
File dataFile = new File(filename);
mkdirsForFile(dataFile);
FileOutputStream outputStream;
try {
outputStream = new FileOutputStream(dataFile);
Writer writer = new BufferedWriter(new OutputStreamWriter(outputStream, "UTF-8"));
writer.write(contents);
writer.close();
} catch (IOException e) {
e.printStackTrace();
}
}
public static void writeFile(String filename, InputStream inputStream) {
File dataFile = new File(filename);
mkdirsForFile(dataFile);
FileOutputStream outputStream;
try {
outputStream = new FileOutputStream(dataFile);
byte[] buffer = new byte[4096];
int readBytes;
while ((readBytes = inputStream.read(buffer)) > 0) {
outputStream.write(buffer, 0, readBytes);
}
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private static void mkdirsForFile(@Nonnull File file) {
File path = new File(file.getParent());
if (!path.exists() && !path.mkdirs()) {
throw new IllegalArgumentException(
"Path did not exist and could not be created: '" + path.getAbsolutePath() + "'");
}
}
}
|
// ... existing code ...
import java.io.OutputStreamWriter;
import java.io.Writer;
import javax.annotation.Nonnull;
public class FileWriter {
public static void writeFile(String filename, String contents) {
File dataFile = new File(filename);
mkdirsForFile(dataFile);
FileOutputStream outputStream;
try {
outputStream = new FileOutputStream(dataFile);
// ... modified code ...
public static void writeFile(String filename, InputStream inputStream) {
File dataFile = new File(filename);
mkdirsForFile(dataFile);
FileOutputStream outputStream;
try {
outputStream = new FileOutputStream(dataFile);
...
e.printStackTrace();
}
}
private static void mkdirsForFile(@Nonnull File file) {
File path = new File(file.getParent());
if (!path.exists() && !path.mkdirs()) {
throw new IllegalArgumentException(
"Path did not exist and could not be created: '" + path.getAbsolutePath() + "'");
}
}
}
// ... rest of the code ...
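The change above is Java, but the underlying pattern (create any missing parent directories before writing a file) is language-agnostic. For comparison only, a hedged Python sketch of the same guard using the standard library follows; it is not a translation of the project's FileWriter, and the filename used in the usage line is a placeholder.
# Sketch of the "make parent directories before writing" pattern in Python.
from pathlib import Path

def write_file(filename, contents):
    path = Path(filename)
    # Equivalent of mkdirsForFile above: a no-op if the directories already exist.
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(contents, encoding="utf-8")

if __name__ == "__main__":
    write_file("out/reports/summary.txt", "hello")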
|
952ef8d596916b7e753c1179552a270430a21122
|
tests/test_lattice.py
|
tests/test_lattice.py
|
import rml.lattice
import rml.element
DUMMY_NAME = 'dummy'
def test_create_lattice():
l = rml.lattice.Lattice(DUMMY_NAME)
assert(len(l)) == 0
assert l.name == DUMMY_NAME
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element():
l = rml.lattice.Lattice(DUMMY_NAME)
element_length = 1.5
e = rml.element.Element('dummy_element', element_length)
l.append_element(e)
# There is one element in the lattice.
assert(len(l) == 1)
# The total length of the lattice is the same as its one element.
assert l.length() == element_length
# Get all elements
assert l.get_elements() == [e]
|
import pytest
import rml.lattice
import rml.element
DUMMY_NAME = 'dummy'
@pytest.fixture
def simple_element():
element_length = 1.5
e = rml.element.Element('dummy_element', element_length)
return e
@pytest.fixture
def simple_element_and_lattice(simple_element):
l = rml.lattice.Lattice(DUMMY_NAME)
l.append_element(simple_element)
return simple_element, l
def test_create_lattice():
l = rml.lattice.Lattice(DUMMY_NAME)
assert(len(l)) == 0
assert l.name == DUMMY_NAME
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element(simple_element_and_lattice):
element, lattice = simple_element_and_lattice
# There is one element in the lattice.
assert(len(lattice) == 1)
# The total length of the lattice is the same as its one element.
assert lattice.length() == element.length
# Get all elements
assert lattice.get_elements() == [element]
def test_lattice_get_element_with_family(simple_element_and_lattice):
element, lattice = simple_element_and_lattice
element.add_to_family('fam')
assert lattice.get_elements('fam') == [element]
assert lattice.get_elements('nofam') == []
|
Test getting elements with different family names.
|
Test getting elements with different family names.
|
Python
|
apache-2.0
|
razvanvasile/RML,willrogers/pml,willrogers/pml
|
python
|
## Code Before:
import rml.lattice
import rml.element
DUMMY_NAME = 'dummy'
def test_create_lattice():
l = rml.lattice.Lattice(DUMMY_NAME)
assert(len(l)) == 0
assert l.name == DUMMY_NAME
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element():
l = rml.lattice.Lattice(DUMMY_NAME)
element_length = 1.5
e = rml.element.Element('dummy_element', element_length)
l.append_element(e)
# There is one element in the lattice.
assert(len(l) == 1)
# The total length of the lattice is the same as its one element.
assert l.length() == element_length
# Get all elements
assert l.get_elements() == [e]
## Instruction:
Test getting elements with different family names.
## Code After:
import pytest
import rml.lattice
import rml.element
DUMMY_NAME = 'dummy'
@pytest.fixture
def simple_element():
element_length = 1.5
e = rml.element.Element('dummy_element', element_length)
return e
@pytest.fixture
def simple_element_and_lattice(simple_element):
l = rml.lattice.Lattice(DUMMY_NAME)
l.append_element(simple_element)
return simple_element, l
def test_create_lattice():
l = rml.lattice.Lattice(DUMMY_NAME)
assert(len(l)) == 0
assert l.name == DUMMY_NAME
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element(simple_element_and_lattice):
element, lattice = simple_element_and_lattice
# There is one element in the lattice.
assert(len(lattice) == 1)
# The total length of the lattice is the same as its one element.
assert lattice.length() == element.length
# Get all elements
assert lattice.get_elements() == [element]
def test_lattice_get_element_with_family(simple_element_and_lattice):
element, lattice = simple_element_and_lattice
element.add_to_family('fam')
assert lattice.get_elements('fam') == [element]
assert lattice.get_elements('nofam') == []
|
// ... existing code ...
import pytest
import rml.lattice
import rml.element
DUMMY_NAME = 'dummy'
@pytest.fixture
def simple_element():
element_length = 1.5
e = rml.element.Element('dummy_element', element_length)
return e
@pytest.fixture
def simple_element_and_lattice(simple_element):
l = rml.lattice.Lattice(DUMMY_NAME)
l.append_element(simple_element)
return simple_element, l
def test_create_lattice():
// ... modified code ...
assert(len(l)) >= 0
def test_lattice_with_one_element(simple_element_and_lattice):
element, lattice = simple_element_and_lattice
# There is one element in the lattice.
assert(len(lattice) == 1)
# The total length of the lattice is the same as its one element.
assert lattice.length() == element.length
# Get all elements
assert lattice.get_elements() == [element]
def test_lattice_get_element_with_family(simple_element_and_lattice):
element, lattice = simple_element_and_lattice
element.add_to_family('fam')
assert lattice.get_elements('fam') == [element]
assert lattice.get_elements('nofam') == []
// ... rest of the code ...
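To make the expectations encoded in these tests concrete, here is a minimal stand-in for the interface they exercise. It is inferred purely from the assertions above and is only an illustration; the real rml package may be implemented quite differently.
# Minimal stand-in for the Element/Lattice interface implied by the tests.
class Element(object):
    def __init__(self, name, length):
        self.name = name
        self.length = length
        self.families = set()

    def add_to_family(self, family):
        self.families.add(family)


class Lattice(object):
    def __init__(self, name=''):
        self.name = name
        self._elements = []

    def __len__(self):
        return len(self._elements)

    def append_element(self, element):
        self._elements.append(element)

    def length(self):
        # Total length is the sum of the member elements' lengths.
        return sum(e.length for e in self._elements)

    def get_elements(self, family=None):
        # With no family given, return every element; otherwise filter by family.
        if family is None:
            return list(self._elements)
        return [e for e in self._elements if family in e.families]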
|
52b845ad513bfcc43f0ae4179d254a964676e7bf
|
src/de.sormuras.bach/main/java/de/sormuras/bach/util/Tools.java
|
src/de.sormuras.bach/main/java/de/sormuras/bach/util/Tools.java
|
package de.sormuras.bach.util;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.ServiceLoader;
import java.util.TreeMap;
import java.util.function.Consumer;
import java.util.spi.ToolProvider;
/** Tool registry. */
public class Tools {
final Map<String, ToolProvider> map;
public Tools() {
this.map = new TreeMap<>();
ServiceLoader.load(ToolProvider.class).stream()
.map(ServiceLoader.Provider::get)
.forEach(provider -> map.putIfAbsent(provider.name(), provider));
}
public ToolProvider get(String name) {
var tool = map.get(name);
if (tool == null) {
throw new NoSuchElementException("No such tool: " + name);
}
return tool;
}
public void forEach(Consumer<ToolProvider> action) {
map.values().forEach(action);
}
}
|
package de.sormuras.bach.util;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.ServiceLoader;
import java.util.TreeMap;
import java.util.function.Consumer;
import java.util.spi.ToolProvider;
/** Tool registry. */
public class Tools {
final Map<String, ToolProvider> map;
public Tools() {
this.map = new TreeMap<>();
ServiceLoader.load(ToolProvider.class, ClassLoader.getSystemClassLoader()).stream()
.map(ServiceLoader.Provider::get)
.forEach(provider -> map.putIfAbsent(provider.name(), provider));
}
public ToolProvider get(String name) {
var tool = map.get(name);
if (tool == null) {
throw new NoSuchElementException("No such tool: " + name);
}
return tool;
}
public void forEach(Consumer<ToolProvider> action) {
map.values().forEach(action);
}
}
|
Use system (application) class loader to load tool providers
|
Use system (application) class loader to load tool providers
|
Java
|
mit
|
sormuras/bach,sormuras/bach
|
java
|
## Code Before:
package de.sormuras.bach.util;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.ServiceLoader;
import java.util.TreeMap;
import java.util.function.Consumer;
import java.util.spi.ToolProvider;
/** Tool registry. */
public class Tools {
final Map<String, ToolProvider> map;
public Tools() {
this.map = new TreeMap<>();
ServiceLoader.load(ToolProvider.class).stream()
.map(ServiceLoader.Provider::get)
.forEach(provider -> map.putIfAbsent(provider.name(), provider));
}
public ToolProvider get(String name) {
var tool = map.get(name);
if (tool == null) {
throw new NoSuchElementException("No such tool: " + name);
}
return tool;
}
public void forEach(Consumer<ToolProvider> action) {
map.values().forEach(action);
}
}
## Instruction:
Use system (application) class loader to load tool providers
## Code After:
package de.sormuras.bach.util;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.ServiceLoader;
import java.util.TreeMap;
import java.util.function.Consumer;
import java.util.spi.ToolProvider;
/** Tool registry. */
public class Tools {
final Map<String, ToolProvider> map;
public Tools() {
this.map = new TreeMap<>();
ServiceLoader.load(ToolProvider.class, ClassLoader.getSystemClassLoader()).stream()
.map(ServiceLoader.Provider::get)
.forEach(provider -> map.putIfAbsent(provider.name(), provider));
}
public ToolProvider get(String name) {
var tool = map.get(name);
if (tool == null) {
throw new NoSuchElementException("No such tool: " + name);
}
return tool;
}
public void forEach(Consumer<ToolProvider> action) {
map.values().forEach(action);
}
}
|
...
public Tools() {
this.map = new TreeMap<>();
ServiceLoader.load(ToolProvider.class, ClassLoader.getSystemClassLoader()).stream()
.map(ServiceLoader.Provider::get)
.forEach(provider -> map.putIfAbsent(provider.name(), provider));
}
...
|
66ae18a11290e73a996d1e2f2ba8018e29c0f92b
|
sheepdog_tables/forms.py
|
sheepdog_tables/forms.py
|
import logging
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, HTML, Div, Submit
logger = logging.getLogger("sheepdog_tables")
class CSVExportForm(forms.Form):
id = forms.CharField(widget=forms.HiddenInput)
class EditTableSubmitForm(forms.Form):
def __init__(self, table, table_key, *args, **kwargs):
self.table = table
if not any([c.editable for c in self.table.table_columns.values()]):
print "Warning: Editable table has no editable columns"
logger.warning("Editable table has no editable columns")
super(EditTableSubmitForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_method = 'POST'
self.helper.form_class = 'form-horizontal'
self.helper.layout = Layout(
Div(
HTML("<h4>Bulk Editing</h4>"),
HTML("<p>This will submit all fields in the table.</p>"),
Div(
Div(
Submit(
name='submit', value="Save",
data_edittable_form="edittable_%s" % table_key,
css_class="btn btn-primary"),
css_class="filter-btns btn-group"),
css_class="filter-btns-row btn-toolbar"),
css_class="well filtering-well"),
)
|
import logging
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, HTML, Div, Submit
logger = logging.getLogger("sheepdog_tables")
class CSVExportForm(forms.Form):
id = forms.CharField(widget=forms.HiddenInput)
class EditTableSubmitForm(forms.Form):
def __init__(self, table, table_key, *args, **kwargs):
self.table = table
if not any([c.editable for c in self.table.table_columns.values()]):
print "Warning: Editable table has no editable columns"
super(EditTableSubmitForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_method = 'POST'
self.helper.form_class = 'form-horizontal'
self.helper.layout = Layout(
Div(
HTML("<h4>Bulk Editing</h4>"),
HTML("<p>This will submit all fields in the table.</p>"),
Div(
Div(
Submit(
name='submit', value="Save",
data_edittable_form="edittable_%s" % table_key,
css_class="btn btn-primary"),
css_class="filter-btns btn-group"),
css_class="filter-btns-row btn-toolbar"),
css_class="well filtering-well"),
)
|
Remove logger warning in favor of print for now
|
Remove logger warning in favor of print for now
|
Python
|
bsd-3-clause
|
SheepDogInc/sheepdog_tables,SheepDogInc/sheepdog_tables
|
python
|
## Code Before:
import logging
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, HTML, Div, Submit
logger = logging.getLogger("sheepdog_tables")
class CSVExportForm(forms.Form):
id = forms.CharField(widget=forms.HiddenInput)
class EditTableSubmitForm(forms.Form):
def __init__(self, table, table_key, *args, **kwargs):
self.table = table
if not any([c.editable for c in self.table.table_columns.values()]):
print "Warning: Editable table has no editable columns"
logger.warning("Editable table has no editable columns")
super(EditTableSubmitForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_method = 'POST'
self.helper.form_class = 'form-horizontal'
self.helper.layout = Layout(
Div(
HTML("<h4>Bulk Editing</h4>"),
HTML("<p>This will submit all fields in the table.</p>"),
Div(
Div(
Submit(
name='submit', value="Save",
data_edittable_form="edittable_%s" % table_key,
css_class="btn btn-primary"),
css_class="filter-btns btn-group"),
css_class="filter-btns-row btn-toolbar"),
css_class="well filtering-well"),
)
## Instruction:
Remove logger warning in favor of print for now
## Code After:
import logging
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, HTML, Div, Submit
logger = logging.getLogger("sheepdog_tables")
class CSVExportForm(forms.Form):
id = forms.CharField(widget=forms.HiddenInput)
class EditTableSubmitForm(forms.Form):
def __init__(self, table, table_key, *args, **kwargs):
self.table = table
if not any([c.editable for c in self.table.table_columns.values()]):
print "Warning: Editable table has no editable columns"
super(EditTableSubmitForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_method = 'POST'
self.helper.form_class = 'form-horizontal'
self.helper.layout = Layout(
Div(
HTML("<h4>Bulk Editing</h4>"),
HTML("<p>This will submit all fields in the table.</p>"),
Div(
Div(
Submit(
name='submit', value="Save",
data_edittable_form="edittable_%s" % table_key,
css_class="btn btn-primary"),
css_class="filter-btns btn-group"),
css_class="filter-btns-row btn-toolbar"),
css_class="well filtering-well"),
)
|
// ... existing code ...
self.table = table
if not any([c.editable for c in self.table.table_columns.values()]):
print "Warning: Editable table has no editable columns"
super(EditTableSubmitForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_method = 'POST'
// ... rest of the code ...
|
7079614f35de60def5f4e1cc1cb17cf3e5b4d9c6
|
setup.py
|
setup.py
|
from distutils.core import setup
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='facebook-sdk',
version='0.3.2',
description='This client library is designed to support the Facebook '
'Graph API and the official Facebook JavaScript SDK, which '
'is the canonical way to implement Facebook authentication.',
author='Facebook',
maintainer='Martey Dodoo',
maintainer_email='[email protected]',
url='https://github.com/pythonforfacebook/facebook-sdk',
license='Apache',
py_modules=[
'facebook',
],
long_description=read("README.rst"),
classifiers=[
'License :: OSI Approved :: Apache Software License',
],
)
|
from distutils.core import setup
setup(
name='facebook-sdk',
version='0.3.2',
description='This client library is designed to support the Facebook '
'Graph API and the official Facebook JavaScript SDK, which '
'is the canonical way to implement Facebook authentication.',
author='Facebook',
maintainer='Martey Dodoo',
maintainer_email='[email protected]',
url='https://github.com/pythonforfacebook/facebook-sdk',
license='Apache',
py_modules=[
'facebook',
],
long_description=open("README.rst").read(),
classifiers=[
'License :: OSI Approved :: Apache Software License',
],
)
|
Change way README is imported.
|
Change way README is imported.
The custom read function is unnecessary since only one file is being
accessed. Removing it reduces the amount of code.
|
Python
|
apache-2.0
|
Aloomaio/facebook-sdk,mobolic/facebook-sdk
|
python
|
## Code Before:
from distutils.core import setup
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='facebook-sdk',
version='0.3.2',
description='This client library is designed to support the Facebook '
'Graph API and the official Facebook JavaScript SDK, which '
'is the canonical way to implement Facebook authentication.',
author='Facebook',
maintainer='Martey Dodoo',
maintainer_email='[email protected]',
url='https://github.com/pythonforfacebook/facebook-sdk',
license='Apache',
py_modules=[
'facebook',
],
long_description=read("README.rst"),
classifiers=[
'License :: OSI Approved :: Apache Software License',
],
)
## Instruction:
Change way README is imported.
The custom read function is unnecessary since only one file is being
accessed. Removing it reduces the amount of code.
## Code After:
from distutils.core import setup
setup(
name='facebook-sdk',
version='0.3.2',
description='This client library is designed to support the Facebook '
'Graph API and the official Facebook JavaScript SDK, which '
'is the canonical way to implement Facebook authentication.',
author='Facebook',
maintainer='Martey Dodoo',
maintainer_email='[email protected]',
url='https://github.com/pythonforfacebook/facebook-sdk',
license='Apache',
py_modules=[
'facebook',
],
long_description=open("README.rst").read(),
classifiers=[
'License :: OSI Approved :: Apache Software License',
],
)
|
// ... existing code ...
from distutils.core import setup
setup(
name='facebook-sdk',
// ... modified code ...
py_modules=[
'facebook',
],
long_description=open("README.rst").read(),
classifiers=[
'License :: OSI Approved :: Apache Software License',
],
// ... rest of the code ...
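One caveat the record does not spell out: open("README.rst").read() relies on the platform default encoding and leaves closing the handle to the garbage collector. A hedged variant that pins the encoding and closes the file deterministically could look like the sketch below; it is an illustration, not part of the commit.
# Illustrative alternative for reading the long description.
import io

with io.open("README.rst", encoding="utf-8") as readme:
    long_description = readme.read()
# long_description can then be passed to setup(long_description=long_description).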
|
3c6ffceade64f6eda28642cfcd61019a03938572
|
chrome/browser/spellchecker/word_trimmer.h
|
chrome/browser/spellchecker/word_trimmer.h
|
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_SPELLCHECKER_WORD_TRIMMER_H_
#define CHROME_BROWSER_SPELLCHECKER_WORD_TRIMMER_H_
#include "base/i18n/base_i18n_export.h"
#include "base/string16.h"
// Trims |text| to contain only the range from |start| to |end| and |keep| words
// on either side of the range. The |start| and |end| parameters are character
// indexes into |text|. The |keep| parameter is the number of words to keep on
// either side of the |start|-|end| range. The function updates |start| in
// accordance with the trimming.
//
// Example:
//
// size_t start = 14;
// size_t end = 23;
// string16 text = ASCIIToUTF16("one two three four five six seven eight");
// int keep = 2;
// string16 trimmed = TrimWords(&start, end, text, keep);
// DCHECK(trimmed == ASCIIToUTF16("two three four five six seven"));
// DCHECK(start == 10);
//
string16 TrimWords(
size_t* start,
size_t end,
const string16& text,
size_t keep);
#endif // CHROME_BROWSER_SPELLCHECKER_WORD_TRIMMER_H_
|
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_SPELLCHECKER_WORD_TRIMMER_H_
#define CHROME_BROWSER_SPELLCHECKER_WORD_TRIMMER_H_
#include "base/string16.h"
// Trims |text| to contain only the range from |start| to |end| and |keep| words
// on either side of the range. The |start| and |end| parameters are character
// indexes into |text|. The |keep| parameter is the number of words to keep on
// either side of the |start|-|end| range. The function updates |start| in
// accordance with the trimming.
//
// Example:
//
// size_t start = 14;
// size_t end = 23;
// string16 text = ASCIIToUTF16("one two three four five six seven eight");
// int keep = 2;
// string16 trimmed = TrimWords(&start, end, text, keep);
// DCHECK(trimmed == ASCIIToUTF16("two three four five six seven"));
// DCHECK(start == 10);
//
string16 TrimWords(
size_t* start,
size_t end,
const string16& text,
size_t keep);
#endif // CHROME_BROWSER_SPELLCHECKER_WORD_TRIMMER_H_
|
Remove unnecessary include in spellcheck word trimmer
|
Remove unnecessary include in spellcheck word trimmer
TBR=groby
BUG=170514
Review URL: https://chromiumcodereview.appspot.com/14273022
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@196650 0039d316-1c4b-4281-b951-d872f2087c98
|
C
|
bsd-3-clause
|
M4sse/chromium.src,littlstar/chromium.src,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,patrickm/chromium.src,chuan9/chromium-crosswalk,ChromiumWebApps/chromium,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,ltilve/chromium,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,jaruba/chromium.src,patrickm/chromium.src,krieger-od/nwjs_chromium.src,anirudhSK/chromium,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,pozdnyakov/chromium-crosswalk,mogoweb/chromium-crosswalk,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,Chilledheart/chromium,chuan9/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Chilledheart/chromium,krieger-od/nwjs_chromium.src,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,hujiajie/pa-chromium,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,Just-D/chromium-1,jaruba/chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hujiajie/pa-chromium,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,Jonekee/chromium.src,ltilve/chromium,hgl888/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,dednal/chromium.src,Chilledheart/chromium,patrickm/chromium.src,bright-sparks/chromium-spacewalk,ltilve/chromium,M4sse/chromium.src,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,dednal/chromium.src,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,Jonekee/chromium.src,Jonekee/chromium.src,mogoweb/chromium-crosswalk,littlstar/chromium.src,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,ltilve/chromium,dednal/chromium.src,ChromiumWebApps/chromium,crosswalk-project/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,Chilledheart/chromium,anirudhSK/chromium,hujiajie/pa-chromium,dednal/chromium.src,axinging/chromium-crosswalk,jaruba/chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,markYoungH/chromium.src,anirudhSK/chromium,chuan9/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,M4sse/chromium.src,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,ondra-novak/chromium.src,jaruba/chromium.src,Chilledheart/chromium,mogoweb/chromium-crosswalk,hujiajie/pa-chromium,littlstar/chromium.src,pozdnyakov/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ChromiumWebApps/chromium,littlstar/chromium.src,anirudhSK/chromium,jaruba/chromium.src,pozdnyakov/chromium-crosswalk,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,mogoweb/chromium-crosswalk,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,dushu1203/chromium.src,Chilledheart/chromium,ltilve/chromium,axinging/chromium-crosswalk,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,markYoungH/chromium.src,patrickm/chromium.src,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,fujunwei/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,anirud
hSK/chromium,Fireblend/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Just-D/chromium-1,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,markYoungH/chromium.src,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,Just-D/chromium-1,jaruba/chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,patrickm/chromium.src,mogoweb/chromium-crosswalk,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk,anirudhSK/chromium,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,pozdnyakov/chromium-crosswalk,jaruba/chromium.src,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,patrickm/chromium.src,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,dushu1203/chromium.src,anirudhSK/chromium,pozdnyakov/chromium-crosswalk,markYoungH/chromium.src,fujunwei/chromium-crosswalk,Jonekee/chromium.src,dushu1203/chromium.src,littlstar/chromium.src,Jonekee/chromium.src,pozdnyakov/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,axinging/chromium-crosswalk,Just-D/chromium-1,littlstar/chromium.src,anirudhSK/chromium,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,dednal/chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,dushu1203/chromium.src,ondra-novak/chromium.src,dednal/chromium.src,pozdnyakov/chromium-crosswalk,dednal/chromium.src,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,hujiajie/pa-chromium,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,bright-sparks/chromium-spacewalk,bright-sparks/chromium-spacewalk,littlstar/chromium.src,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,mogoweb/chromium-crosswalk,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,hujiajie/pa-chromium,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,ondra-novak/chromium.src,Just-D/chromium-1,chuan9/chromium-crosswalk,hujiajie/pa-chromium,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,jaruba/chromium.src,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,jaruba/chromium.src,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,Just-D/chromium-1,Jonekee/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,ltilve/chromium,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,ondra-novak/chromium.src,dednal/chromium.src,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,ltilve/chromium,krieger-od/nwjs_chromium.src,hujiajie/pa-chromium,ondra-novak/chromium.src,dednal/chromium.s
rc,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,hujiajie/pa-chromium,markYoungH/chromium.src,hgl888/chromium-crosswalk,jaruba/chromium.src,Jonekee/chromium.src,ChromiumWebApps/chromium,M4sse/chromium.src,ChromiumWebApps/chromium,chuan9/chromium-crosswalk,anirudhSK/chromium,ChromiumWebApps/chromium,ltilve/chromium,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl
|
c
|
## Code Before:
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_SPELLCHECKER_WORD_TRIMMER_H_
#define CHROME_BROWSER_SPELLCHECKER_WORD_TRIMMER_H_
#include "base/i18n/base_i18n_export.h"
#include "base/string16.h"
// Trims |text| to contain only the range from |start| to |end| and |keep| words
// on either side of the range. The |start| and |end| parameters are character
// indexes into |text|. The |keep| parameter is the number of words to keep on
// either side of the |start|-|end| range. The function updates |start| in
// accordance with the trimming.
//
// Example:
//
// size_t start = 14;
// size_t end = 23;
// string16 text = ASCIIToUTF16("one two three four five six seven eight");
// int keep = 2;
// string16 trimmed = TrimWords(&start, end, text, keep);
// DCHECK(trimmed == ASCIIToUTF16("two three four five six seven"));
// DCHECK(start == 10);
//
string16 TrimWords(
size_t* start,
size_t end,
const string16& text,
size_t keep);
#endif // CHROME_BROWSER_SPELLCHECKER_WORD_TRIMMER_H_
## Instruction:
Remove unnecessary include in spellcheck word trimmer
TBR=groby
BUG=170514
Review URL: https://chromiumcodereview.appspot.com/14273022
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@196650 0039d316-1c4b-4281-b951-d872f2087c98
## Code After:
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_SPELLCHECKER_WORD_TRIMMER_H_
#define CHROME_BROWSER_SPELLCHECKER_WORD_TRIMMER_H_
#include "base/string16.h"
// Trims |text| to contain only the range from |start| to |end| and |keep| words
// on either side of the range. The |start| and |end| parameters are character
// indexes into |text|. The |keep| parameter is the number of words to keep on
// either side of the |start|-|end| range. The function updates |start| in
// accordance with the trimming.
//
// Example:
//
// size_t start = 14;
// size_t end = 23;
// string16 text = ASCIIToUTF16("one two three four five six seven eight");
// int keep = 2;
// string16 trimmed = TrimWords(&start, end, text, keep);
// DCHECK(trimmed == ASCIIToUTF16("two three four five six seven"));
// DCHECK(start == 10);
//
string16 TrimWords(
size_t* start,
size_t end,
const string16& text,
size_t keep);
#endif // CHROME_BROWSER_SPELLCHECKER_WORD_TRIMMER_H_
|
// ... existing code ...
#ifndef CHROME_BROWSER_SPELLCHECKER_WORD_TRIMMER_H_
#define CHROME_BROWSER_SPELLCHECKER_WORD_TRIMMER_H_
#include "base/string16.h"
// Trims |text| to contain only the range from |start| to |end| and |keep| words
// ... rest of the code ...
|
fcd0acef18a66e07b455a3f078501671bf5e83c1
|
extensions/mutiny/deployment/src/main/java/io/quarkus/mutiny/deployment/MutinyProcessor.java
|
extensions/mutiny/deployment/src/main/java/io/quarkus/mutiny/deployment/MutinyProcessor.java
|
package io.quarkus.mutiny.deployment;
import java.util.concurrent.ExecutorService;
import io.quarkus.deployment.Feature;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.annotations.ExecutionTime;
import io.quarkus.deployment.annotations.Record;
import io.quarkus.deployment.builditem.ExecutorBuildItem;
import io.quarkus.deployment.builditem.FeatureBuildItem;
import io.quarkus.mutiny.runtime.MutinyInfrastructure;
public class MutinyProcessor {
@BuildStep
public FeatureBuildItem registerFeature() {
return new FeatureBuildItem(Feature.MUTINY);
}
@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
public void initExecutor(ExecutorBuildItem executorBuildItem, MutinyInfrastructure recorder) {
ExecutorService executor = executorBuildItem.getExecutorProxy();
recorder.configureMutinyInfrastructure(executor);
}
@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
public void defineDroppedExceptionHandler(MutinyInfrastructure recorder) {
recorder.configureDroppedExceptionHandler();
}
@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
public void defineThreadBlockingChecker(MutinyInfrastructure recorder) {
recorder.configureThreadBlockingChecker();
}
}
|
package io.quarkus.mutiny.deployment;
import java.util.concurrent.ExecutorService;
import io.quarkus.deployment.Feature;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.annotations.ExecutionTime;
import io.quarkus.deployment.annotations.Record;
import io.quarkus.deployment.builditem.ExecutorBuildItem;
import io.quarkus.deployment.builditem.FeatureBuildItem;
import io.quarkus.mutiny.runtime.MutinyInfrastructure;
public class MutinyProcessor {
@BuildStep
public FeatureBuildItem registerFeature() {
return new FeatureBuildItem(Feature.MUTINY);
}
@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
public void initExecutor(ExecutorBuildItem executorBuildItem, MutinyInfrastructure recorder) {
ExecutorService executor = executorBuildItem.getExecutorProxy();
recorder.configureMutinyInfrastructure(executor);
}
@BuildStep
@Record(ExecutionTime.STATIC_INIT)
public void defineDroppedExceptionHandler(MutinyInfrastructure recorder) {
recorder.configureDroppedExceptionHandler();
}
@BuildStep
@Record(ExecutionTime.STATIC_INIT)
public void defineThreadBlockingChecker(MutinyInfrastructure recorder) {
recorder.configureThreadBlockingChecker();
}
}
|
Move Mutiny infrastructure initialization to STATIC_INIT
|
Move Mutiny infrastructure initialization to STATIC_INIT
|
Java
|
apache-2.0
|
quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus
|
java
|
## Code Before:
package io.quarkus.mutiny.deployment;
import java.util.concurrent.ExecutorService;
import io.quarkus.deployment.Feature;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.annotations.ExecutionTime;
import io.quarkus.deployment.annotations.Record;
import io.quarkus.deployment.builditem.ExecutorBuildItem;
import io.quarkus.deployment.builditem.FeatureBuildItem;
import io.quarkus.mutiny.runtime.MutinyInfrastructure;
public class MutinyProcessor {
@BuildStep
public FeatureBuildItem registerFeature() {
return new FeatureBuildItem(Feature.MUTINY);
}
@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
public void initExecutor(ExecutorBuildItem executorBuildItem, MutinyInfrastructure recorder) {
ExecutorService executor = executorBuildItem.getExecutorProxy();
recorder.configureMutinyInfrastructure(executor);
}
@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
public void defineDroppedExceptionHandler(MutinyInfrastructure recorder) {
recorder.configureDroppedExceptionHandler();
}
@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
public void defineThreadBlockingChecker(MutinyInfrastructure recorder) {
recorder.configureThreadBlockingChecker();
}
}
## Instruction:
Move Mutiny infrastructure initialization to STATIC_INIT
## Code After:
package io.quarkus.mutiny.deployment;
import java.util.concurrent.ExecutorService;
import io.quarkus.deployment.Feature;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.annotations.ExecutionTime;
import io.quarkus.deployment.annotations.Record;
import io.quarkus.deployment.builditem.ExecutorBuildItem;
import io.quarkus.deployment.builditem.FeatureBuildItem;
import io.quarkus.mutiny.runtime.MutinyInfrastructure;
public class MutinyProcessor {
@BuildStep
public FeatureBuildItem registerFeature() {
return new FeatureBuildItem(Feature.MUTINY);
}
@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
public void initExecutor(ExecutorBuildItem executorBuildItem, MutinyInfrastructure recorder) {
ExecutorService executor = executorBuildItem.getExecutorProxy();
recorder.configureMutinyInfrastructure(executor);
}
@BuildStep
@Record(ExecutionTime.STATIC_INIT)
public void defineDroppedExceptionHandler(MutinyInfrastructure recorder) {
recorder.configureDroppedExceptionHandler();
}
@BuildStep
@Record(ExecutionTime.STATIC_INIT)
public void defineThreadBlockingChecker(MutinyInfrastructure recorder) {
recorder.configureThreadBlockingChecker();
}
}
|
...
}
@BuildStep
@Record(ExecutionTime.STATIC_INIT)
public void defineDroppedExceptionHandler(MutinyInfrastructure recorder) {
recorder.configureDroppedExceptionHandler();
}
@BuildStep
@Record(ExecutionTime.STATIC_INIT)
public void defineThreadBlockingChecker(MutinyInfrastructure recorder) {
recorder.configureThreadBlockingChecker();
}
...
|
fdc07735c60e95f296a32f12fcc438c4aa968db9
|
src/main/java/GraphDrawer.java
|
src/main/java/GraphDrawer.java
|
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.paint.Color;
import java.io.IOException;
import java.util.HashMap;
/**
* Created by TUDelft SID on 8-5-2017.
*/
public class GraphDrawer {
private int Xsize = 10;
private int Ysize = 6;
private int lengthEdge = 2;
private int yBase = 40;
private SequenceGraph graph;
private GraphicsContext gc;
public GraphDrawer(SequenceGraph graph, GraphicsContext gc) {
this.graph = graph;
this.gc = gc;
graph.initialize();
graph.layerizeGraph();
}
public void drawShapes() throws IOException {
HashMap<Integer, SequenceNode> nodes = graph.getNodes();
for(int i = 1; i <= nodes.size(); i++) {
SequenceNode node = nodes.get(i);
gc.setFill(Color.BLUE);
gc.fillRoundRect((node.getColumn() * (Xsize + lengthEdge)) + 50, yBase, Xsize, Ysize, 10, 10);
// gc.setStroke(Color.BLACK);
// gc.setLineWidth(1);
// gc.strokeLine((node.getColumn() * (Xsize + lengthEdge)) + Xsize + 50,43, node.getColumn() * (Xsize + Xsize + lengthEdge) + 50, 43);
}
}
}
|
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.paint.Color;
import java.io.IOException;
import java.util.HashMap;
/**
* Created by TUDelft SID on 8-5-2017.
*/
public class GraphDrawer {
public static final int X_SIZE = 10;
public static final int Y_SIZE = 6;
public static final int EDGE_LENGTH = 2;
public static final int Y_BASE = 40;
private SequenceGraph graph;
private GraphicsContext gc;
public GraphDrawer(SequenceGraph graph, GraphicsContext gc) {
this.graph = graph;
this.gc = gc;
graph.initialize();
graph.layerizeGraph();
}
public void drawShapes() throws IOException {
HashMap<Integer, SequenceNode> nodes = graph.getNodes();
for(int i = 1; i <= nodes.size(); i++) {
SequenceNode node = nodes.get(i);
gc.setFill(Color.BLUE);
gc.fillRoundRect((node.getColumn() * (X_SIZE + EDGE_LENGTH)) + 50, Y_BASE, X_SIZE, Y_SIZE, 10, 10);
// gc.setStroke(Color.BLACK);
// gc.setLineWidth(1);
// gc.strokeLine((node.getColumn() * (Xsize + lengthEdge)) + Xsize + 50,43, node.getColumn() * (Xsize + Xsize + lengthEdge) + 50, 43);
}
}
}
|
Use final number instead of magic numbers
|
Use final number instead of magic numbers
|
Java
|
apache-2.0
|
ProgrammingLife2017/DynamiteAndButterflies
|
java
|
## Code Before:
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.paint.Color;
import java.io.IOException;
import java.util.HashMap;
/**
* Created by TUDelft SID on 8-5-2017.
*/
public class GraphDrawer {
private int Xsize = 10;
private int Ysize = 6;
private int lengthEdge = 2;
private int yBase = 40;
private SequenceGraph graph;
private GraphicsContext gc;
public GraphDrawer(SequenceGraph graph, GraphicsContext gc) {
this.graph = graph;
this.gc = gc;
graph.initialize();
graph.layerizeGraph();
}
public void drawShapes() throws IOException {
HashMap<Integer, SequenceNode> nodes = graph.getNodes();
for(int i = 1; i <= nodes.size(); i++) {
SequenceNode node = nodes.get(i);
gc.setFill(Color.BLUE);
gc.fillRoundRect((node.getColumn() * (Xsize + lengthEdge)) + 50, yBase, Xsize, Ysize, 10, 10);
// gc.setStroke(Color.BLACK);
// gc.setLineWidth(1);
// gc.strokeLine((node.getColumn() * (Xsize + lengthEdge)) + Xsize + 50,43, node.getColumn() * (Xsize + Xsize + lengthEdge) + 50, 43);
}
}
}
## Instruction:
Use final number instead of magic numbers
## Code After:
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.paint.Color;
import java.io.IOException;
import java.util.HashMap;
/**
* Created by TUDelft SID on 8-5-2017.
*/
public class GraphDrawer {
public static final int X_SIZE = 10;
public static final int Y_SIZE = 6;
public static final int EDGE_LENGTH = 2;
public static final int Y_BASE = 40;
private SequenceGraph graph;
private GraphicsContext gc;
public GraphDrawer(SequenceGraph graph, GraphicsContext gc) {
this.graph = graph;
this.gc = gc;
graph.initialize();
graph.layerizeGraph();
}
public void drawShapes() throws IOException {
HashMap<Integer, SequenceNode> nodes = graph.getNodes();
for(int i = 1; i <= nodes.size(); i++) {
SequenceNode node = nodes.get(i);
gc.setFill(Color.BLUE);
gc.fillRoundRect((node.getColumn() * (X_SIZE + EDGE_LENGTH)) + 50, Y_BASE, X_SIZE, Y_SIZE, 10, 10);
// gc.setStroke(Color.BLACK);
// gc.setLineWidth(1);
// gc.strokeLine((node.getColumn() * (Xsize + lengthEdge)) + Xsize + 50,43, node.getColumn() * (Xsize + Xsize + lengthEdge) + 50, 43);
}
}
}
|
// ... existing code ...
*/
public class GraphDrawer {
public static final int X_SIZE = 10;
public static final int Y_SIZE = 6;
public static final int EDGE_LENGTH = 2;
public static final int Y_BASE = 40;
private SequenceGraph graph;
private GraphicsContext gc;
// ... modified code ...
for(int i = 1; i <= nodes.size(); i++) {
SequenceNode node = nodes.get(i);
gc.setFill(Color.BLUE);
gc.fillRoundRect((node.getColumn() * (X_SIZE + EDGE_LENGTH)) + 50, Y_BASE, X_SIZE, Y_SIZE, 10, 10);
// gc.setStroke(Color.BLACK);
// gc.setLineWidth(1);
// gc.strokeLine((node.getColumn() * (Xsize + lengthEdge)) + Xsize + 50,43, node.getColumn() * (Xsize + Xsize + lengthEdge) + 50, 43);
// ... rest of the code ...
|
d8375d3e3a4a00598ac0cdc38861be9f56fb58c0
|
edison/tests/sanity_tests.py
|
edison/tests/sanity_tests.py
|
from edison.tests import unittest
class SanityTests(unittest.TestCase):
def test_psych(self):
self.assertTrue(True)
|
from edison.tests import unittest
class SanityTests(unittest.TestCase):
def test_psych(self):
self.assertTrue(True)
self.assertFalse(False)
|
Add another inane test to trigger Landscape
|
Add another inane test to trigger Landscape
|
Python
|
mit
|
briancline/edison
|
python
|
## Code Before:
from edison.tests import unittest
class SanityTests(unittest.TestCase):
def test_psych(self):
self.assertTrue(True)
## Instruction:
Add another inane test to trigger Landscape
## Code After:
from edison.tests import unittest
class SanityTests(unittest.TestCase):
def test_psych(self):
self.assertTrue(True)
self.assertFalse(False)
|
// ... existing code ...
class SanityTests(unittest.TestCase):
def test_psych(self):
self.assertTrue(True)
self.assertFalse(False)
// ... rest of the code ...
|
b4d86431806cc9ce3019aa65db2a3b22c2bd4ac3
|
flask_skeleton_api/views/general.py
|
flask_skeleton_api/views/general.py
|
from flask import request, Blueprint, Response
from flask import current_app
import json
# This is the blueprint object that gets registered into the app in blueprints.py.
general = Blueprint('general', __name__)
@general.route("/health")
def check_status():
return Response(response=json.dumps({
"app": "flask-skeleton-api",
"status": "OK",
"headers": str(request.headers),
"commit": current_app.config["COMMIT"]
}), mimetype='application/json', status=200)
|
from flask import request, Blueprint, Response
from flask import current_app
import json
# This is the blueprint object that gets registered into the app in blueprints.py.
general = Blueprint('general', __name__)
@general.route("/health")
def check_status():
return Response(response=json.dumps({
"app": "flask-skeleton-api",
"status": "OK",
"headers": request.headers.to_list(),
"commit": current_app.config["COMMIT"]
}), mimetype='application/json', status=200)
|
Improve list of headers returned with health route in order to remove \r\n
Improve list of headers returned with health route in order to remove \r\n
|
Improve list of headers returned with health route in order to remove \r\n
|
Python
|
mit
|
matthew-shaw/thing-api
|
python
|
## Code Before:
from flask import request, Blueprint, Response
from flask import current_app
import json
# This is the blueprint object that gets registered into the app in blueprints.py.
general = Blueprint('general', __name__)
@general.route("/health")
def check_status():
return Response(response=json.dumps({
"app": "flask-skeleton-api",
"status": "OK",
"headers": str(request.headers),
"commit": current_app.config["COMMIT"]
}), mimetype='application/json', status=200)
## Instruction:
Improve list of headers returned with health route in order to remove \r\n
## Code After:
from flask import request, Blueprint, Response
from flask import current_app
import json
# This is the blueprint object that gets registered into the app in blueprints.py.
general = Blueprint('general', __name__)
@general.route("/health")
def check_status():
return Response(response=json.dumps({
"app": "flask-skeleton-api",
"status": "OK",
"headers": request.headers.to_list(),
"commit": current_app.config["COMMIT"]
}), mimetype='application/json', status=200)
|
// ... existing code ...
return Response(response=json.dumps({
"app": "flask-skeleton-api",
"status": "OK",
"headers": request.headers.to_list(),
"commit": current_app.config["COMMIT"]
}), mimetype='application/json', status=200)
// ... rest of the code ...
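For context, str(request.headers) serializes the raw header block, which is why the \r\n line endings showed up in the JSON payload. A rough sketch of building JSON-friendly header pairs without relying on any particular Werkzeug helper is shown below; the route and payload fields are placeholders, not the project's code.
# Sketch only: expose request headers as a list of [name, value] pairs.
import json
from flask import Flask, Response, request

app = Flask(__name__)

@app.route("/health")
def health():
    payload = {
        "status": "OK",
        "headers": [[name, value] for name, value in request.headers.items()],
    }
    return Response(response=json.dumps(payload),
                    mimetype="application/json", status=200)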
|
eebf41e6cf85f98d034708999e0321f9e09e4093
|
setup.py
|
setup.py
|
import names
from setuptools import setup, find_packages
setup(
name=names.__title__,
version=names.__version__,
author=names.__author__,
url="https://github.com/treyhunner/names",
description="Generate random names",
license=names.__license__,
packages=find_packages(),
package_data={'names': ['dist.*']},
include_package_data=True,
entry_points={
'console_scripts': [
'names = names.main:main',
],
},
)
|
import names
from setuptools import setup, find_packages
setup(
name=names.__title__,
version=names.__version__,
author=names.__author__,
url="https://github.com/treyhunner/names",
description="Generate random names",
long_description=open('README.rst').read(),
license=names.__license__,
packages=find_packages(),
package_data={'names': ['dist.*']},
include_package_data=True,
entry_points={
'console_scripts': [
'names = names.main:main',
],
},
)
|
Use readme file for package long description
|
Use readme file for package long description
|
Python
|
mit
|
treyhunner/names,treyhunner/names
|
python
|
## Code Before:
import names
from setuptools import setup, find_packages
setup(
name=names.__title__,
version=names.__version__,
author=names.__author__,
url="https://github.com/treyhunner/names",
description="Generate random names",
license=names.__license__,
packages=find_packages(),
package_data={'names': ['dist.*']},
include_package_data=True,
entry_points={
'console_scripts': [
'names = names.main:main',
],
},
)
## Instruction:
Use readme file for package long description
## Code After:
import names
from setuptools import setup, find_packages
setup(
name=names.__title__,
version=names.__version__,
author=names.__author__,
url="https://github.com/treyhunner/names",
description="Generate random names",
long_description=open('README.rst').read(),
license=names.__license__,
packages=find_packages(),
package_data={'names': ['dist.*']},
include_package_data=True,
entry_points={
'console_scripts': [
'names = names.main:main',
],
},
)
|
# ... existing code ...
author=names.__author__,
url="https://github.com/treyhunner/names",
description="Generate random names",
long_description=open('README.rst').read(),
license=names.__license__,
packages=find_packages(),
package_data={'names': ['dist.*']},
# ... rest of the code ...
|
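The record above feeds README.rst into `long_description`. A hedged variant of the same idea with placeholder metadata (not the real project's), which closes the file before `setup()` runs and states the content type explicitly:

from pathlib import Path
from setuptools import setup, find_packages
# Placeholder metadata; assumes a README.rst sits next to setup.py and a
# reasonably recent setuptools that understands long_description_content_type.
setup(
    name="example-pkg",
    version="0.0.1",
    description="Generate random names",
    long_description=Path("README.rst").read_text(encoding="utf-8"),
    long_description_content_type="text/x-rst",
    packages=find_packages(),
)

`open('README.rst').read()` as in the record works too, but leaves the file object to be closed by garbage collection rather than deterministically.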
969bc09c515f208738da67ebf77ef543ab358613
|
leonardo_agenda/__init__.py
|
leonardo_agenda/__init__.py
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
default_app_config = 'leonardo_agenda.Config'
LEONARDO_OPTGROUP = 'Events'
LEONARDO_APPS = [
'leonardo_agenda',
'elephantagenda',
'elephantagenda.backends.agenda'
]
LEONARDO_WIDGETS = [
'leonardo_agenda.models.EventsWidget'
]
LEONARDO_PLUGINS = [
('leonardo_agenda.apps.events', _('Events'), ),
]
LEONARDO_ABSOLUTE_URL_OVERRIDES = {
'agenda.event': 'leonardo_agenda.overrides.event'
}
class Config(AppConfig):
name = 'leonardo_agenda'
verbose_name = "leonardo-agenda"
def ready(self):
from ckeditor_uploader.widgets import CKEditorUploadingWidget
from elephantagenda.backends.agenda import models
models.EventAdminForm._meta.widgets.update({
'description': CKEditorUploadingWidget(),
'short_description': CKEditorUploadingWidget()
})
try:
from ckeditor_uploader.widgets import CKEditorUploadingWidget
from elephantagenda.backends.agenda import models
models.EventAdminForm._meta.widgets[
'description'] = CKEditorUploadingWidget()
except Exception as e:
raise e
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
default_app_config = 'leonardo_agenda.Config'
LEONARDO_OPTGROUP = 'Events'
LEONARDO_APPS = [
'leonardo_agenda',
'elephantagenda',
'elephantagenda.backends.agenda'
]
LEONARDO_WIDGETS = [
'leonardo_agenda.models.EventsWidget'
]
LEONARDO_PLUGINS = [
('leonardo_agenda.apps.events', _('Events'), ),
]
LEONARDO_ABSOLUTE_URL_OVERRIDES = {
'agenda.event': 'leonardo_agenda.overrides.event'
}
class Config(AppConfig):
name = 'leonardo_agenda'
verbose_name = "leonardo-agenda"
def ready(self):
try:
from leonardo.utils import get_htmltext_widget
from elephantagenda.backends.agenda import models
models.EventAdminForm._meta.widgets.update({
'description': get_htmltext_widget,
'short_description': get_htmltext_widget
})
except:
pass
|
Use the leonardo helper to declare the HTML text widget.
|
Use the leonardo helper to declare the HTML text widget.
|
Python
|
bsd-3-clause
|
leonardo-modules/leonardo-agenda,leonardo-modules/leonardo-agenda,leonardo-modules/leonardo-agenda
|
python
|
## Code Before:
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
default_app_config = 'leonardo_agenda.Config'
LEONARDO_OPTGROUP = 'Events'
LEONARDO_APPS = [
'leonardo_agenda',
'elephantagenda',
'elephantagenda.backends.agenda'
]
LEONARDO_WIDGETS = [
'leonardo_agenda.models.EventsWidget'
]
LEONARDO_PLUGINS = [
('leonardo_agenda.apps.events', _('Events'), ),
]
LEONARDO_ABSOLUTE_URL_OVERRIDES = {
'agenda.event': 'leonardo_agenda.overrides.event'
}
class Config(AppConfig):
name = 'leonardo_agenda'
verbose_name = "leonardo-agenda"
def ready(self):
from ckeditor_uploader.widgets import CKEditorUploadingWidget
from elephantagenda.backends.agenda import models
models.EventAdminForm._meta.widgets.update({
'description': CKEditorUploadingWidget(),
'short_description': CKEditorUploadingWidget()
})
try:
from ckeditor_uploader.widgets import CKEditorUploadingWidget
from elephantagenda.backends.agenda import models
models.EventAdminForm._meta.widgets[
'description'] = CKEditorUploadingWidget()
except Exception as e:
raise e
## Instruction:
Use the leonardo helper to declare the HTML text widget.
## Code After:
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
default_app_config = 'leonardo_agenda.Config'
LEONARDO_OPTGROUP = 'Events'
LEONARDO_APPS = [
'leonardo_agenda',
'elephantagenda',
'elephantagenda.backends.agenda'
]
LEONARDO_WIDGETS = [
'leonardo_agenda.models.EventsWidget'
]
LEONARDO_PLUGINS = [
('leonardo_agenda.apps.events', _('Events'), ),
]
LEONARDO_ABSOLUTE_URL_OVERRIDES = {
'agenda.event': 'leonardo_agenda.overrides.event'
}
class Config(AppConfig):
name = 'leonardo_agenda'
verbose_name = "leonardo-agenda"
def ready(self):
try:
from leonardo.utils import get_htmltext_widget
from elephantagenda.backends.agenda import models
models.EventAdminForm._meta.widgets.update({
'description': get_htmltext_widget,
'short_description': get_htmltext_widget
})
except:
pass
|
# ... existing code ...
def ready(self):
try:
from leonardo.utils import get_htmltext_widget
from elephantagenda.backends.agenda import models
models.EventAdminForm._meta.widgets.update({
'description': get_htmltext_widget,
'short_description': get_htmltext_widget
})
except:
pass
# ... rest of the code ...
|
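The record above wraps the widget override in a try/except so a missing optional dependency no longer breaks `ready()`. A generic, framework-free sketch of that pattern; `optional_widgets` and `FancyTextWidget` are hypothetical names used only for illustration:

def ready():
    # Attempt to wire in an optional integration; skip it quietly when unavailable.
    try:
        from optional_widgets import FancyTextWidget  # hypothetical optional package
    except ImportError:
        return None  # nothing to do when the package is not installed
    return FancyTextWidget  # ...or apply it to the relevant form fields here
print(ready())

Catching `ImportError` (or another narrow exception) keeps genuine bugs visible, whereas the record's bare `except: pass` also hides errors raised while applying the override; that trade-off is defensible when the integration must never block startup.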
4e84dc31d52412a9d58d5f0c54f5514c0eac5137
|
console.py
|
console.py
|
from dumpster import Dumpster
import os
i = input('\r>')
if i == 'list':
cwd = os.getcwd()
lcd = os.listdir()
dump = ''
for file in lcd:
if '.dmp' in file:
dump+= ' '+file
print(dump)
|
from dumpster import Dumpster
import os
running = True
selected = ''
while running:
#cwd = os.getcwd()
i = input('\r%s>'%(selected))
if i == 'exit':
running = False
if i[0:6] == 'create':
name = i[7:]
Dumpster(name).write_to_dump()
if i == 'list':
if selected is 'none': #list currrent working directory
dirs = ''
lcd = os.listdir()
for file in lcd:
if '.dmp' in file:
dirs+= ' '+file.strip('.dmp')
print(dirs)
else: #list selected dump
#.......................................
if i[0:6] == 'select':
name = i[7:]
selected = name
|
Select and Create and List
|
Select and Create and List
|
Python
|
apache-2.0
|
SirGuyOfGibson/source-dump
|
python
|
## Code Before:
from dumpster import Dumpster
import os
i = input('\r>')
if i == 'list':
cwd = os.getcwd()
lcd = os.listdir()
dump = ''
for file in lcd:
if '.dmp' in file:
dump+= ' '+file
print(dump)
## Instruction:
Select and Create and List
## Code After:
from dumpster import Dumpster
import os
running = True
selected = ''
while running:
#cwd = os.getcwd()
i = input('\r%s>'%(selected))
if i == 'exit':
running = False
if i[0:6] == 'create':
name = i[7:]
Dumpster(name).write_to_dump()
if i == 'list':
if selected is 'none': #list currrent working directory
dirs = ''
lcd = os.listdir()
for file in lcd:
if '.dmp' in file:
dirs+= ' '+file.strip('.dmp')
print(dirs)
else: #list selected dump
#.......................................
if i[0:6] == 'select':
name = i[7:]
selected = name
|
// ... existing code ...
from dumpster import Dumpster
import os
running = True
selected = ''
while running:
#cwd = os.getcwd()
i = input('\r%s>'%(selected))
if i == 'exit':
running = False
if i[0:6] == 'create':
name = i[7:]
Dumpster(name).write_to_dump()
if i == 'list':
if selected is 'none': #list currrent working directory
dirs = ''
lcd = os.listdir()
for file in lcd:
if '.dmp' in file:
dirs+= ' '+file.strip('.dmp')
print(dirs)
else: #list selected dump
#.......................................
if i[0:6] == 'select':
name = i[7:]
selected = name
// ... rest of the code ...
|
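The loop in the record above compares strings with `is` (against 'none', while `selected` starts as an empty string) and has an `else:` branch holding only a comment. A standalone sketch of the same command loop with those two spots adjusted; the `print` stands in for the record's `Dumpster(name).write_to_dump()` call:

import os
running = True
selected = ""
while running:
    command = input("\r%s>" % selected)
    if command == "exit":
        running = False
    elif command.startswith("create "):
        name = command[len("create "):]
        print("would create dump:", name)  # Dumpster(name).write_to_dump() in the record
    elif command == "list":
        if not selected:  # truthiness/equality instead of `is 'none'`
            dumps = [f[:-len(".dmp")] for f in os.listdir() if f.endswith(".dmp")]
            print(" ".join(dumps))
        else:
            pass  # listing a selected dump is not implemented in the record either
    elif command.startswith("select "):
        selected = command[len("select "):]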
862fa3a737ef944899958551c66bcf6ad8fb4c86
|
packages/nuget.py
|
packages/nuget.py
|
class NuGetPackage(GitHubTarballPackage):
def __init__(self):
GitHubTarballPackage.__init__(self,
'mono', 'nuget',
'2.8.1',
'7b20cd5408852e725bd14a13855bd238506c6a19',
configure = '')
def build(self):
self.sh ('%{make}')
def install(self):
self.sh ('%{makeinstall} PREFIX=%{prefix}')
NuGetPackage()
|
class NuGetPackage(GitHubTarballPackage):
def __init__(self):
GitHubTarballPackage.__init__(self,
'mono', 'nuget',
'2.8.1',
'7b20cd5408852e725bd14a13855bd238506c6a19',
configure = '')
def build(self):
self.sh ('%{make} PREFIX=%{prefix}')
def install(self):
self.sh ('%{makeinstall} PREFIX=%{prefix}')
NuGetPackage()
|
Set PREFIX in make as well as makeinstall
|
Set PREFIX in make as well as makeinstall
|
Python
|
mit
|
BansheeMediaPlayer/bockbuild,mono/bockbuild,mono/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild
|
python
|
## Code Before:
class NuGetPackage(GitHubTarballPackage):
def __init__(self):
GitHubTarballPackage.__init__(self,
'mono', 'nuget',
'2.8.1',
'7b20cd5408852e725bd14a13855bd238506c6a19',
configure = '')
def build(self):
self.sh ('%{make}')
def install(self):
self.sh ('%{makeinstall} PREFIX=%{prefix}')
NuGetPackage()
## Instruction:
Set PREFIX in make as well as makeinstall
## Code After:
class NuGetPackage(GitHubTarballPackage):
def __init__(self):
GitHubTarballPackage.__init__(self,
'mono', 'nuget',
'2.8.1',
'7b20cd5408852e725bd14a13855bd238506c6a19',
configure = '')
def build(self):
self.sh ('%{make} PREFIX=%{prefix}')
def install(self):
self.sh ('%{makeinstall} PREFIX=%{prefix}')
NuGetPackage()
|
...
configure = '')
def build(self):
self.sh ('%{make} PREFIX=%{prefix}')
def install(self):
self.sh ('%{makeinstall} PREFIX=%{prefix}')
...
|
a470c0e15aec349a5704b1292f08d47a179625db
|
northbound-api/src/main/java/org/opendaylight/neutron/northbound/api/INeutronRequest.java
|
northbound-api/src/main/java/org/opendaylight/neutron/northbound/api/INeutronRequest.java
|
/*
* Copyright (C) 2014 Red Hat, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*
* Authors : Dave Tucker
*/
package org.opendaylight.neutron.northbound.api;
import org.opendaylight.neutron.spi.INeutronObject;
import java.util.List;
public interface INeutronRequest<T extends INeutronObject> {
public T getSingleton();
public boolean isSingleton();
public List<T> getBulk();
}
|
/*
* Copyright (C) 2014 Red Hat, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*
* Authors : Dave Tucker
*/
package org.opendaylight.neutron.northbound.api;
import org.opendaylight.neutron.spi.INeutronObject;
import java.util.List;
public interface INeutronRequest<T extends INeutronObject> {
T getSingleton();
boolean isSingleton();
List<T> getBulk();
}
|
Remove redundant modifiers from northbound interfaces
|
Remove redundant modifiers from northbound interfaces
Change-Id: I83ed14c21255e8fc22bd688d00ea8b87d833b344
Signed-off-by: Ryan Moats <[email protected]>
|
Java
|
epl-1.0
|
opendaylight/neutron
|
java
|
## Code Before:
/*
* Copyright (C) 2014 Red Hat, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*
* Authors : Dave Tucker
*/
package org.opendaylight.neutron.northbound.api;
import org.opendaylight.neutron.spi.INeutronObject;
import java.util.List;
public interface INeutronRequest<T extends INeutronObject> {
public T getSingleton();
public boolean isSingleton();
public List<T> getBulk();
}
## Instruction:
Remove redundant modifiers from northbound interfaces
Change-Id: I83ed14c21255e8fc22bd688d00ea8b87d833b344
Signed-off-by: Ryan Moats <[email protected]>
## Code After:
/*
* Copyright (C) 2014 Red Hat, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*
* Authors : Dave Tucker
*/
package org.opendaylight.neutron.northbound.api;
import org.opendaylight.neutron.spi.INeutronObject;
import java.util.List;
public interface INeutronRequest<T extends INeutronObject> {
T getSingleton();
boolean isSingleton();
List<T> getBulk();
}
|
...
import java.util.List;
public interface INeutronRequest<T extends INeutronObject> {
T getSingleton();
boolean isSingleton();
List<T> getBulk();
}
...
|
d4441a0e7d10767b97776fd3046382492132f407
|
src/Article.h
|
src/Article.h
|
/*!
* represents a Wikipedia (Mediawiki) and its links
*/
class Article
{
public:
typedef std::vector<Article*> ArticleLinkStorage;
typedef std::vector<Article*>::const_iterator ArticleLinkIterator;
/*! Create a new article from a title
* \param title The title of the article
*/
Article(std::string title)
: title(title) {};
//! Get the title of the article
std::string getTitle() const
{
return title;
}
//! get the number of links the article has
size_t getNumLinks() const;
/*! Add a link to another article
* \param[in] article Pointer to the article this article links
* to
*/
void addLink(Article* article)
{
links.push_back(article);
}
/*! Get const_iterator to first linked article */
ArticleLinkIterator linkBegin() const
{
return links.cbegin();
}
/*! Get const_iterator to last linked article */
ArticleLinkIterator linkEnd() const
{
return links.cend();
}
private:
std::string title;
ArticleLinkStorage links;
};
#endif //_ARTICLE_H
|
/*!
* represents a Wikipedia (Mediawiki) and its links
*/
class Article
{
public:
//! representation of links to other articles
typedef std::vector<Article*> ArticleLinkStorage;
//! representation of iterator over links
typedef std::vector<Article*>::iterator ArticleLinkIterator;
//! representation of const iterator over links
typedef std::vector<Article*>::const_iterator ArticleLinkConstIterator;
/*! Create a new article from a title
* \param title The title of the article
*/
Article(std::string title)
: title(title) {};
//! Get the title of the article
std::string getTitle() const
{
return title;
}
//! get the number of links the article has
size_t getNumLinks() const;
/*! Add a link to another article
* \param[in] article Pointer to the article this article links
* to
*/
void addLink(Article* article)
{
links.push_back(article);
}
/*! Get const_iterator to first linked article */
ArticleLinkConstIterator linkBegin() const
{
return links.cbegin();
}
/*! Get const_iterator to last linked article */
ArticleLinkConstIterator linkEnd() const
{
return links.cend();
}
private:
std::string title;
ArticleLinkStorage links;
};
#endif //_ARTICLE_H
|
Add modifying article link iterator typedef
|
Add modifying article link iterator typedef
|
C
|
mit
|
dueringa/WikiWalker
|
c
|
## Code Before:
/*!
* represents a Wikipedia (Mediawiki) and its links
*/
class Article
{
public:
typedef std::vector<Article*> ArticleLinkStorage;
typedef std::vector<Article*>::const_iterator ArticleLinkIterator;
/*! Create a new article from a title
* \param title The title of the article
*/
Article(std::string title)
: title(title) {};
//! Get the title of the article
std::string getTitle() const
{
return title;
}
//! get the number of links the article has
size_t getNumLinks() const;
/*! Add a link to another article
* \param[in] article Pointer to the article this article links
* to
*/
void addLink(Article* article)
{
links.push_back(article);
}
/*! Get const_iterator to first linked article */
ArticleLinkIterator linkBegin() const
{
return links.cbegin();
}
/*! Get const_iterator to last linked article */
ArticleLinkIterator linkEnd() const
{
return links.cend();
}
private:
std::string title;
ArticleLinkStorage links;
};
#endif //_ARTICLE_H
## Instruction:
Add modifying article link iterator typedef
## Code After:
/*!
* represents a Wikipedia (Mediawiki) and its links
*/
class Article
{
public:
//! representation of links to other articles
typedef std::vector<Article*> ArticleLinkStorage;
//! representation of iterator over links
typedef std::vector<Article*>::iterator ArticleLinkIterator;
//! representation of const iterator over links
typedef std::vector<Article*>::const_iterator ArticleLinkConstIterator;
/*! Create a new article from a title
* \param title The title of the article
*/
Article(std::string title)
: title(title) {};
//! Get the title of the article
std::string getTitle() const
{
return title;
}
//! get the number of links the article has
size_t getNumLinks() const;
/*! Add a link to another article
* \param[in] article Pointer to the article this article links
* to
*/
void addLink(Article* article)
{
links.push_back(article);
}
/*! Get const_iterator to first linked article */
ArticleLinkConstIterator linkBegin() const
{
return links.cbegin();
}
/*! Get const_iterator to last linked article */
ArticleLinkConstIterator linkEnd() const
{
return links.cend();
}
private:
std::string title;
ArticleLinkStorage links;
};
#endif //_ARTICLE_H
|
...
class Article
{
public:
//! representation of links to other articles
typedef std::vector<Article*> ArticleLinkStorage;
//! representation of iterator over links
typedef std::vector<Article*>::iterator ArticleLinkIterator;
//! representation of const iterator over links
typedef std::vector<Article*>::const_iterator ArticleLinkConstIterator;
/*! Create a new article from a title
* \param title The title of the article
...
}
/*! Get const_iterator to first linked article */
ArticleLinkConstIterator linkBegin() const
{
return links.cbegin();
}
/*! Get const_iterator to last linked article */
ArticleLinkConstIterator linkEnd() const
{
return links.cend();
}
...
|
04c226aab970da86fc9f59ec3a808cf916458261
|
test/CodeGen/arm-neon-fma.c
|
test/CodeGen/arm-neon-fma.c
|
// REQUIRES: arm-registered-target
// RUN: %clang -target thumbv7-none-linux-gnueabihf \
// RUN: -mcpu=cortex-a8 -mfloat-abi=hard \
// RUN: -O3 -S -emit-llvm -o - %s | FileCheck %s
#include <arm_neon.h>
float32x2_t test_fma_order(float32x2_t accum, float32x2_t lhs, float32x2_t rhs) {
return vfma_f32(accum, lhs, rhs);
// CHECK: call <2 x float> @llvm.fma.v2f32(<2 x float> %lhs, <2 x float> %rhs, <2 x float> %accum)
}
float32x4_t test_fmaq_order(float32x4_t accum, float32x4_t lhs, float32x4_t rhs) {
return vfmaq_f32(accum, lhs, rhs);
// CHECK: call <4 x float> @llvm.fma.v4f32(<4 x float> %lhs, <4 x float> %rhs, <4 x float> %accum)
}
|
// REQUIRES: arm-registered-target
// RUN: %clang_cc1 -triple thumbv7-none-linux-gnueabihf \
// RUN: -target-abi aapcs \
// RUN: -target-cpu cortex-a8 \
// RUN: -mfloat-abi hard \
// RUN: -ffreestanding \
// RUN: -O3 -S -emit-llvm -o - %s | FileCheck %s
#include <arm_neon.h>
float32x2_t test_fma_order(float32x2_t accum, float32x2_t lhs, float32x2_t rhs) {
return vfma_f32(accum, lhs, rhs);
// CHECK: call <2 x float> @llvm.fma.v2f32(<2 x float> %lhs, <2 x float> %rhs, <2 x float> %accum)
}
float32x4_t test_fmaq_order(float32x4_t accum, float32x4_t lhs, float32x4_t rhs) {
return vfmaq_f32(accum, lhs, rhs);
// CHECK: call <4 x float> @llvm.fma.v4f32(<4 x float> %lhs, <4 x float> %rhs, <4 x float> %accum)
}
|
Fix recent test for more diverse environments.
|
Fix recent test for more diverse environments.
I think the main issue was the lack of -ffreestanding, which pulled in
the host's stdint.h. After that things went rapidly downhill.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@172653 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang
|
c
|
## Code Before:
// REQUIRES: arm-registered-target
// RUN: %clang -target thumbv7-none-linux-gnueabihf \
// RUN: -mcpu=cortex-a8 -mfloat-abi=hard \
// RUN: -O3 -S -emit-llvm -o - %s | FileCheck %s
#include <arm_neon.h>
float32x2_t test_fma_order(float32x2_t accum, float32x2_t lhs, float32x2_t rhs) {
return vfma_f32(accum, lhs, rhs);
// CHECK: call <2 x float> @llvm.fma.v2f32(<2 x float> %lhs, <2 x float> %rhs, <2 x float> %accum)
}
float32x4_t test_fmaq_order(float32x4_t accum, float32x4_t lhs, float32x4_t rhs) {
return vfmaq_f32(accum, lhs, rhs);
// CHECK: call <4 x float> @llvm.fma.v4f32(<4 x float> %lhs, <4 x float> %rhs, <4 x float> %accum)
}
## Instruction:
Fix recent test for more diverse environments.
I think the main issue was the lack of -ffreestanding, which pulled in
the host's stdint.h. After that things went rapidly downhill.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@172653 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// REQUIRES: arm-registered-target
// RUN: %clang_cc1 -triple thumbv7-none-linux-gnueabihf \
// RUN: -target-abi aapcs \
// RUN: -target-cpu cortex-a8 \
// RUN: -mfloat-abi hard \
// RUN: -ffreestanding \
// RUN: -O3 -S -emit-llvm -o - %s | FileCheck %s
#include <arm_neon.h>
float32x2_t test_fma_order(float32x2_t accum, float32x2_t lhs, float32x2_t rhs) {
return vfma_f32(accum, lhs, rhs);
// CHECK: call <2 x float> @llvm.fma.v2f32(<2 x float> %lhs, <2 x float> %rhs, <2 x float> %accum)
}
float32x4_t test_fmaq_order(float32x4_t accum, float32x4_t lhs, float32x4_t rhs) {
return vfmaq_f32(accum, lhs, rhs);
// CHECK: call <4 x float> @llvm.fma.v4f32(<4 x float> %lhs, <4 x float> %rhs, <4 x float> %accum)
}
|
# ... existing code ...
// REQUIRES: arm-registered-target
// RUN: %clang_cc1 -triple thumbv7-none-linux-gnueabihf \
// RUN: -target-abi aapcs \
// RUN: -target-cpu cortex-a8 \
// RUN: -mfloat-abi hard \
// RUN: -ffreestanding \
// RUN: -O3 -S -emit-llvm -o - %s | FileCheck %s
#include <arm_neon.h>
# ... rest of the code ...
|
eb496468d61ff3245adbdec4108a04bc40a357fc
|
Grid.py
|
Grid.py
|
from boomslang.LineStyle import LineStyle
class Grid(object):
def __init__(self, color="#dddddd", style="-", visible=True):
self.color = color
self._lineStyle = LineStyle()
self._lineStyle.style = style
self.visible = visible
@property
def style(self):
return self._lineStyle.style
@style.setter
def style(self, value):
self._lineStyle.style = value
@style.getter
def style(self):
return self._lineStyle.style
def draw(self, fig, axes):
if self.visible:
axes.grid(color=self.color, linestyle=self.style)
# Gridlines should be below plots
axes.set_axisbelow(True)
|
from boomslang.LineStyle import LineStyle
class Grid(object):
def __init__(self, color="#dddddd", style="-", visible=True):
self.color = color
self._lineStyle = LineStyle()
self._lineStyle.style = style
self.visible = visible
self.which = 'major'
@property
def style(self):
return self._lineStyle.style
@style.setter
def style(self, value):
self._lineStyle.style = value
@style.getter
def style(self):
return self._lineStyle.style
def draw(self, fig, axes):
if self.visible:
axes.grid(color=self.color, linestyle=self.style,
which=self.which)
# Gridlines should be below plots
axes.set_axisbelow(True)
|
Allow gridlines on both major and minor axes.
|
Allow gridlines on both major and minor axes.
|
Python
|
bsd-3-clause
|
alexras/boomslang
|
python
|
## Code Before:
from boomslang.LineStyle import LineStyle
class Grid(object):
def __init__(self, color="#dddddd", style="-", visible=True):
self.color = color
self._lineStyle = LineStyle()
self._lineStyle.style = style
self.visible = visible
@property
def style(self):
return self._lineStyle.style
@style.setter
def style(self, value):
self._lineStyle.style = value
@style.getter
def style(self):
return self._lineStyle.style
def draw(self, fig, axes):
if self.visible:
axes.grid(color=self.color, linestyle=self.style)
# Gridlines should be below plots
axes.set_axisbelow(True)
## Instruction:
Allow gridlines on both major and minor axes.
## Code After:
from boomslang.LineStyle import LineStyle
class Grid(object):
def __init__(self, color="#dddddd", style="-", visible=True):
self.color = color
self._lineStyle = LineStyle()
self._lineStyle.style = style
self.visible = visible
self.which = 'major'
@property
def style(self):
return self._lineStyle.style
@style.setter
def style(self, value):
self._lineStyle.style = value
@style.getter
def style(self):
return self._lineStyle.style
def draw(self, fig, axes):
if self.visible:
axes.grid(color=self.color, linestyle=self.style,
which=self.which)
# Gridlines should be below plots
axes.set_axisbelow(True)
|
# ... existing code ...
self._lineStyle = LineStyle()
self._lineStyle.style = style
self.visible = visible
self.which = 'major'
@property
def style(self):
# ... modified code ...
def draw(self, fig, axes):
if self.visible:
axes.grid(color=self.color, linestyle=self.style,
which=self.which)
# Gridlines should be below plots
axes.set_axisbelow(True)
# ... rest of the code ...
|
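The record above threads a `which` argument through to `axes.grid()`. A small self-contained matplotlib sketch (assuming matplotlib is installed) showing the effect; note that minor gridlines only appear once minor ticks exist:

import matplotlib
matplotlib.use("Agg")  # headless backend so the sketch runs anywhere
import matplotlib.pyplot as plt
fig, ax = plt.subplots()
ax.plot(range(10), [x * x for x in range(10)])
ax.minorticks_on()                                     # minor ticks must exist to be gridded
ax.grid(color="#dddddd", linestyle="-", which="both")  # 'major', 'minor', or 'both'
ax.set_axisbelow(True)                                 # keep gridlines behind the data
fig.savefig("grid_demo.png")

The record defaults `which` to 'major', which preserves the previous behaviour; callers opt into 'minor' or 'both' explicitly.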
7afe444fe1c4b1af6035f9d6b72276043eac35ab
|
3RVX/OSD/OSD.h
|
3RVX/OSD/OSD.h
|
// Copyright (c) 2015, Matthew Malensek.
// Distributed under the BSD 2-Clause License (see LICENSE.txt for details)
#pragma once
#include <Windows.h>
#include "../HotkeyProcessor.h"
#include "../MeterWnd/MeterWnd.h"
#include "../Settings.h"
#include "../Window.h"
#include "OSDType.h"
class Monitor;
class OSD : HotkeyProcessor, protected Window {
public:
OSD(LPCWSTR className, HINSTANCE hInstance = NULL);
virtual void Hide() = 0;
virtual void ProcessHotkeys(HotkeyInfo &hki);
bool Enabled();
void Enabled(bool enabled);
protected:
HWND _masterWnd;
Settings *_settings;
void HideOthers(OSDType except);
void InitMeterWnd(MeterWnd &mWnd);
std::vector<Monitor> ActiveMonitors();
void PositionWindow(Monitor monitor, LayeredWnd &lWnd);
void CenterWindowX(Monitor monitor, LayeredWnd &lWnd);
void CenterWindowY(Monitor monitor, LayeredWnd &lWnd);
virtual LRESULT WndProc(HWND hWnd, UINT message,
WPARAM wParam, LPARAM lParam);
private:
bool _enabled;
};
|
// Copyright (c) 2015, Matthew Malensek.
// Distributed under the BSD 2-Clause License (see LICENSE.txt for details)
#pragma once
#include <Windows.h>
#include "../HotkeyProcessor.h"
#include "../MeterWnd/MeterWnd.h"
#include "../Settings.h"
#include "../Window.h"
#include "OSDType.h"
class Monitor;
class OSD : HotkeyProcessor, protected Window {
public:
OSD(LPCWSTR className, HINSTANCE hInstance = NULL);
virtual void Hide() = 0;
virtual void ProcessHotkeys(HotkeyInfo &hki);
bool Enabled();
void Enabled(bool enabled);
/// <summary>
/// This method is called when the system display configuration has changed,
/// which includes monitors being removed or plugged in.
/// </summary>
virtual void OnDisplayChange() = 0;
protected:
HWND _masterWnd;
Settings *_settings;
void HideOthers(OSDType except);
void InitMeterWnd(MeterWnd &mWnd);
std::vector<Monitor> ActiveMonitors();
void PositionWindow(Monitor monitor, LayeredWnd &lWnd);
void CenterWindowX(Monitor monitor, LayeredWnd &lWnd);
void CenterWindowY(Monitor monitor, LayeredWnd &lWnd);
virtual LRESULT WndProc(HWND hWnd, UINT message,
WPARAM wParam, LPARAM lParam);
private:
bool _enabled;
};
|
Add the interface method for OnDisplayChange
|
Add the interface method for OnDisplayChange
|
C
|
bsd-2-clause
|
malensek/3RVX,malensek/3RVX,malensek/3RVX
|
c
|
## Code Before:
// Copyright (c) 2015, Matthew Malensek.
// Distributed under the BSD 2-Clause License (see LICENSE.txt for details)
#pragma once
#include <Windows.h>
#include "../HotkeyProcessor.h"
#include "../MeterWnd/MeterWnd.h"
#include "../Settings.h"
#include "../Window.h"
#include "OSDType.h"
class Monitor;
class OSD : HotkeyProcessor, protected Window {
public:
OSD(LPCWSTR className, HINSTANCE hInstance = NULL);
virtual void Hide() = 0;
virtual void ProcessHotkeys(HotkeyInfo &hki);
bool Enabled();
void Enabled(bool enabled);
protected:
HWND _masterWnd;
Settings *_settings;
void HideOthers(OSDType except);
void InitMeterWnd(MeterWnd &mWnd);
std::vector<Monitor> ActiveMonitors();
void PositionWindow(Monitor monitor, LayeredWnd &lWnd);
void CenterWindowX(Monitor monitor, LayeredWnd &lWnd);
void CenterWindowY(Monitor monitor, LayeredWnd &lWnd);
virtual LRESULT WndProc(HWND hWnd, UINT message,
WPARAM wParam, LPARAM lParam);
private:
bool _enabled;
};
## Instruction:
Add the interface method for OnDisplayChange
## Code After:
// Copyright (c) 2015, Matthew Malensek.
// Distributed under the BSD 2-Clause License (see LICENSE.txt for details)
#pragma once
#include <Windows.h>
#include "../HotkeyProcessor.h"
#include "../MeterWnd/MeterWnd.h"
#include "../Settings.h"
#include "../Window.h"
#include "OSDType.h"
class Monitor;
class OSD : HotkeyProcessor, protected Window {
public:
OSD(LPCWSTR className, HINSTANCE hInstance = NULL);
virtual void Hide() = 0;
virtual void ProcessHotkeys(HotkeyInfo &hki);
bool Enabled();
void Enabled(bool enabled);
/// <summary>
/// This method is called when the system display configuration has changed,
/// which includes monitors being removed or plugged in.
/// </summary>
virtual void OnDisplayChange() = 0;
protected:
HWND _masterWnd;
Settings *_settings;
void HideOthers(OSDType except);
void InitMeterWnd(MeterWnd &mWnd);
std::vector<Monitor> ActiveMonitors();
void PositionWindow(Monitor monitor, LayeredWnd &lWnd);
void CenterWindowX(Monitor monitor, LayeredWnd &lWnd);
void CenterWindowY(Monitor monitor, LayeredWnd &lWnd);
virtual LRESULT WndProc(HWND hWnd, UINT message,
WPARAM wParam, LPARAM lParam);
private:
bool _enabled;
};
|
...
bool Enabled();
void Enabled(bool enabled);
/// <summary>
/// This method is called when the system display configuration has changed,
/// which includes monitors being removed or plugged in.
/// </summary>
virtual void OnDisplayChange() = 0;
protected:
HWND _masterWnd;
Settings *_settings;
...
|
1fe351b7107b9740f933209e0e967d693db320c4
|
src/main/java/nl/ekholabs/nlp/controller/SpeechToTextController.java
|
src/main/java/nl/ekholabs/nlp/controller/SpeechToTextController.java
|
package nl.ekholabs.nlp.controller;
import java.io.IOException;
import nl.ekholabs.nlp.model.TextResponse;
import nl.ekholabs.nlp.service.SpeechToTextService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import static org.springframework.http.MediaType.APPLICATION_JSON_UTF8_VALUE;
import static org.springframework.http.MediaType.MULTIPART_FORM_DATA_VALUE;
@RestController
public class SpeechToTextController {
private final SpeechToTextService speechToTextService;
@Autowired
public SpeechToTextController(final SpeechToTextService speechToTextService) {
this.speechToTextService = speechToTextService;
}
@PostMapping(path = "/", produces = APPLICATION_JSON_UTF8_VALUE, consumes = MULTIPART_FORM_DATA_VALUE)
public TextResponse process(final @RequestParam(value = "input") MultipartFile fileToProcess) throws IOException {
final String outputText = speechToTextService.processSpeech(fileToProcess.getBytes());
return new TextResponse(outputText);
}
}
|
package nl.ekholabs.nlp.controller;
import java.io.IOException;
import nl.ekholabs.nlp.model.TextResponse;
import nl.ekholabs.nlp.service.SpeechToTextService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import static org.springframework.http.MediaType.APPLICATION_JSON_UTF8_VALUE;
import static org.springframework.http.MediaType.MULTIPART_FORM_DATA_VALUE;
@RestController
public class SpeechToTextController {
private final SpeechToTextService speechToTextService;
@Autowired
public SpeechToTextController(final SpeechToTextService speechToTextService) {
this.speechToTextService = speechToTextService;
}
@PostMapping(produces = APPLICATION_JSON_UTF8_VALUE, consumes = MULTIPART_FORM_DATA_VALUE)
public TextResponse process(final @RequestParam(value = "input") MultipartFile fileToProcess) throws IOException {
final String outputText = speechToTextService.processSpeech(fileToProcess.getBytes());
return new TextResponse(outputText);
}
}
|
Remove path attribute from PostMapping
|
Remove path attribute from PostMapping
|
Java
|
mit
|
ekholabs/elsie-dee
|
java
|
## Code Before:
package nl.ekholabs.nlp.controller;
import java.io.IOException;
import nl.ekholabs.nlp.model.TextResponse;
import nl.ekholabs.nlp.service.SpeechToTextService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import static org.springframework.http.MediaType.APPLICATION_JSON_UTF8_VALUE;
import static org.springframework.http.MediaType.MULTIPART_FORM_DATA_VALUE;
@RestController
public class SpeechToTextController {
private final SpeechToTextService speechToTextService;
@Autowired
public SpeechToTextController(final SpeechToTextService speechToTextService) {
this.speechToTextService = speechToTextService;
}
@PostMapping(path = "/", produces = APPLICATION_JSON_UTF8_VALUE, consumes = MULTIPART_FORM_DATA_VALUE)
public TextResponse process(final @RequestParam(value = "input") MultipartFile fileToProcess) throws IOException {
final String outputText = speechToTextService.processSpeech(fileToProcess.getBytes());
return new TextResponse(outputText);
}
}
## Instruction:
Remove path attribute from PostMapping
## Code After:
package nl.ekholabs.nlp.controller;
import java.io.IOException;
import nl.ekholabs.nlp.model.TextResponse;
import nl.ekholabs.nlp.service.SpeechToTextService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import static org.springframework.http.MediaType.APPLICATION_JSON_UTF8_VALUE;
import static org.springframework.http.MediaType.MULTIPART_FORM_DATA_VALUE;
@RestController
public class SpeechToTextController {
private final SpeechToTextService speechToTextService;
@Autowired
public SpeechToTextController(final SpeechToTextService speechToTextService) {
this.speechToTextService = speechToTextService;
}
@PostMapping(produces = APPLICATION_JSON_UTF8_VALUE, consumes = MULTIPART_FORM_DATA_VALUE)
public TextResponse process(final @RequestParam(value = "input") MultipartFile fileToProcess) throws IOException {
final String outputText = speechToTextService.processSpeech(fileToProcess.getBytes());
return new TextResponse(outputText);
}
}
|
// ... existing code ...
this.speechToTextService = speechToTextService;
}
@PostMapping(produces = APPLICATION_JSON_UTF8_VALUE, consumes = MULTIPART_FORM_DATA_VALUE)
public TextResponse process(final @RequestParam(value = "input") MultipartFile fileToProcess) throws IOException {
final String outputText = speechToTextService.processSpeech(fileToProcess.getBytes());
// ... rest of the code ...
|
26a53141e844c11e7ff904af2620b7ee125b011d
|
diana/tracking.py
|
diana/tracking.py
|
from . import packet as p
class Tracker:
def __init__(self):
self.objects = {}
def update_object(self, record):
try:
oid = record['object']
except KeyError:
return
else:
self.objects.setdefault(oid, {}).update(record)
def remove_object(self, oid):
try:
del self.objects[oid]
except KeyError:
pass
def rx(self, packet):
if isinstance(packet, p.ObjectUpdatePacket):
for record in packet.records:
self.update_object(record)
elif isinstance(packet, p.DestroyObjectPacket):
self.remove_object(packet.object)
|
from . import packet as p
class Tracker:
def __init__(self):
self.objects = {}
@property
def player_ship(self):
for _obj in self.objects.values():
if _obj['type'] == p.ObjectType.player_vessel:
return _obj
return {}
def update_object(self, record):
try:
oid = record['object']
except KeyError:
return
else:
self.objects.setdefault(oid, {}).update(record)
def remove_object(self, oid):
try:
del self.objects[oid]
except KeyError:
pass
def rx(self, packet):
if isinstance(packet, p.ObjectUpdatePacket):
for record in packet.records:
self.update_object(record)
elif isinstance(packet, p.DestroyObjectPacket):
self.remove_object(packet.object)
|
Add a convenience method to get the player ship
|
Add a convenience method to get the player ship
|
Python
|
mit
|
prophile/libdiana
|
python
|
## Code Before:
from . import packet as p
class Tracker:
def __init__(self):
self.objects = {}
def update_object(self, record):
try:
oid = record['object']
except KeyError:
return
else:
self.objects.setdefault(oid, {}).update(record)
def remove_object(self, oid):
try:
del self.objects[oid]
except KeyError:
pass
def rx(self, packet):
if isinstance(packet, p.ObjectUpdatePacket):
for record in packet.records:
self.update_object(record)
elif isinstance(packet, p.DestroyObjectPacket):
self.remove_object(packet.object)
## Instruction:
Add a convenience method to get the player ship
## Code After:
from . import packet as p
class Tracker:
def __init__(self):
self.objects = {}
@property
def player_ship(self):
for _obj in self.objects.values():
if _obj['type'] == p.ObjectType.player_vessel:
return _obj
return {}
def update_object(self, record):
try:
oid = record['object']
except KeyError:
return
else:
self.objects.setdefault(oid, {}).update(record)
def remove_object(self, oid):
try:
del self.objects[oid]
except KeyError:
pass
def rx(self, packet):
if isinstance(packet, p.ObjectUpdatePacket):
for record in packet.records:
self.update_object(record)
elif isinstance(packet, p.DestroyObjectPacket):
self.remove_object(packet.object)
|
// ... existing code ...
class Tracker:
def __init__(self):
self.objects = {}
@property
def player_ship(self):
for _obj in self.objects.values():
if _obj['type'] == p.ObjectType.player_vessel:
return _obj
return {}
def update_object(self, record):
try:
// ... rest of the code ...
|
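The record above exposes the player vessel through a read-only `@property` that scans the tracked objects. A standalone sketch of the same lookup pattern; the plain string "player_vessel" stands in for the `p.ObjectType.player_vessel` enum used in the record:

class Tracker:
    """Minimal stand-in for the tracker above, not the libdiana class itself."""
    def __init__(self):
        self.objects = {}
    @property
    def player_ship(self):
        # First object flagged as the player vessel, or an empty dict if none is known yet.
        for obj in self.objects.values():
            if obj.get("type") == "player_vessel":
                return obj
        return {}
tracker = Tracker()
tracker.objects[1] = {"object": 1, "type": "npc"}
tracker.objects[2] = {"object": 2, "type": "player_vessel", "name": "Artemis"}
print(tracker.player_ship.get("name"))  # Artemis

Returning `{}` keeps call sites simple (`tracker.player_ship.get(...)` never raises), at the cost of not distinguishing "no player ship yet" from an empty record.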
84ad348562e64084894e7c033de870a016390134
|
server/auth/auth.py
|
server/auth/auth.py
|
import json
from flask import Blueprint, request
from flask.ext.login import current_user, logout_user, login_user
from flask.ext.restful import Api, Resource, abort
from server.models import Lecturer, db
auth = Blueprint('auth', __name__)
api = Api(auth)
class LoginResource(Resource):
def get(self):
if current_user.is_active:
return {'username': current_user.full_name}
else:
abort(403, message="The user is not logged in")
def post(self):
email = request.form['email']
password = request.form['password']
user = (
db.session.query(Lecturer)
.filter(Lecturer.email == email)
.filter(Lecturer.password == password)
.first()
)
if not user:
abort(403, message="Invalid credentials")
login_user(user)
return {'username': current_user.full_name}
class LogoutResource(Resource):
def post(self):
logout_user()
return '', 204
api.add_resource(LoginResource, '/login')
api.add_resource(LogoutResource, '/logout')
|
import json
from flask import Blueprint, request
from flask.ext.login import current_user, logout_user, login_user
from flask.ext.restful import Api, Resource, abort, reqparse
from server.models import Lecturer, db
auth = Blueprint('auth', __name__)
api = Api(auth)
class LoginResource(Resource):
def get(self):
if current_user.is_active:
return {'username': current_user.full_name}
else:
abort(403, message="The user is not logged in")
def post(self):
argparser = reqparse.RequestParser()
argparser.add_argument('email', required=True)
argparser.add_argument('password', required=True)
args = argparser.parse_args()
email = args.email
password = args.password
user = (
db.session.query(Lecturer)
.filter(Lecturer.email == email)
.filter(Lecturer.password == password)
.first()
)
if not user:
abort(403, message="Invalid credentials")
login_user(user)
return {'username': current_user.full_name}
class LogoutResource(Resource):
def post(self):
logout_user()
return '', 204
api.add_resource(LoginResource, '/login')
api.add_resource(LogoutResource, '/logout')
|
Fix Login API implementation not parsing JSON POST data
|
Fix Login API implementation not parsing JSON POST data
|
Python
|
mit
|
MACSIFS/IFS,MACSIFS/IFS,MACSIFS/IFS,MACSIFS/IFS
|
python
|
## Code Before:
import json
from flask import Blueprint, request
from flask.ext.login import current_user, logout_user, login_user
from flask.ext.restful import Api, Resource, abort
from server.models import Lecturer, db
auth = Blueprint('auth', __name__)
api = Api(auth)
class LoginResource(Resource):
def get(self):
if current_user.is_active:
return {'username': current_user.full_name}
else:
abort(403, message="The user is not logged in")
def post(self):
email = request.form['email']
password = request.form['password']
user = (
db.session.query(Lecturer)
.filter(Lecturer.email == email)
.filter(Lecturer.password == password)
.first()
)
if not user:
abort(403, message="Invalid credentials")
login_user(user)
return {'username': current_user.full_name}
class LogoutResource(Resource):
def post(self):
logout_user()
return '', 204
api.add_resource(LoginResource, '/login')
api.add_resource(LogoutResource, '/logout')
## Instruction:
Fix Login API implementation not parsing JSON POST data
## Code After:
import json
from flask import Blueprint, request
from flask.ext.login import current_user, logout_user, login_user
from flask.ext.restful import Api, Resource, abort, reqparse
from server.models import Lecturer, db
auth = Blueprint('auth', __name__)
api = Api(auth)
class LoginResource(Resource):
def get(self):
if current_user.is_active:
return {'username': current_user.full_name}
else:
abort(403, message="The user is not logged in")
def post(self):
argparser = reqparse.RequestParser()
argparser.add_argument('email', required=True)
argparser.add_argument('password', required=True)
args = argparser.parse_args()
email = args.email
password = args.password
user = (
db.session.query(Lecturer)
.filter(Lecturer.email == email)
.filter(Lecturer.password == password)
.first()
)
if not user:
abort(403, message="Invalid credentials")
login_user(user)
return {'username': current_user.full_name}
class LogoutResource(Resource):
def post(self):
logout_user()
return '', 204
api.add_resource(LoginResource, '/login')
api.add_resource(LogoutResource, '/logout')
|
...
from flask import Blueprint, request
from flask.ext.login import current_user, logout_user, login_user
from flask.ext.restful import Api, Resource, abort, reqparse
from server.models import Lecturer, db
...
abort(403, message="The user is not logged in")
def post(self):
argparser = reqparse.RequestParser()
argparser.add_argument('email', required=True)
argparser.add_argument('password', required=True)
args = argparser.parse_args()
email = args.email
password = args.password
user = (
db.session.query(Lecturer)
.filter(Lecturer.email == email)
...
|
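The record above moves from `request.form` to Flask-RESTful's `RequestParser`, which reads form fields and JSON bodies alike and rejects missing arguments with a 400. A hedged sketch of the same pattern against the modern `flask_restful` import path (the legacy `flask.ext.restful` path in the record was removed from Flask itself), assuming the Flask-RESTful package is installed:

from flask import Flask
from flask_restful import Api, Resource, reqparse
app = Flask(__name__)
api = Api(app)
class Login(Resource):
    def post(self):
        parser = reqparse.RequestParser()
        parser.add_argument("email", required=True)
        parser.add_argument("password", required=True)
        args = parser.parse_args()  # accepts form data or a JSON body
        return {"email": args.email}
api.add_resource(Login, "/login")
if __name__ == "__main__":
    app.run(debug=True)

Example call: curl -X POST -H "Content-Type: application/json" -d '{"email": "user@example.com", "password": "secret"}' http://127.0.0.1:5000/login. Flask-RESTful's own documentation has long flagged reqparse for eventual replacement, so newer code often validates with a schema library instead.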
84b7c5cb4dc1a65dbff9615f3e73449a75cbede5
|
src/main/java/com/chrisdempewolf/responses/pin/Pins.java
|
src/main/java/com/chrisdempewolf/responses/pin/Pins.java
|
package com.chrisdempewolf.responses.pin;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
public class Pins implements Iterable<Pin> {
private final List<Pin> pin;
public Pins(Pin[] pin) {
this.pin = Arrays.asList(pin);
}
@Override
public Iterator<Pin> iterator() {
return pin.iterator();
}
public List<Pin> getPin() {
return pin;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Pins pins = (Pins) o;
return !(pin != null ? !pin.equals(pins.pin) : pins.pin != null);
}
@Override
public int hashCode() {
return pin != null ? pin.hashCode() : 0;
}
@Override
public String toString() {
return "Pins{" +
"pin=" + pin +
'}';
}
}
|
package com.chrisdempewolf.responses.pin;
import com.google.gson.annotations.SerializedName;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
public class Pins implements Iterable<Pin> {
@SerializedName("data")
private final List<Pin> pins;
public Pins(Pin[] pins) {
this.pins = Arrays.asList(pins);
}
@Override
public Iterator<Pin> iterator() {
return pins.iterator();
}
public List<Pin> getPins() {
return pins;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Pins pins = (Pins) o;
return !(this.pins != null ? !this.pins.equals(pins.pins) : pins.pins != null);
}
@Override
public int hashCode() {
return pins != null ? pins.hashCode() : 0;
}
@Override
public String toString() {
return "Pins{" +
"pins=" + pins +
'}';
}
}
|
Rename pin->pins; update serialized name
|
Rename pin->pins; update serialized name
|
Java
|
mit
|
dempe/pinterest-java,dempe/pinterest-java
|
java
|
## Code Before:
package com.chrisdempewolf.responses.pin;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
public class Pins implements Iterable<Pin> {
private final List<Pin> pin;
public Pins(Pin[] pin) {
this.pin = Arrays.asList(pin);
}
@Override
public Iterator<Pin> iterator() {
return pin.iterator();
}
public List<Pin> getPin() {
return pin;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Pins pins = (Pins) o;
return !(pin != null ? !pin.equals(pins.pin) : pins.pin != null);
}
@Override
public int hashCode() {
return pin != null ? pin.hashCode() : 0;
}
@Override
public String toString() {
return "Pins{" +
"pin=" + pin +
'}';
}
}
## Instruction:
Rename pin->pins; update serialized name
## Code After:
package com.chrisdempewolf.responses.pin;
import com.google.gson.annotations.SerializedName;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
public class Pins implements Iterable<Pin> {
@SerializedName("data")
private final List<Pin> pins;
public Pins(Pin[] pins) {
this.pins = Arrays.asList(pins);
}
@Override
public Iterator<Pin> iterator() {
return pins.iterator();
}
public List<Pin> getPins() {
return pins;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Pins pins = (Pins) o;
return !(this.pins != null ? !this.pins.equals(pins.pins) : pins.pins != null);
}
@Override
public int hashCode() {
return pins != null ? pins.hashCode() : 0;
}
@Override
public String toString() {
return "Pins{" +
"pins=" + pins +
'}';
}
}
|
// ... existing code ...
package com.chrisdempewolf.responses.pin;
import com.google.gson.annotations.SerializedName;
import java.util.Arrays;
import java.util.Iterator;
// ... modified code ...
import java.util.List;
public class Pins implements Iterable<Pin> {
@SerializedName("data")
private final List<Pin> pins;
public Pins(Pin[] pins) {
this.pins = Arrays.asList(pins);
}
@Override
public Iterator<Pin> iterator() {
return pins.iterator();
}
public List<Pin> getPins() {
return pins;
}
@Override
...
Pins pins = (Pins) o;
return !(this.pins != null ? !this.pins.equals(pins.pins) : pins.pins != null);
}
@Override
public int hashCode() {
return pins != null ? pins.hashCode() : 0;
}
@Override
public String toString() {
return "Pins{" +
"pins=" + pins +
'}';
}
}
// ... rest of the code ...
|
a307c5fc2555d282dfa6193cdbcfb2d15e185c0c
|
aq/parsers.py
|
aq/parsers.py
|
from collections import namedtuple
import collections
from six import string_types
from aq.errors import QueryParsingError
from aq.select_parser import select_stmt, ParseException
TableId = namedtuple('TableId', ('database', 'table', 'alias'))
QueryMetadata = namedtuple('QueryMetadata', ('tables',))
class SelectParser(object):
def __init__(self, options):
self.options = options
@staticmethod
def parse_query(query):
try:
parse_result = select_stmt.parseString(query, parseAll=True)
except ParseException as e:
raise QueryParsingError(e)
if not parse_result.table:
raise QueryParsingError('No table specified in query')
tables = [parse_table_id(tid) for tid in parse_result.table_ids]
parsed_query = concat(parse_result)
return parsed_query, QueryMetadata(tables=tables)
def parse_table_id(table_id):
database = table_id.database[0] if table_id.database else None
table = table_id.table[0] if table_id.table else None
alias = table_id.alias[0] if table_id.alias else None
return TableId(database, table, alias)
def flatten(nested_list):
for item in nested_list:
if isinstance(item, collections.Iterable) and not isinstance(item, string_types):
for nested_item in flatten(item):
yield nested_item
else:
yield item
def concat(tokens):
return ' '.join(flatten(tokens))
|
import collections
from collections import namedtuple
from six import string_types
from aq.errors import QueryParsingError
from aq.select_parser import select_stmt, ParseException
TableId = namedtuple('TableId', ('database', 'table', 'alias'))
QueryMetadata = namedtuple('QueryMetadata', ('tables',))
class SelectParser(object):
def __init__(self, options):
self.options = options
@staticmethod
def parse_query(query):
try:
parse_result = select_stmt.parseString(query, parseAll=True)
except ParseException as e:
raise QueryParsingError(e)
tables = [parse_table_id(tid) for tid in parse_result.table_ids]
parsed_query = concat(parse_result)
return parsed_query, QueryMetadata(tables=tables)
def parse_table_id(table_id):
database = table_id.database[0] if table_id.database else None
table = table_id.table[0] if table_id.table else None
alias = table_id.alias[0] if table_id.alias else None
return TableId(database, table, alias)
def flatten(nested_list):
for item in nested_list:
if isinstance(item, collections.Iterable) and not isinstance(item, string_types):
for nested_item in flatten(item):
yield nested_item
else:
yield item
def concat(tokens):
return ' '.join(flatten(tokens))
|
Allow a query without a table to run
|
Allow a query without a table to run
|
Python
|
mit
|
lebinh/aq
|
python
|
## Code Before:
from collections import namedtuple
import collections
from six import string_types
from aq.errors import QueryParsingError
from aq.select_parser import select_stmt, ParseException
TableId = namedtuple('TableId', ('database', 'table', 'alias'))
QueryMetadata = namedtuple('QueryMetadata', ('tables',))
class SelectParser(object):
def __init__(self, options):
self.options = options
@staticmethod
def parse_query(query):
try:
parse_result = select_stmt.parseString(query, parseAll=True)
except ParseException as e:
raise QueryParsingError(e)
if not parse_result.table:
raise QueryParsingError('No table specified in query')
tables = [parse_table_id(tid) for tid in parse_result.table_ids]
parsed_query = concat(parse_result)
return parsed_query, QueryMetadata(tables=tables)
def parse_table_id(table_id):
database = table_id.database[0] if table_id.database else None
table = table_id.table[0] if table_id.table else None
alias = table_id.alias[0] if table_id.alias else None
return TableId(database, table, alias)
def flatten(nested_list):
for item in nested_list:
if isinstance(item, collections.Iterable) and not isinstance(item, string_types):
for nested_item in flatten(item):
yield nested_item
else:
yield item
def concat(tokens):
return ' '.join(flatten(tokens))
## Instruction:
Allow a query without a table to run
## Code After:
import collections
from collections import namedtuple
from six import string_types
from aq.errors import QueryParsingError
from aq.select_parser import select_stmt, ParseException
TableId = namedtuple('TableId', ('database', 'table', 'alias'))
QueryMetadata = namedtuple('QueryMetadata', ('tables',))
class SelectParser(object):
def __init__(self, options):
self.options = options
@staticmethod
def parse_query(query):
try:
parse_result = select_stmt.parseString(query, parseAll=True)
except ParseException as e:
raise QueryParsingError(e)
tables = [parse_table_id(tid) for tid in parse_result.table_ids]
parsed_query = concat(parse_result)
return parsed_query, QueryMetadata(tables=tables)
def parse_table_id(table_id):
database = table_id.database[0] if table_id.database else None
table = table_id.table[0] if table_id.table else None
alias = table_id.alias[0] if table_id.alias else None
return TableId(database, table, alias)
def flatten(nested_list):
for item in nested_list:
if isinstance(item, collections.Iterable) and not isinstance(item, string_types):
for nested_item in flatten(item):
yield nested_item
else:
yield item
def concat(tokens):
return ' '.join(flatten(tokens))
|
...
import collections
from collections import namedtuple
from six import string_types
...
parse_result = select_stmt.parseString(query, parseAll=True)
except ParseException as e:
raise QueryParsingError(e)
tables = [parse_table_id(tid) for tid in parse_result.table_ids]
parsed_query = concat(parse_result)
...
|
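The record above keeps a recursive `flatten` that treats strings as leaves so parsed tokens can be re-joined into SQL text. A standalone sketch of that helper with an example token tree (the tree is made up for illustration, not taken from aq):

import collections.abc
def flatten(nested):
    """Yield leaf items from arbitrarily nested iterables, treating strings as leaves."""
    for item in nested:
        if isinstance(item, collections.abc.Iterable) and not isinstance(item, str):
            yield from flatten(item)
        else:
            yield item
tokens = ["SELECT", ["name", ",", ["count", "(", "*", ")"]], "FROM", ["ec2", ".", "instances"]]
print(" ".join(flatten(tokens)))
# SELECT name , count ( * ) FROM ec2 . instances

The record checks `collections.Iterable`, an alias that was removed in Python 3.10; `collections.abc.Iterable` is the portable spelling.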
aa6da3aa2b7d4781ec0c3d94ea68c11d75b76506
|
bonobo/structs/graphs.py
|
bonobo/structs/graphs.py
|
from bonobo.constants import BEGIN
class Graph:
"""
Represents a coherent directed acyclic graph of components.
"""
def __init__(self, *chain):
self.nodes = []
self.graph = {BEGIN: set()}
self.add_chain(*chain)
def outputs_of(self, idx, create=False):
if create and not idx in self.graph:
self.graph[idx] = set()
return self.graph[idx]
def add_node(self, c):
i = len(self.nodes)
self.nodes.append(c)
return i
def add_chain(self, *nodes, _input=BEGIN):
for node in nodes:
_next = self.add_node(node)
self.outputs_of(_input, create=True).add(_next)
_input = _next
def __len__(self):
return len(self.nodes)
|
from bonobo.constants import BEGIN
class Graph:
"""
Represents a coherent directed acyclic graph of components.
"""
def __init__(self, *chain):
self.nodes = []
self.graph = {BEGIN: set()}
self.add_chain(*chain)
def outputs_of(self, idx, create=False):
if create and not idx in self.graph:
self.graph[idx] = set()
return self.graph[idx]
def add_node(self, c):
i = len(self.nodes)
self.nodes.append(c)
return i
def add_chain(self, *nodes, _input=BEGIN, _output=None):
for node in nodes:
_next = self.add_node(node)
self.outputs_of(_input, create=True).add(_next)
_input = _next
if _output:
if not _output in self.nodes:
raise ValueError('Output not found.')
self.outputs_of(_input, create=True).add(self.nodes.index(_output))
return self
def __len__(self):
return len(self.nodes)
|
Allow specifying the output of a chain in the Graph class.
|
Allow specifying the output of a chain in the Graph class.
|
Python
|
apache-2.0
|
hartym/bonobo,hartym/bonobo,hartym/bonobo,python-bonobo/bonobo,python-bonobo/bonobo,python-bonobo/bonobo
|
python
|
## Code Before:
from bonobo.constants import BEGIN
class Graph:
"""
Represents a coherent directed acyclic graph of components.
"""
def __init__(self, *chain):
self.nodes = []
self.graph = {BEGIN: set()}
self.add_chain(*chain)
def outputs_of(self, idx, create=False):
if create and not idx in self.graph:
self.graph[idx] = set()
return self.graph[idx]
def add_node(self, c):
i = len(self.nodes)
self.nodes.append(c)
return i
def add_chain(self, *nodes, _input=BEGIN):
for node in nodes:
_next = self.add_node(node)
self.outputs_of(_input, create=True).add(_next)
_input = _next
def __len__(self):
return len(self.nodes)
## Instruction:
Allow specifying the output of a chain in the Graph class.
## Code After:
from bonobo.constants import BEGIN
class Graph:
"""
Represents a coherent directed acyclic graph of components.
"""
def __init__(self, *chain):
self.nodes = []
self.graph = {BEGIN: set()}
self.add_chain(*chain)
def outputs_of(self, idx, create=False):
if create and not idx in self.graph:
self.graph[idx] = set()
return self.graph[idx]
def add_node(self, c):
i = len(self.nodes)
self.nodes.append(c)
return i
def add_chain(self, *nodes, _input=BEGIN, _output=None):
for node in nodes:
_next = self.add_node(node)
self.outputs_of(_input, create=True).add(_next)
_input = _next
if _output:
if not _output in self.nodes:
raise ValueError('Output not found.')
self.outputs_of(_input, create=True).add(self.nodes.index(_output))
return self
def __len__(self):
return len(self.nodes)
|
...
self.nodes.append(c)
return i
def add_chain(self, *nodes, _input=BEGIN, _output=None):
for node in nodes:
_next = self.add_node(node)
self.outputs_of(_input, create=True).add(_next)
_input = _next
if _output:
if not _output in self.nodes:
raise ValueError('Output not found.')
self.outputs_of(_input, create=True).add(self.nodes.index(_output))
return self
def __len__(self):
return len(self.nodes)
...
|
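The record above lets `add_chain` terminate into an already-registered node via `_output`, which is how a second branch can fan into an existing sink. A usage sketch against the Graph snapshot shown in the record; the import path mirrors the record's file location (bonobo/structs/graphs.py) and may differ in released bonobo versions, whose Graph API was later reworked:

from bonobo.structs.graphs import Graph  # path taken from the record; an assumption
def extract():
    yield "a"
    yield "b"
def transform(row):
    return row.upper()
def load(row):
    print(row)
g = Graph()
g.add_chain(extract, transform, load)  # BEGIN -> extract -> transform -> load
g.add_chain(str.strip, _output=load)   # second chain ends at the existing load node
print(len(g), g.graph)

`_output` must reference a node object that was already added; otherwise the snapshot raises `ValueError('Output not found.')`. The new `return self` also allows chaining several `add_chain` calls in one expression.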
cc08fcbb513224aafe6c04143a150d1019c032ef
|
setup_py2exe.py
|
setup_py2exe.py
|
from distutils.core import setup
from glob import glob
import os
import py2exe
from setup import SSLYZE_SETUP
data_files = [("Microsoft.VC90.CRT", glob(r'C:\Program Files\Microsoft Visual Studio 9.0\VC\redist\x86\Microsoft.VC90.CRT\*.*'))]
# Trust Stores
plugin_data_path = 'plugins\\data\\trust_stores'
plugin_data_files = []
for file in os.listdir(plugin_data_path):
file = os.path.join(plugin_data_path, file)
if os.path.isfile(file): # skip directories
plugin_data_files.append( file)
data_files.append((plugin_data_path, plugin_data_files))
sslyze_setup_py2exe = SSLYZE_SETUP.copy()
sslyze_setup_py2exe.update(
{
'console' : ['sslyze.py'],
'data_files' : data_files,
'zipfile' : None,
'options' : {'py2exe':{
#'skip_archive': True,
'bundle_files': 1,
}}
}
)
setup(**sslyze_setup_py2exe)
|
from distutils.core import setup
from glob import glob
import os
import py2exe
from setup import SSLYZE_SETUP
data_files = [("Microsoft.VC90.CRT", glob(r'C:\Program Files\Microsoft Visual Studio 9.0\VC\redist\x86\Microsoft.VC90.CRT\*.*'))]
# Trust Stores
plugin_data_files = []
for file in os.listdir('plugins\\data\\trust_stores'):
file = os.path.join('plugins\\data\\trust_stores', file)
if os.path.isfile(file): # skip directories
plugin_data_files.append( file)
data_files.append(('data\\trust_stores', plugin_data_files))
sslyze_setup_py2exe = SSLYZE_SETUP.copy()
sslyze_setup_py2exe.update(
{
'console' : ['sslyze.py'],
'data_files' : data_files,
'zipfile' : None,
'options' : {'py2exe':{
#'skip_archive': True,
'bundle_files': 1,
}}
}
)
setup(**sslyze_setup_py2exe)
|
Fix trust stores paths for py2exe builds
|
Fix trust stores paths for py2exe builds
|
Python
|
agpl-3.0
|
nabla-c0d3/sslyze
|
python
|
## Code Before:
from distutils.core import setup
from glob import glob
import os
import py2exe
from setup import SSLYZE_SETUP
data_files = [("Microsoft.VC90.CRT", glob(r'C:\Program Files\Microsoft Visual Studio 9.0\VC\redist\x86\Microsoft.VC90.CRT\*.*'))]
# Trust Stores
plugin_data_path = 'plugins\\data\\trust_stores'
plugin_data_files = []
for file in os.listdir(plugin_data_path):
file = os.path.join(plugin_data_path, file)
if os.path.isfile(file): # skip directories
plugin_data_files.append( file)
data_files.append((plugin_data_path, plugin_data_files))
sslyze_setup_py2exe = SSLYZE_SETUP.copy()
sslyze_setup_py2exe.update(
{
'console' : ['sslyze.py'],
'data_files' : data_files,
'zipfile' : None,
'options' : {'py2exe':{
#'skip_archive': True,
'bundle_files': 1,
}}
}
)
setup(**sslyze_setup_py2exe)
## Instruction:
Fix trust stores paths for py2exe builds
## Code After:
from distutils.core import setup
from glob import glob
import os
import py2exe
from setup import SSLYZE_SETUP
data_files = [("Microsoft.VC90.CRT", glob(r'C:\Program Files\Microsoft Visual Studio 9.0\VC\redist\x86\Microsoft.VC90.CRT\*.*'))]
# Trust Stores
plugin_data_files = []
for file in os.listdir('plugins\\data\\trust_stores'):
file = os.path.join('plugins\\data\\trust_stores', file)
if os.path.isfile(file): # skip directories
plugin_data_files.append( file)
data_files.append(('data\\trust_stores', plugin_data_files))
sslyze_setup_py2exe = SSLYZE_SETUP.copy()
sslyze_setup_py2exe.update(
{
'console' : ['sslyze.py'],
'data_files' : data_files,
'zipfile' : None,
'options' : {'py2exe':{
#'skip_archive': True,
'bundle_files': 1,
}}
}
)
setup(**sslyze_setup_py2exe)
|
// ... existing code ...
data_files = [("Microsoft.VC90.CRT", glob(r'C:\Program Files\Microsoft Visual Studio 9.0\VC\redist\x86\Microsoft.VC90.CRT\*.*'))]
# Trust Stores
plugin_data_files = []
for file in os.listdir('plugins\\data\\trust_stores'):
file = os.path.join('plugins\\data\\trust_stores', file)
if os.path.isfile(file): # skip directories
plugin_data_files.append( file)
data_files.append(('data\\trust_stores', plugin_data_files))
sslyze_setup_py2exe = SSLYZE_SETUP.copy()
// ... rest of the code ...
|
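For readers unfamiliar with py2exe's convention, each `data_files` entry is a `(target_directory, [source_files])` pair; the fix above changes the target so the trust stores land under `data\trust_stores` next to the executable instead of under the source-tree path. A purely illustrative sketch with Windows-style paths as in the record (directory contents are whatever exists at build time):

import os

# Collect files from the source tree but install them under a path
# relative to the frozen executable (illustrative only).
src_dir = "plugins\\data\\trust_stores"   # where the files live in the repo
dst_dir = "data\\trust_stores"            # where py2exe should place them

store_files = [
    os.path.join(src_dir, name)
    for name in os.listdir(src_dir)
    if os.path.isfile(os.path.join(src_dir, name))
]
data_files = [(dst_dir, store_files)]     # (target, [sources]) pairs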
1421dd89b74bf753cf0b52a5e6fe200d221922b5
|
pirx/utils.py
|
pirx/utils.py
|
import os
def setting(name):
return name.upper()
def path(subpath):
project_root = os.path.dirname(os.path.realpath(__file__))
return os.path.join(project_root, subpath)
|
import os
def setting(name):
return name.upper()
def path(subpath):
import __main__
project_root = os.path.dirname(os.path.realpath(__main__.__file__))
return os.path.join(project_root, subpath)
|
Fix 'path' function: use main's file as project root
|
Fix 'path' function: use main's file as project root
|
Python
|
mit
|
piotrekw/pirx
|
python
|
## Code Before:
import os
def setting(name):
return name.upper()
def path(subpath):
project_root = os.path.dirname(os.path.realpath(__file__))
return os.path.join(project_root, subpath)
## Instruction:
Fix 'path' function: use main's file as project root
## Code After:
import os
def setting(name):
return name.upper()
def path(subpath):
import __main__
project_root = os.path.dirname(os.path.realpath(__main__.__file__))
return os.path.join(project_root, subpath)
|
# ... existing code ...
return name.upper()
def path(subpath):
import __main__
project_root = os.path.dirname(os.path.realpath(__main__.__file__))
return os.path.join(project_root, subpath)
# ... rest of the code ...
|
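A short hedged sketch of why the record swaps `__file__` for `__main__.__file__`: the former points at the installed `pirx` package, the latter at the script the user actually ran. The example paths in the comments are hypothetical:

import os
import __main__

# Inside pirx/utils.py, __file__ resolves to something like
# .../site-packages/pirx/utils.py, so paths were built inside the library.
library_root = os.path.dirname(os.path.realpath(__file__))

# __main__.__file__ resolves to the entry-point script instead, e.g.
# /home/user/myproject/manage.py, giving the user's project root.
project_root = os.path.dirname(os.path.realpath(__main__.__file__))

templates_dir = os.path.join(project_root, "templates")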
23e1efbd24e317e6571d8436fc414dae9a3da767
|
salt/output/__init__.py
|
salt/output/__init__.py
|
'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
STATIC = (
'yaml_out',
'text_out',
'raw_out',
'json_out',
)
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
print(get_printout(out, opts)(data).rstrip())
def get_printout(out, opts=None, **kwargs):
'''
Return a printer function
'''
for outputter in STATIC:
if outputter in opts:
if opts[outputter]:
if outputter == 'text_out':
out = 'txt'
else:
out = outputter
if out is None:
out = 'pprint'
if out.endswith('_out'):
out = out[:-4]
if opts is None:
opts = {}
opts.update(kwargs)
if not 'color' in opts:
opts['color'] = not bool(opts.get('no_color', False))
outputters = salt.loader.outputters(opts)
if not out in outputters:
return outputters['pprint']
return outputters[out]
|
'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
STATIC = (
'yaml_out',
'text_out',
'raw_out',
'json_out',
)
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
print(get_printout(out, opts)(data).rstrip())
def get_printout(out, opts=None, **kwargs):
'''
Return a printer function
'''
for outputter in STATIC:
if outputter in opts:
if opts[outputter]:
if outputter == 'text_out':
out = 'txt'
else:
out = outputter
if out is None:
out = 'pprint'
if out.endswith('_out'):
out = out[:-4]
if opts is None:
opts = {}
opts.update(kwargs)
if not 'color' in opts:
opts['color'] = not bool(opts.get('no_color', False))
outputters = salt.loader.outputters(opts)
if not out in outputters:
return outputters['pprint']
return outputters[out]
def out_format(data, out, opts=None):
'''
Return the formatted outputter string for the passed data
'''
return get_printout(out, opts)(data).rstrip()
|
Add function to outputter that returns the raw string to print
|
Add function to outputter that returns the raw string to print
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
python
|
## Code Before:
'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
STATIC = (
'yaml_out',
'text_out',
'raw_out',
'json_out',
)
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
print(get_printout(out, opts)(data).rstrip())
def get_printout(out, opts=None, **kwargs):
'''
Return a printer function
'''
for outputter in STATIC:
if outputter in opts:
if opts[outputter]:
if outputter == 'text_out':
out = 'txt'
else:
out = outputter
if out is None:
out = 'pprint'
if out.endswith('_out'):
out = out[:-4]
if opts is None:
opts = {}
opts.update(kwargs)
if not 'color' in opts:
opts['color'] = not bool(opts.get('no_color', False))
outputters = salt.loader.outputters(opts)
if not out in outputters:
return outputters['pprint']
return outputters[out]
## Instruction:
Add function to outputter that returns the raw string to print
## Code After:
'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
STATIC = (
'yaml_out',
'text_out',
'raw_out',
'json_out',
)
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
print(get_printout(out, opts)(data).rstrip())
def get_printout(out, opts=None, **kwargs):
'''
Return a printer function
'''
for outputter in STATIC:
if outputter in opts:
if opts[outputter]:
if outputter == 'text_out':
out = 'txt'
else:
out = outputter
if out is None:
out = 'pprint'
if out.endswith('_out'):
out = out[:-4]
if opts is None:
opts = {}
opts.update(kwargs)
if not 'color' in opts:
opts['color'] = not bool(opts.get('no_color', False))
outputters = salt.loader.outputters(opts)
if not out in outputters:
return outputters['pprint']
return outputters[out]
def out_format(data, out, opts=None):
'''
Return the formatted outputter string for the passed data
'''
return get_printout(out, opts)(data).rstrip()
|
...
if not out in outputters:
return outputters['pprint']
return outputters[out]
def out_format(data, out, opts=None):
'''
Return the formatted outputter string for the passed data
'''
return get_printout(out, opts)(data).rstrip()
...
|
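A minimal usage sketch of the new helper relative to `display_output` — the data and opts below are hypothetical, and it presumes a Salt environment where `salt.loader.outputters(opts)` can load the requested outputter (real Salt opts carry many more keys than shown):

data = {"minion1": {"test.ping": True}}
opts = {"color": False}

# display_output() prints straight to stdout:
display_output(data, "yaml", opts=opts)

# out_format() returns the rendered string instead, so the caller can
# log it, send it over the wire, or post-process it first:
rendered = out_format(data, "yaml", opts=opts)
assert isinstance(rendered, str)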
6a9524502ebf3c04dede24fb937baec5c48342ef
|
widgy/contrib/widgy_mezzanine/search_indexes.py
|
widgy/contrib/widgy_mezzanine/search_indexes.py
|
from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
html = render_root({}, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
|
from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
context = {'_current_page': obj.page_ptr, 'page': obj.page_ptr}
html = render_root(context, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
|
Use a more realistic context to render pages for search
|
Use a more realistic context to render pages for search
The Mezzanine page middleware adds a page and _current_page to the
context for pages, so our search index should too.
|
Python
|
apache-2.0
|
j00bar/django-widgy,j00bar/django-widgy,j00bar/django-widgy
|
python
|
## Code Before:
from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
html = render_root({}, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
## Instruction:
Use a more realistic context to render pages for search
The Mezzanine page middleware adds a page and _current_page to the
context for pages, so our search index should too.
## Code After:
from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
context = {'_current_page': obj.page_ptr, 'page': obj.page_ptr}
html = render_root(context, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
|
# ... existing code ...
return self.get_model().objects.published()
def prepare_text(self, obj):
context = {'_current_page': obj.page_ptr, 'page': obj.page_ptr}
html = render_root(context, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
# ... rest of the code ...
|
5ed5855efe09c92efbf93dab5eb0b37325072381
|
opps/api/__init__.py
|
opps/api/__init__.py
|
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
method = getattr(request, request.method)
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
|
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
try:
method = getattr(request, request.method)
except:
method = request.GET
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
|
Fix method get on ApiKeyAuthentication
|
Fix method get on ApiKeyAuthentication
|
Python
|
mit
|
jeanmask/opps,opps/opps,YACOWS/opps,opps/opps,opps/opps,jeanmask/opps,williamroot/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,opps/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,jeanmask/opps
|
python
|
## Code Before:
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
method = getattr(request, request.method)
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
## Instruction:
Fix method get on ApiKeyAuthentication
## Code After:
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
try:
method = getattr(request, request.method)
except:
method = request.GET
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
|
# ... existing code ...
if request.method == 'GET' and 'GET' in self.method:
return True
try:
method = getattr(request, request.method)
except:
method = request.GET
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
# ... rest of the code ...
|
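To make the failure mode concrete: Django's `HttpRequest` only exposes `GET` and `POST` attributes, so `getattr(request, 'DELETE')` raises and previously broke authentication for those verbs. A hedged, stand-alone sketch of the guarded lookup (the request object here is a stub, not Django's, and the sketch narrows the record's bare `except:` to `AttributeError`):

class FakeRequest:
    """Stub mimicking the relevant bits of django.http.HttpRequest."""
    method = "DELETE"
    GET = {"api_username": "alice", "api_key": "s3cret"}
    # note: no DELETE attribute, just like the real HttpRequest

def credential_source(request):
    # Same shape as the fixed is_authenticated(): prefer request.<METHOD>,
    # fall back to the query string when that attribute does not exist.
    try:
        return getattr(request, request.method)
    except AttributeError:
        return request.GET

assert credential_source(FakeRequest())["api_username"] == "alice"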
19fb86f8b3a2307489f926d9d5d78bd84c6b05a1
|
Sketches/MH/TimerMixIn.py
|
Sketches/MH/TimerMixIn.py
|
from Axon.Component import component
from threading import Timer
class TimerMixIn(object):
def __init__(self, *argl, **argd):
super(TimerMixIn,self).__init__(*argl,**argd)
self.timer = None
self.timerSuccess = True
def startTimer(self, secs):
self.timer = Timer(secs, self.__handleTimerDone)
self.timerSuccess = False
self.timer.start()
def cancelTimer(self):
if self.timer is not None and self.timer:
self.timer.cancel()
self.timer = None
self.timerSuccess = False
def timerRunning(self):
return self.timer is not None
def timerWasCancelled(self):
return not self.timerSuccess
def __handleTimerDone(self):
self.scheduler.wakeThread(self)
self.timer = None
self.timerSuccess = True
if __name__ == "__main__":
from Kamaelia.Chassis.Pipeline import Pipeline
from Kamaelia.Util.Console import ConsoleEchoer
class TestComponent(TimerMixIn,component):
def __init__(self):
super(TestComponent,self).__init__()
def main(self):
count = 0
while True:
self.startTimer(0.5)
while self.timerRunning():
self.pause()
yield 1
self.send(count, "outbox")
count=count+1
Pipeline(TestComponent(),ConsoleEchoer()).run()
|
from Axon.Component import component
from threading import Timer
class TimerMixIn(object):
def __init__(self, *argl, **argd):
super(TimerMixIn,self).__init__(*argl,**argd)
self.timer = None
self.timerSuccess = True
def startTimer(self, secs):
if self.timer is not None:
self.cancelTimer()
self.timer = Timer(secs, self.__handleTimerDone)
self.timerSuccess = False
self.timer.start()
def cancelTimer(self):
if self.timer is not None and self.timer:
self.timer.cancel()
self.timer = None
self.timerSuccess = False
def timerRunning(self):
return self.timer is not None
def timerWasCancelled(self):
return not self.timerSuccess
def __handleTimerDone(self):
self.scheduler.wakeThread(self)
self.timer = None
self.timerSuccess = True
if __name__ == "__main__":
from Kamaelia.Chassis.Pipeline import Pipeline
from Kamaelia.Util.Console import ConsoleEchoer
class TestComponent(TimerMixIn,component):
def __init__(self):
super(TestComponent,self).__init__()
def main(self):
count = 0
while True:
self.startTimer(0.5)
while self.timerRunning():
self.pause()
yield 1
self.send(count, "outbox")
count=count+1
Pipeline(TestComponent(),ConsoleEchoer()).run()
|
Handle the situation where the timer is already running.
|
Handle the situation where the timer is already running.
|
Python
|
apache-2.0
|
sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia
|
python
|
## Code Before:
from Axon.Component import component
from threading import Timer
class TimerMixIn(object):
def __init__(self, *argl, **argd):
super(TimerMixIn,self).__init__(*argl,**argd)
self.timer = None
self.timerSuccess = True
def startTimer(self, secs):
self.timer = Timer(secs, self.__handleTimerDone)
self.timerSuccess = False
self.timer.start()
def cancelTimer(self):
if self.timer is not None and self.timer:
self.timer.cancel()
self.timer = None
self.timerSuccess = False
def timerRunning(self):
return self.timer is not None
def timerWasCancelled(self):
return not self.timerSuccess
def __handleTimerDone(self):
self.scheduler.wakeThread(self)
self.timer = None
self.timerSuccess = True
if __name__ == "__main__":
from Kamaelia.Chassis.Pipeline import Pipeline
from Kamaelia.Util.Console import ConsoleEchoer
class TestComponent(TimerMixIn,component):
def __init__(self):
super(TestComponent,self).__init__()
def main(self):
count = 0
while True:
self.startTimer(0.5)
while self.timerRunning():
self.pause()
yield 1
self.send(count, "outbox")
count=count+1
Pipeline(TestComponent(),ConsoleEchoer()).run()
## Instruction:
Handle the situation where the timer is already running.
## Code After:
from Axon.Component import component
from threading import Timer
class TimerMixIn(object):
def __init__(self, *argl, **argd):
super(TimerMixIn,self).__init__(*argl,**argd)
self.timer = None
self.timerSuccess = True
def startTimer(self, secs):
if self.timer is not None:
self.cancelTimer()
self.timer = Timer(secs, self.__handleTimerDone)
self.timerSuccess = False
self.timer.start()
def cancelTimer(self):
if self.timer is not None and self.timer:
self.timer.cancel()
self.timer = None
self.timerSuccess = False
def timerRunning(self):
return self.timer is not None
def timerWasCancelled(self):
return not self.timerSuccess
def __handleTimerDone(self):
self.scheduler.wakeThread(self)
self.timer = None
self.timerSuccess = True
if __name__ == "__main__":
from Kamaelia.Chassis.Pipeline import Pipeline
from Kamaelia.Util.Console import ConsoleEchoer
class TestComponent(TimerMixIn,component):
def __init__(self):
super(TestComponent,self).__init__()
def main(self):
count = 0
while True:
self.startTimer(0.5)
while self.timerRunning():
self.pause()
yield 1
self.send(count, "outbox")
count=count+1
Pipeline(TestComponent(),ConsoleEchoer()).run()
|
...
self.timerSuccess = True
def startTimer(self, secs):
if self.timer is not None:
self.cancelTimer()
self.timer = Timer(secs, self.__handleTimerDone)
self.timerSuccess = False
self.timer.start()
...
|
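The guard added above boils down to "cancel the old `threading.Timer` before creating a new one"; a small stand-alone sketch of that pattern outside of Axon (the callback and delays are arbitrary):

import threading

def fired():
    print("timer fired")

# First timer started...
timer = threading.Timer(5.0, fired)
timer.start()

# ...then restarted before it fires. Without cancel(), both Timer threads
# would stay alive and both callbacks would eventually run.
timer.cancel()
timer = threading.Timer(5.0, fired)
timer.start()

timer.cancel()  # tidy up so the sketch exits immediately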
3330678d6474a876e2d18edce995bd82ba027472
|
gittools.py
|
gittools.py
|
import os
import sha
def gitsha(path):
h = sha.sha()
data = file(path).read()
h.update("blob %d\0" % len(data))
h.update(data)
return h.hexdigest()
def git_info():
commithash = file('.git/HEAD').read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else:
print "NOT updating git cache, local changes might not be detected"
changed = bool(os.popen('git-diff-index -r HEAD').read())
return commithash, changed
def snapshot_info():
info = file('commithash').read().split('\n')
commithash = info[0]
changed = False
for line in [a for a in info[2:] if a]:
[mode, tag, sha, path] = line.split(None, 4)
if tag != 'blob':
continue
if gitsha(path) != sha:
changed = True
break
return commithash, changed
def get_info():
try:
return git_info()
except:
try:
return snapshot_info()
except:
return 'Unknown', False
|
import os
import sha
def gitsha(path):
h = sha.sha()
data = file(path).read()
h.update("blob %d\0" % len(data))
h.update(data)
return h.hexdigest()
def git_info():
commithash = file('.git/HEAD').read().strip()
if commithash.startswith("ref: "):
commithash = file(commithash[5:]).read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else:
print "NOT updating git cache, local changes might not be detected"
changed = bool(os.popen('git-diff-index -r HEAD').read())
return commithash, changed
def snapshot_info():
info = file('commithash').read().split('\n')
commithash = info[0]
changed = False
for line in [a for a in info[2:] if a]:
[mode, tag, sha, path] = line.split(None, 4)
if tag != 'blob':
continue
if gitsha(path) != sha:
changed = True
break
return commithash, changed
def get_info():
try:
return git_info()
except:
try:
return snapshot_info()
except:
return 'Unknown', False
|
Handle symbolic refs in .git/HEAD
|
OTHER: Handle symbolic refs in .git/HEAD
|
Python
|
lgpl-2.1
|
xmms2/xmms2-stable,dreamerc/xmms2,oneman/xmms2-oneman,xmms2/xmms2-stable,chrippa/xmms2,theefer/xmms2,oneman/xmms2-oneman-old,six600110/xmms2,mantaraya36/xmms2-mantaraya36,mantaraya36/xmms2-mantaraya36,mantaraya36/xmms2-mantaraya36,chrippa/xmms2,theeternalsw0rd/xmms2,krad-radio/xmms2-krad,theeternalsw0rd/xmms2,oneman/xmms2-oneman,oneman/xmms2-oneman,six600110/xmms2,oneman/xmms2-oneman-old,krad-radio/xmms2-krad,theefer/xmms2,theefer/xmms2,mantaraya36/xmms2-mantaraya36,xmms2/xmms2-stable,oneman/xmms2-oneman,dreamerc/xmms2,xmms2/xmms2-stable,dreamerc/xmms2,chrippa/xmms2,theefer/xmms2,six600110/xmms2,oneman/xmms2-oneman,mantaraya36/xmms2-mantaraya36,theefer/xmms2,oneman/xmms2-oneman-old,chrippa/xmms2,theeternalsw0rd/xmms2,mantaraya36/xmms2-mantaraya36,dreamerc/xmms2,theeternalsw0rd/xmms2,theefer/xmms2,chrippa/xmms2,krad-radio/xmms2-krad,oneman/xmms2-oneman,dreamerc/xmms2,six600110/xmms2,six600110/xmms2,mantaraya36/xmms2-mantaraya36,oneman/xmms2-oneman,theeternalsw0rd/xmms2,xmms2/xmms2-stable,oneman/xmms2-oneman-old,xmms2/xmms2-stable,six600110/xmms2,oneman/xmms2-oneman-old,krad-radio/xmms2-krad,krad-radio/xmms2-krad,krad-radio/xmms2-krad,chrippa/xmms2,theefer/xmms2,theeternalsw0rd/xmms2
|
python
|
## Code Before:
import os
import sha
def gitsha(path):
h = sha.sha()
data = file(path).read()
h.update("blob %d\0" % len(data))
h.update(data)
return h.hexdigest()
def git_info():
commithash = file('.git/HEAD').read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else:
print "NOT updating git cache, local changes might not be detected"
changed = bool(os.popen('git-diff-index -r HEAD').read())
return commithash, changed
def snapshot_info():
info = file('commithash').read().split('\n')
commithash = info[0]
changed = False
for line in [a for a in info[2:] if a]:
[mode, tag, sha, path] = line.split(None, 4)
if tag != 'blob':
continue
if gitsha(path) != sha:
changed = True
break
return commithash, changed
def get_info():
try:
return git_info()
except:
try:
return snapshot_info()
except:
return 'Unknown', False
## Instruction:
OTHER: Handle symbolic refs in .git/HEAD
## Code After:
import os
import sha
def gitsha(path):
h = sha.sha()
data = file(path).read()
h.update("blob %d\0" % len(data))
h.update(data)
return h.hexdigest()
def git_info():
commithash = file('.git/HEAD').read().strip()
if commithash.startswith("ref: "):
commithash = file(commithash[5:]).read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else:
print "NOT updating git cache, local changes might not be detected"
changed = bool(os.popen('git-diff-index -r HEAD').read())
return commithash, changed
def snapshot_info():
info = file('commithash').read().split('\n')
commithash = info[0]
changed = False
for line in [a for a in info[2:] if a]:
[mode, tag, sha, path] = line.split(None, 4)
if tag != 'blob':
continue
if gitsha(path) != sha:
changed = True
break
return commithash, changed
def get_info():
try:
return git_info()
except:
try:
return snapshot_info()
except:
return 'Unknown', False
|
# ... existing code ...
def git_info():
commithash = file('.git/HEAD').read().strip()
if commithash.startswith("ref: "):
commithash = file(commithash[5:]).read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else:
# ... rest of the code ...
|
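For context, `.git/HEAD` normally contains a symbolic ref such as `ref: refs/heads/master` rather than a raw hash, which is what the added `startswith("ref: ")` branch handles. A Python 3 sketch of the same idea (the record, being Python 2, opens the ref path relative to the working directory; the conventional `.git/<ref>` location is used here, and packed refs are ignored):

import os

def current_commit(repo="."):
    # Read .git/HEAD; dereference it if it is a symbolic ref.
    head = open(os.path.join(repo, ".git", "HEAD")).read().strip()
    if head.startswith("ref: "):
        # e.g. "ref: refs/heads/master" -> .git/refs/heads/master
        ref_path = os.path.join(repo, ".git", head[5:])
        head = open(ref_path).read().strip()
    return head  # a 40-character commit hash for SHA-1 repositories

print(current_commit())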
414f6e9174b8c7b88866319af19a5e36fcec643d
|
kk/admin/__init__.py
|
kk/admin/__init__.py
|
from django.contrib import admin
from kk.models import Hearing, Label, Introduction, Scenario, Comment
admin.site.register(Label)
admin.site.register(Hearing)
admin.site.register(Introduction)
admin.site.register(Scenario)
admin.site.register(Comment)
|
from django.contrib import admin
from kk import models
### Inlines
class IntroductionInline(admin.StackedInline):
model = models.Introduction
extra = 0
exclude = ["id"]
class ScenarioInline(admin.StackedInline):
model = models.Scenario
extra = 0
exclude = ["id"]
class HearingImageInline(admin.StackedInline):
model = models.HearingImage
extra = 0
class IntroductionImageInline(admin.StackedInline):
model = models.IntroductionImage
extra = 0
class ScenarioImageInline(admin.StackedInline):
model = models.ScenarioImage
extra = 0
### Admins
class HearingAdmin(admin.ModelAdmin):
inlines = [HearingImageInline, IntroductionInline, ScenarioInline]
class IntroductionAdmin(admin.ModelAdmin):
inlines = [IntroductionImageInline]
class ScenarioAdmin(admin.ModelAdmin):
inlines = [ScenarioImageInline]
### Wire it up!
admin.site.register(models.Label)
admin.site.register(models.Hearing, HearingAdmin)
admin.site.register(models.Introduction, IntroductionAdmin)
admin.site.register(models.Scenario, ScenarioAdmin)
|
Make the admin a little bit more palatable
|
Make the admin a little bit more palatable
Refs #25
|
Python
|
mit
|
stephawe/kerrokantasi,City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi,vikoivun/kerrokantasi,City-of-Helsinki/kerrokantasi,stephawe/kerrokantasi,vikoivun/kerrokantasi,vikoivun/kerrokantasi,stephawe/kerrokantasi,City-of-Helsinki/kerrokantasi
|
python
|
## Code Before:
from django.contrib import admin
from kk.models import Hearing, Label, Introduction, Scenario, Comment
admin.site.register(Label)
admin.site.register(Hearing)
admin.site.register(Introduction)
admin.site.register(Scenario)
admin.site.register(Comment)
## Instruction:
Make the admin a little bit more palatable
Refs #25
## Code After:
from django.contrib import admin
from kk import models
### Inlines
class IntroductionInline(admin.StackedInline):
model = models.Introduction
extra = 0
exclude = ["id"]
class ScenarioInline(admin.StackedInline):
model = models.Scenario
extra = 0
exclude = ["id"]
class HearingImageInline(admin.StackedInline):
model = models.HearingImage
extra = 0
class IntroductionImageInline(admin.StackedInline):
model = models.IntroductionImage
extra = 0
class ScenarioImageInline(admin.StackedInline):
model = models.ScenarioImage
extra = 0
### Admins
class HearingAdmin(admin.ModelAdmin):
inlines = [HearingImageInline, IntroductionInline, ScenarioInline]
class IntroductionAdmin(admin.ModelAdmin):
inlines = [IntroductionImageInline]
class ScenarioAdmin(admin.ModelAdmin):
inlines = [ScenarioImageInline]
### Wire it up!
admin.site.register(models.Label)
admin.site.register(models.Hearing, HearingAdmin)
admin.site.register(models.Introduction, IntroductionAdmin)
admin.site.register(models.Scenario, ScenarioAdmin)
|
# ... existing code ...
from django.contrib import admin
from kk import models
### Inlines
class IntroductionInline(admin.StackedInline):
model = models.Introduction
extra = 0
exclude = ["id"]
class ScenarioInline(admin.StackedInline):
model = models.Scenario
extra = 0
exclude = ["id"]
class HearingImageInline(admin.StackedInline):
model = models.HearingImage
extra = 0
class IntroductionImageInline(admin.StackedInline):
model = models.IntroductionImage
extra = 0
class ScenarioImageInline(admin.StackedInline):
model = models.ScenarioImage
extra = 0
### Admins
class HearingAdmin(admin.ModelAdmin):
inlines = [HearingImageInline, IntroductionInline, ScenarioInline]
class IntroductionAdmin(admin.ModelAdmin):
inlines = [IntroductionImageInline]
class ScenarioAdmin(admin.ModelAdmin):
inlines = [ScenarioImageInline]
### Wire it up!
admin.site.register(models.Label)
admin.site.register(models.Hearing, HearingAdmin)
admin.site.register(models.Introduction, IntroductionAdmin)
admin.site.register(models.Scenario, ScenarioAdmin)
# ... rest of the code ...
|
4c0d88fe4d3fb935c5040fa39c5db10f74e6908f
|
pinax/stripe/utils.py
|
pinax/stripe/utils.py
|
import datetime
import decimal
from django.utils import timezone
def convert_tstamp(response, field_name=None):
try:
if field_name and response[field_name]:
return datetime.datetime.fromtimestamp(
response[field_name],
timezone.utc
)
if not field_name:
return datetime.datetime.fromtimestamp(
response,
timezone.utc
)
except KeyError:
pass
return None
# currencies those amount=1 means 100 cents
# https://support.stripe.com/questions/which-zero-decimal-currencies-does-stripe-support
ZERO_DECIMAL_CURRENCIES = [
"bif", "clp", "djf", "gnf", "jpy", "kmf", "krw",
"mga", "pyg", "rwf", "vuv", "xaf", "xof", "xpf",
]
def convert_amount_for_db(amount, currency="usd"):
if currency is None: # @@@ not sure if this is right; find out what we should do when API returns null for currency
currency = "usd"
return (amount / decimal.Decimal("100")) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else decimal.Decimal(amount)
def convert_amount_for_api(amount, currency="usd"):
if currency is None:
currency = "usd"
return int(amount * 100) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else int(amount)
|
import datetime
import decimal
from django.utils import timezone
def convert_tstamp(response, field_name=None):
try:
if field_name and response[field_name]:
return datetime.datetime.fromtimestamp(
response[field_name],
timezone.utc
)
if response is not None and not field_name:
return datetime.datetime.fromtimestamp(
response,
timezone.utc
)
except KeyError:
pass
return None
# currencies those amount=1 means 100 cents
# https://support.stripe.com/questions/which-zero-decimal-currencies-does-stripe-support
ZERO_DECIMAL_CURRENCIES = [
"bif", "clp", "djf", "gnf", "jpy", "kmf", "krw",
"mga", "pyg", "rwf", "vuv", "xaf", "xof", "xpf",
]
def convert_amount_for_db(amount, currency="usd"):
if currency is None: # @@@ not sure if this is right; find out what we should do when API returns null for currency
currency = "usd"
return (amount / decimal.Decimal("100")) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else decimal.Decimal(amount)
def convert_amount_for_api(amount, currency="usd"):
if currency is None:
currency = "usd"
return int(amount * 100) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else int(amount)
|
Handle case when response is None
|
Handle case when response is None
|
Python
|
mit
|
pinax/django-stripe-payments
|
python
|
## Code Before:
import datetime
import decimal
from django.utils import timezone
def convert_tstamp(response, field_name=None):
try:
if field_name and response[field_name]:
return datetime.datetime.fromtimestamp(
response[field_name],
timezone.utc
)
if not field_name:
return datetime.datetime.fromtimestamp(
response,
timezone.utc
)
except KeyError:
pass
return None
# currencies those amount=1 means 100 cents
# https://support.stripe.com/questions/which-zero-decimal-currencies-does-stripe-support
ZERO_DECIMAL_CURRENCIES = [
"bif", "clp", "djf", "gnf", "jpy", "kmf", "krw",
"mga", "pyg", "rwf", "vuv", "xaf", "xof", "xpf",
]
def convert_amount_for_db(amount, currency="usd"):
if currency is None: # @@@ not sure if this is right; find out what we should do when API returns null for currency
currency = "usd"
return (amount / decimal.Decimal("100")) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else decimal.Decimal(amount)
def convert_amount_for_api(amount, currency="usd"):
if currency is None:
currency = "usd"
return int(amount * 100) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else int(amount)
## Instruction:
Handle case when response is None
## Code After:
import datetime
import decimal
from django.utils import timezone
def convert_tstamp(response, field_name=None):
try:
if field_name and response[field_name]:
return datetime.datetime.fromtimestamp(
response[field_name],
timezone.utc
)
if response is not None and not field_name:
return datetime.datetime.fromtimestamp(
response,
timezone.utc
)
except KeyError:
pass
return None
# currencies those amount=1 means 100 cents
# https://support.stripe.com/questions/which-zero-decimal-currencies-does-stripe-support
ZERO_DECIMAL_CURRENCIES = [
"bif", "clp", "djf", "gnf", "jpy", "kmf", "krw",
"mga", "pyg", "rwf", "vuv", "xaf", "xof", "xpf",
]
def convert_amount_for_db(amount, currency="usd"):
if currency is None: # @@@ not sure if this is right; find out what we should do when API returns null for currency
currency = "usd"
return (amount / decimal.Decimal("100")) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else decimal.Decimal(amount)
def convert_amount_for_api(amount, currency="usd"):
if currency is None:
currency = "usd"
return int(amount * 100) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else int(amount)
|
# ... existing code ...
response[field_name],
timezone.utc
)
if response is not None and not field_name:
return datetime.datetime.fromtimestamp(
response,
timezone.utc
# ... rest of the code ...
|
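A quick sanity check of the guarded behaviour — a hedged sketch that assumes Django is installed and the `convert_tstamp` from the record's "Code After" is in scope:

import datetime
from django.utils import timezone

# A None response (e.g. Stripe omitted the field) now falls through to
# `return None` instead of raising inside datetime.fromtimestamp().
assert convert_tstamp(None) is None
assert convert_tstamp({"canceled_at": None}, "canceled_at") is None

# Plain timestamps still convert as before:
assert convert_tstamp(0) == datetime.datetime(1970, 1, 1, tzinfo=timezone.utc)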
14798847730a8746c1a7bad18a2f9e0fda7e0756
|
wagtail/tests/testapp/migrations/0006_sectionedrichtextpage_sectionedrichtextpagesection.py
|
wagtail/tests/testapp/migrations/0006_sectionedrichtextpage_sectionedrichtextpagesection.py
|
from __future__ import unicode_literals
from django.db import migrations, models
import modelcluster.fields
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0029_unicode_slugfield_dj19'),
('tests', '0005_customrichblockfieldpage_customrichtextfieldpage_defaultrichblockfieldpage_defaultrichtextfieldpage'),
]
operations = [
migrations.CreateModel(
name='SectionedRichTextPage',
fields=[
('page_ptr', models.OneToOneField(parent_link=True, to='wagtailcore.Page', serialize=False, auto_created=True, primary_key=True)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='SectionedRichTextPageSection',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, serialize=False, auto_created=True)),
('sort_order', models.IntegerField(editable=False, null=True, blank=True)),
('body', wagtail.wagtailcore.fields.RichTextField()),
('page', modelcluster.fields.ParentalKey(related_name='sections', to='tests.SectionedRichTextPage')),
],
options={
'ordering': ['sort_order'],
'abstract': False,
},
),
]
|
from __future__ import unicode_literals
from django.db import migrations, models
import modelcluster.fields
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0028_merge'),
('tests', '0005_customrichblockfieldpage_customrichtextfieldpage_defaultrichblockfieldpage_defaultrichtextfieldpage'),
]
operations = [
migrations.CreateModel(
name='SectionedRichTextPage',
fields=[
('page_ptr', models.OneToOneField(parent_link=True, to='wagtailcore.Page', serialize=False, auto_created=True, primary_key=True)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='SectionedRichTextPageSection',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, serialize=False, auto_created=True)),
('sort_order', models.IntegerField(editable=False, null=True, blank=True)),
('body', wagtail.wagtailcore.fields.RichTextField()),
('page', modelcluster.fields.ParentalKey(related_name='sections', to='tests.SectionedRichTextPage')),
],
options={
'ordering': ['sort_order'],
'abstract': False,
},
),
]
|
Fix test migration for Wagtail 1.5
|
Fix test migration for Wagtail 1.5
|
Python
|
bsd-3-clause
|
nilnvoid/wagtail,kurtrwall/wagtail,gasman/wagtail,zerolab/wagtail,kaedroho/wagtail,kurtrwall/wagtail,torchbox/wagtail,zerolab/wagtail,mikedingjan/wagtail,rsalmaso/wagtail,nilnvoid/wagtail,kaedroho/wagtail,nutztherookie/wagtail,takeflight/wagtail,gasman/wagtail,kaedroho/wagtail,wagtail/wagtail,jnns/wagtail,nimasmi/wagtail,FlipperPA/wagtail,kurtw/wagtail,zerolab/wagtail,mixxorz/wagtail,FlipperPA/wagtail,Toshakins/wagtail,wagtail/wagtail,takeflight/wagtail,timorieber/wagtail,chrxr/wagtail,kurtw/wagtail,mixxorz/wagtail,nimasmi/wagtail,iansprice/wagtail,kurtrwall/wagtail,Toshakins/wagtail,zerolab/wagtail,mikedingjan/wagtail,rsalmaso/wagtail,FlipperPA/wagtail,nealtodd/wagtail,gasman/wagtail,iansprice/wagtail,mixxorz/wagtail,chrxr/wagtail,zerolab/wagtail,Toshakins/wagtail,takeflight/wagtail,thenewguy/wagtail,nealtodd/wagtail,timorieber/wagtail,iansprice/wagtail,thenewguy/wagtail,mixxorz/wagtail,jnns/wagtail,wagtail/wagtail,nutztherookie/wagtail,thenewguy/wagtail,kurtw/wagtail,mikedingjan/wagtail,takeflight/wagtail,nutztherookie/wagtail,chrxr/wagtail,torchbox/wagtail,nealtodd/wagtail,timorieber/wagtail,nealtodd/wagtail,jnns/wagtail,kaedroho/wagtail,gasman/wagtail,nimasmi/wagtail,rsalmaso/wagtail,Toshakins/wagtail,wagtail/wagtail,kaedroho/wagtail,chrxr/wagtail,thenewguy/wagtail,rsalmaso/wagtail,torchbox/wagtail,timorieber/wagtail,nilnvoid/wagtail,FlipperPA/wagtail,mixxorz/wagtail,kurtw/wagtail,thenewguy/wagtail,nimasmi/wagtail,nutztherookie/wagtail,iansprice/wagtail,kurtrwall/wagtail,nilnvoid/wagtail,torchbox/wagtail,jnns/wagtail,mikedingjan/wagtail,wagtail/wagtail,gasman/wagtail,rsalmaso/wagtail
|
python
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations, models
import modelcluster.fields
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0029_unicode_slugfield_dj19'),
('tests', '0005_customrichblockfieldpage_customrichtextfieldpage_defaultrichblockfieldpage_defaultrichtextfieldpage'),
]
operations = [
migrations.CreateModel(
name='SectionedRichTextPage',
fields=[
('page_ptr', models.OneToOneField(parent_link=True, to='wagtailcore.Page', serialize=False, auto_created=True, primary_key=True)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='SectionedRichTextPageSection',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, serialize=False, auto_created=True)),
('sort_order', models.IntegerField(editable=False, null=True, blank=True)),
('body', wagtail.wagtailcore.fields.RichTextField()),
('page', modelcluster.fields.ParentalKey(related_name='sections', to='tests.SectionedRichTextPage')),
],
options={
'ordering': ['sort_order'],
'abstract': False,
},
),
]
## Instruction:
Fix test migration for Wagtail 1.5
## Code After:
from __future__ import unicode_literals
from django.db import migrations, models
import modelcluster.fields
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0028_merge'),
('tests', '0005_customrichblockfieldpage_customrichtextfieldpage_defaultrichblockfieldpage_defaultrichtextfieldpage'),
]
operations = [
migrations.CreateModel(
name='SectionedRichTextPage',
fields=[
('page_ptr', models.OneToOneField(parent_link=True, to='wagtailcore.Page', serialize=False, auto_created=True, primary_key=True)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='SectionedRichTextPageSection',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, serialize=False, auto_created=True)),
('sort_order', models.IntegerField(editable=False, null=True, blank=True)),
('body', wagtail.wagtailcore.fields.RichTextField()),
('page', modelcluster.fields.ParentalKey(related_name='sections', to='tests.SectionedRichTextPage')),
],
options={
'ordering': ['sort_order'],
'abstract': False,
},
),
]
|
# ... existing code ...
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0028_merge'),
('tests', '0005_customrichblockfieldpage_customrichtextfieldpage_defaultrichblockfieldpage_defaultrichtextfieldpage'),
]
# ... rest of the code ...
|
4c56c3454350251e32f83e8475fc4dfbe666cba5
|
src/main/java/pl/aetas/bakspad/presentation/LocaleAwareStringComparator.java
|
src/main/java/pl/aetas/bakspad/presentation/LocaleAwareStringComparator.java
|
package pl.aetas.bakspad.presentation;
import java.text.Collator;
import java.util.Comparator;
import java.util.Locale;
public class LocaleAwareStringComparator implements Comparator<String> {
private final Collator localeCollator;
public LocaleAwareStringComparator(Locale locale) {
localeCollator = Collator.getInstance(locale);
}
@Override
public int compare(String o1, String o2) {
return localeCollator.compare(o1, o2);
}
}
|
package pl.aetas.bakspad.presentation;
import java.text.Collator;
import java.util.Comparator;
import java.util.Locale;
public class LocaleAwareStringComparator implements Comparator<String> {
private final Collator localeCollator;
public LocaleAwareStringComparator(Locale locale) {
localeCollator = Collator.getInstance(locale);
}
@Override
public int compare(String o1, String o2) {
if (o1.isEmpty()) {
return 1;
}
if (o2.isEmpty()) {
return -1;
}
return localeCollator.compare(o1, o2);
}
}
|
Change the table column comparator to put empty strings at the end
|
Change the table column comparator to put empty strings at the end
|
Java
|
apache-2.0
|
maniekq/bakspad
|
java
|
## Code Before:
package pl.aetas.bakspad.presentation;
import java.text.Collator;
import java.util.Comparator;
import java.util.Locale;
public class LocaleAwareStringComparator implements Comparator<String> {
private final Collator localeCollator;
public LocaleAwareStringComparator(Locale locale) {
localeCollator = Collator.getInstance(locale);
}
@Override
public int compare(String o1, String o2) {
return localeCollator.compare(o1, o2);
}
}
## Instruction:
Change the table column comparator to put empty strings at the end
## Code After:
package pl.aetas.bakspad.presentation;
import java.text.Collator;
import java.util.Comparator;
import java.util.Locale;
public class LocaleAwareStringComparator implements Comparator<String> {
private final Collator localeCollator;
public LocaleAwareStringComparator(Locale locale) {
localeCollator = Collator.getInstance(locale);
}
@Override
public int compare(String o1, String o2) {
if (o1.isEmpty()) {
return 1;
}
if (o2.isEmpty()) {
return -1;
}
return localeCollator.compare(o1, o2);
}
}
|
...
@Override
public int compare(String o1, String o2) {
if (o1.isEmpty()) {
return 1;
}
if (o2.isEmpty()) {
return -1;
}
return localeCollator.compare(o1, o2);
}
}
...
|
b3eca2d7b8b7cf62f37e591b52c6b2a25ea2c259
|
src/protocolsupport/protocol/packet/middleimpl/clientbound/play/v_13/CustomPayload.java
|
src/protocolsupport/protocol/packet/middleimpl/clientbound/play/v_13/CustomPayload.java
|
package protocolsupport.protocol.packet.middleimpl.clientbound.play.v_13;
import protocolsupport.protocol.ConnectionImpl;
import protocolsupport.protocol.packet.ClientBoundPacket;
import protocolsupport.protocol.packet.middle.clientbound.play.MiddleCustomPayload;
import protocolsupport.protocol.packet.middleimpl.ClientBoundPacketData;
import protocolsupport.protocol.serializer.StringSerializer;
import protocolsupport.utils.recyclable.RecyclableCollection;
import protocolsupport.utils.recyclable.RecyclableSingletonList;
public class CustomPayload extends MiddleCustomPayload {
public CustomPayload(ConnectionImpl connection) {
super(connection);
}
@Override
public RecyclableCollection<ClientBoundPacketData> toData() {
ClientBoundPacketData serializer = ClientBoundPacketData.create(ClientBoundPacket.PLAY_CUSTOM_PAYLOAD_ID);
StringSerializer.writeString(serializer, connection.getVersion(), tag);
serializer.writeBytes(data);
return RecyclableSingletonList.create(serializer);
}
}
|
package protocolsupport.protocol.packet.middleimpl.clientbound.play.v_13;
import protocolsupport.protocol.ConnectionImpl;
import protocolsupport.protocol.packet.ClientBoundPacket;
import protocolsupport.protocol.packet.middle.clientbound.play.MiddleCustomPayload;
import protocolsupport.protocol.packet.middleimpl.ClientBoundPacketData;
import protocolsupport.protocol.serializer.MerchantDataSerializer;
import protocolsupport.protocol.serializer.StringSerializer;
import protocolsupport.protocol.typeremapper.legacy.LegacyCustomPayloadChannelName;
import protocolsupport.protocol.utils.ProtocolVersionsHelper;
import protocolsupport.utils.recyclable.RecyclableCollection;
import protocolsupport.utils.recyclable.RecyclableSingletonList;
public class CustomPayload extends MiddleCustomPayload {
public CustomPayload(ConnectionImpl connection) {
super(connection);
}
@Override
public RecyclableCollection<ClientBoundPacketData> toData() {
ClientBoundPacketData serializer = ClientBoundPacketData.create(ClientBoundPacket.PLAY_CUSTOM_PAYLOAD_ID);
StringSerializer.writeString(serializer, connection.getVersion(), tag);
switch (tag) {
case (LegacyCustomPayloadChannelName.MODERN_TRADER_LIST): {
String locale = cache.getAttributesCache().getLocale();
MerchantDataSerializer.writeMerchantData(
serializer, connection.getVersion(), locale,
MerchantDataSerializer.readMerchantData(data, ProtocolVersionsHelper.LATEST_PC, locale)
);
break;
}
default: {
serializer.writeBytes(data);
break;
}
}
return RecyclableSingletonList.create(serializer);
}
}
|
Fix traders in 1.13 and 1.13.1
|
Fix traders in 1.13 and 1.13.1
|
Java
|
agpl-3.0
|
ProtocolSupport/ProtocolSupport
|
java
|
## Code Before:
package protocolsupport.protocol.packet.middleimpl.clientbound.play.v_13;
import protocolsupport.protocol.ConnectionImpl;
import protocolsupport.protocol.packet.ClientBoundPacket;
import protocolsupport.protocol.packet.middle.clientbound.play.MiddleCustomPayload;
import protocolsupport.protocol.packet.middleimpl.ClientBoundPacketData;
import protocolsupport.protocol.serializer.StringSerializer;
import protocolsupport.utils.recyclable.RecyclableCollection;
import protocolsupport.utils.recyclable.RecyclableSingletonList;
public class CustomPayload extends MiddleCustomPayload {
public CustomPayload(ConnectionImpl connection) {
super(connection);
}
@Override
public RecyclableCollection<ClientBoundPacketData> toData() {
ClientBoundPacketData serializer = ClientBoundPacketData.create(ClientBoundPacket.PLAY_CUSTOM_PAYLOAD_ID);
StringSerializer.writeString(serializer, connection.getVersion(), tag);
serializer.writeBytes(data);
return RecyclableSingletonList.create(serializer);
}
}
## Instruction:
Fix traders in 1.13 and 1.13.1
## Code After:
package protocolsupport.protocol.packet.middleimpl.clientbound.play.v_13;
import protocolsupport.protocol.ConnectionImpl;
import protocolsupport.protocol.packet.ClientBoundPacket;
import protocolsupport.protocol.packet.middle.clientbound.play.MiddleCustomPayload;
import protocolsupport.protocol.packet.middleimpl.ClientBoundPacketData;
import protocolsupport.protocol.serializer.MerchantDataSerializer;
import protocolsupport.protocol.serializer.StringSerializer;
import protocolsupport.protocol.typeremapper.legacy.LegacyCustomPayloadChannelName;
import protocolsupport.protocol.utils.ProtocolVersionsHelper;
import protocolsupport.utils.recyclable.RecyclableCollection;
import protocolsupport.utils.recyclable.RecyclableSingletonList;
public class CustomPayload extends MiddleCustomPayload {
public CustomPayload(ConnectionImpl connection) {
super(connection);
}
@Override
public RecyclableCollection<ClientBoundPacketData> toData() {
ClientBoundPacketData serializer = ClientBoundPacketData.create(ClientBoundPacket.PLAY_CUSTOM_PAYLOAD_ID);
StringSerializer.writeString(serializer, connection.getVersion(), tag);
switch (tag) {
case (LegacyCustomPayloadChannelName.MODERN_TRADER_LIST): {
String locale = cache.getAttributesCache().getLocale();
MerchantDataSerializer.writeMerchantData(
serializer, connection.getVersion(), locale,
MerchantDataSerializer.readMerchantData(data, ProtocolVersionsHelper.LATEST_PC, locale)
);
break;
}
default: {
serializer.writeBytes(data);
break;
}
}
return RecyclableSingletonList.create(serializer);
}
}
|
// ... existing code ...
import protocolsupport.protocol.packet.ClientBoundPacket;
import protocolsupport.protocol.packet.middle.clientbound.play.MiddleCustomPayload;
import protocolsupport.protocol.packet.middleimpl.ClientBoundPacketData;
import protocolsupport.protocol.serializer.MerchantDataSerializer;
import protocolsupport.protocol.serializer.StringSerializer;
import protocolsupport.protocol.typeremapper.legacy.LegacyCustomPayloadChannelName;
import protocolsupport.protocol.utils.ProtocolVersionsHelper;
import protocolsupport.utils.recyclable.RecyclableCollection;
import protocolsupport.utils.recyclable.RecyclableSingletonList;
// ... modified code ...
public RecyclableCollection<ClientBoundPacketData> toData() {
ClientBoundPacketData serializer = ClientBoundPacketData.create(ClientBoundPacket.PLAY_CUSTOM_PAYLOAD_ID);
StringSerializer.writeString(serializer, connection.getVersion(), tag);
switch (tag) {
case (LegacyCustomPayloadChannelName.MODERN_TRADER_LIST): {
String locale = cache.getAttributesCache().getLocale();
MerchantDataSerializer.writeMerchantData(
serializer, connection.getVersion(), locale,
MerchantDataSerializer.readMerchantData(data, ProtocolVersionsHelper.LATEST_PC, locale)
);
break;
}
default: {
serializer.writeBytes(data);
break;
}
}
return RecyclableSingletonList.create(serializer);
}
// ... rest of the code ...
|
c083b0c24035620ec14db062fff8e62bd689c237
|
test/Driver/compilation_database.c
|
test/Driver/compilation_database.c
|
// RUN: %clang -MD -MP -c -x c %s -xc++ %s -Wall -MJ - 2>&1 | FileCheck %s
// RUN: not %clang -c -x c %s -MJ %s/non-existant 2>&1 | FileCheck --check-prefix=ERROR %s
// CHECK: { "directory": "[[CWD:[^"]+]]", "file": "[[SRC:[^"]+[/|\\]compilation_database.c]]", "output": "compilation_database.o", "arguments": ["{{[^"]*}}clang{{[^"]*}}", "-xc", "[[SRC]]", "-c", "-Wall", "--target={{[^"]+}}"]},
// CHECK: { "directory": "[[CWD:[^"]+]]", "file": "[[SRC:[^"]+[/|\\]compilation_database.c]]", "output": "compilation_database.o", "arguments": ["{{[^"]*}}clang{{[^"]*}}", "-xc++", "[[SRC]]", "-c", "-Wall", "--target={{[^"]+}}"]},
// ERROR: error: compilation database '{{.*}}/non-existant' could not be opened:
int main(void) {
return 0;
}
|
// RUN: %clang -MD -MP --sysroot=somewhere -c -x c %s -xc++ %s -Wall -MJ - 2>&1 | FileCheck %s
// RUN: not %clang -c -x c %s -MJ %s/non-existant 2>&1 | FileCheck --check-prefix=ERROR %s
// CHECK: { "directory": "[[CWD:[^"]+]]", "file": "[[SRC:[^"]+[/|\\]compilation_database.c]]", "output": "compilation_database.o", "arguments": ["{{[^"]*}}clang{{[^"]*}}", "-xc", "[[SRC]]", "--sysroot=somewhere", "-c", "-Wall", "--target={{[^"]+}}"]},
// CHECK: { "directory": "[[CWD:[^"]+]]", "file": "[[SRC:[^"]+[/|\\]compilation_database.c]]", "output": "compilation_database.o", "arguments": ["{{[^"]*}}clang{{[^"]*}}", "-xc++", "[[SRC]]", "--sysroot=somewhere", "-c", "-Wall", "--target={{[^"]+}}"]},
// ERROR: error: compilation database '{{.*}}/non-existant' could not be opened:
int main(void) {
return 0;
}
|
Make test case slightly more robust by explicitly passing --sysroot. Otherwise it would change when DEFAULT_SYSROOT is provided.
|
Make test case slightly more robust by explicitly passing --sysroot.
Otherwise it would change when DEFAULT_SYSROOT is provided.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@288823 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang
|
c
|
## Code Before:
// RUN: %clang -MD -MP -c -x c %s -xc++ %s -Wall -MJ - 2>&1 | FileCheck %s
// RUN: not %clang -c -x c %s -MJ %s/non-existant 2>&1 | FileCheck --check-prefix=ERROR %s
// CHECK: { "directory": "[[CWD:[^"]+]]", "file": "[[SRC:[^"]+[/|\\]compilation_database.c]]", "output": "compilation_database.o", "arguments": ["{{[^"]*}}clang{{[^"]*}}", "-xc", "[[SRC]]", "-c", "-Wall", "--target={{[^"]+}}"]},
// CHECK: { "directory": "[[CWD:[^"]+]]", "file": "[[SRC:[^"]+[/|\\]compilation_database.c]]", "output": "compilation_database.o", "arguments": ["{{[^"]*}}clang{{[^"]*}}", "-xc++", "[[SRC]]", "-c", "-Wall", "--target={{[^"]+}}"]},
// ERROR: error: compilation database '{{.*}}/non-existant' could not be opened:
int main(void) {
return 0;
}
## Instruction:
Make test case slightly more robust by explicitly passing --sysroot.
Otherwise it would change when DEFAULT_SYSROOT is provided.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@288823 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// RUN: %clang -MD -MP --sysroot=somewhere -c -x c %s -xc++ %s -Wall -MJ - 2>&1 | FileCheck %s
// RUN: not %clang -c -x c %s -MJ %s/non-existant 2>&1 | FileCheck --check-prefix=ERROR %s
// CHECK: { "directory": "[[CWD:[^"]+]]", "file": "[[SRC:[^"]+[/|\\]compilation_database.c]]", "output": "compilation_database.o", "arguments": ["{{[^"]*}}clang{{[^"]*}}", "-xc", "[[SRC]]", "--sysroot=somewhere", "-c", "-Wall", "--target={{[^"]+}}"]},
// CHECK: { "directory": "[[CWD:[^"]+]]", "file": "[[SRC:[^"]+[/|\\]compilation_database.c]]", "output": "compilation_database.o", "arguments": ["{{[^"]*}}clang{{[^"]*}}", "-xc++", "[[SRC]]", "--sysroot=somewhere", "-c", "-Wall", "--target={{[^"]+}}"]},
// ERROR: error: compilation database '{{.*}}/non-existant' could not be opened:
int main(void) {
return 0;
}
|
...
// RUN: %clang -MD -MP --sysroot=somewhere -c -x c %s -xc++ %s -Wall -MJ - 2>&1 | FileCheck %s
// RUN: not %clang -c -x c %s -MJ %s/non-existant 2>&1 | FileCheck --check-prefix=ERROR %s
// CHECK: { "directory": "[[CWD:[^"]+]]", "file": "[[SRC:[^"]+[/|\\]compilation_database.c]]", "output": "compilation_database.o", "arguments": ["{{[^"]*}}clang{{[^"]*}}", "-xc", "[[SRC]]", "--sysroot=somewhere", "-c", "-Wall", "--target={{[^"]+}}"]},
// CHECK: { "directory": "[[CWD:[^"]+]]", "file": "[[SRC:[^"]+[/|\\]compilation_database.c]]", "output": "compilation_database.o", "arguments": ["{{[^"]*}}clang{{[^"]*}}", "-xc++", "[[SRC]]", "--sysroot=somewhere", "-c", "-Wall", "--target={{[^"]+}}"]},
// ERROR: error: compilation database '{{.*}}/non-existant' could not be opened:
int main(void) {
...
|
7a4d878dda0b9b947a5991be63183e247ad4e022
|
grammpy_transforms/UnreachableSymbolsRemove/unreachableSymbolsRemove.py
|
grammpy_transforms/UnreachableSymbolsRemove/unreachableSymbolsRemove.py
|
from grammpy import Grammar
from copy import copy
def remove_unreachable_symbols(grammar: Grammar, transform_grammar=False) -> Grammar:
# Copy if required
if transform_grammar is False: grammar = copy(grammar)
raise NotImplementedError()
|
from grammpy import Grammar
from grammpy.exceptions import NotNonterminalException
from copy import copy
class StartSymbolNotSpecifiedException(Exception):
pass
def remove_unreachable_symbols(grammar: Grammar, transform_grammar=False) -> Grammar:
# Copy if required
if transform_grammar is False: grammar = copy(grammar)
# Check if start symbol is set
if not grammar.start_isSet(): raise StartSymbolNotSpecifiedException()
# Create process sets
reachable = {grammar.start_get()}
rules = grammar.rules()
# Begin iterations
while True:
# Create sets for current iteration
active = reachable.copy()
processedRules = []
# Loop rest of rules
for rule in rules:
# If left part of rule already in reachable symbols
if rule.fromSymbol in reachable:
# Set symbols as reachable
processedRules.append(rule)
for symbol in rule.right: active.add(symbol)
# End of rules loop
# Remove processed rules
for item in processedRules: rules.remove(item)
# If current and previous iterations are same, than end iterations
if active == reachable: break
reachable = active
# End of iterations
# Set symbols to remove
allSymbols = set(grammar.nonterms()).union(set(x.s for x in grammar.terms()))
for symbol in allSymbols.difference(reachable):
try:
grammar.remove_nonterm(symbol)
except NotNonterminalException:
grammar.remove_term(symbol)
return grammar
|
Implement removing of unreachable symbols
|
Implement removing of unreachable symbols
|
Python
|
mit
|
PatrikValkovic/grammpy
|
python
|
## Code Before:
from grammpy import Grammar
from copy import copy
def remove_unreachable_symbols(grammar: Grammar, transform_grammar=False) -> Grammar:
# Copy if required
if transform_grammar is False: grammar = copy(grammar)
raise NotImplementedError()
## Instruction:
Implement removing of unreachable symbols
## Code After:
from grammpy import Grammar
from grammpy.exceptions import NotNonterminalException
from copy import copy
class StartSymbolNotSpecifiedException(Exception):
pass
def remove_unreachable_symbols(grammar: Grammar, transform_grammar=False) -> Grammar:
# Copy if required
if transform_grammar is False: grammar = copy(grammar)
# Check if start symbol is set
if not grammar.start_isSet(): raise StartSymbolNotSpecifiedException()
# Create process sets
reachable = {grammar.start_get()}
rules = grammar.rules()
# Begin iterations
while True:
# Create sets for current iteration
active = reachable.copy()
processedRules = []
# Loop rest of rules
for rule in rules:
# If left part of rule already in reachable symbols
if rule.fromSymbol in reachable:
# Set symbols as reachable
processedRules.append(rule)
for symbol in rule.right: active.add(symbol)
# End of rules loop
# Remove processed rules
for item in processedRules: rules.remove(item)
# If current and previous iterations are same, than end iterations
if active == reachable: break
reachable = active
# End of iterations
# Set symbols to remove
allSymbols = set(grammar.nonterms()).union(set(x.s for x in grammar.terms()))
for symbol in allSymbols.difference(reachable):
try:
grammar.remove_nonterm(symbol)
except NotNonterminalException:
grammar.remove_term(symbol)
return grammar
|
...
from grammpy import Grammar
from grammpy.exceptions import NotNonterminalException
from copy import copy
class StartSymbolNotSpecifiedException(Exception):
pass
def remove_unreachable_symbols(grammar: Grammar, transform_grammar=False) -> Grammar:
# Copy if required
if transform_grammar is False: grammar = copy(grammar)
# Check if start symbol is set
if not grammar.start_isSet(): raise StartSymbolNotSpecifiedException()
# Create process sets
reachable = {grammar.start_get()}
rules = grammar.rules()
# Begin iterations
while True:
# Create sets for current iteration
active = reachable.copy()
processedRules = []
# Loop rest of rules
for rule in rules:
# If left part of rule already in reachable symbols
if rule.fromSymbol in reachable:
# Set symbols as reachable
processedRules.append(rule)
for symbol in rule.right: active.add(symbol)
# End of rules loop
# Remove processed rules
for item in processedRules: rules.remove(item)
# If current and previous iterations are same, than end iterations
if active == reachable: break
reachable = active
# End of iterations
# Set symbols to remove
allSymbols = set(grammar.nonterms()).union(set(x.s for x in grammar.terms()))
for symbol in allSymbols.difference(reachable):
try:
grammar.remove_nonterm(symbol)
except NotNonterminalException:
grammar.remove_term(symbol)
return grammar
...
|
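Aside: the unreachable-symbol removal in the record above is a fixed-point reachability computation — starting from the start symbol, keep marking the right-hand sides of rules whose left side is already reachable until nothing changes, then drop every unmarked symbol. A minimal, self-contained sketch of that idea on plain sets and tuples (illustrative names only, not the grammpy API):

# Schematic sketch of the fixed-point reachability idea; rules are plain
# (lhs, rhs_symbols) tuples and symbols are strings. Not grammpy code.
def reachable_symbols(start, rules):
    """Return the set of symbols reachable from `start` via the given rules."""
    reachable = {start}
    remaining = list(rules)
    changed = True
    while changed:
        changed = False
        still_remaining = []
        for lhs, rhs in remaining:
            if lhs in reachable:
                # Every right-hand-side symbol of a reachable rule is reachable.
                for symbol in rhs:
                    if symbol not in reachable:
                        reachable.add(symbol)
                        changed = True
            else:
                still_remaining.append((lhs, rhs))
        remaining = still_remaining  # processed rules are never revisited
    return reachable

rules = [("S", ["A", "b"]), ("A", ["a"]), ("C", ["d"])]
print(reachable_symbols("S", rules))  # {'S', 'A', 'b', 'a'} (set order varies); 'C' and 'd' would be removed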
7dc223c2d9ad2a5f55ffb1c214ae02e23aa96f92
|
jg-backend/interface/jg-rest-ws/src/main/java/org/jgrades/rest/RestDocsConfig.java
|
jg-backend/interface/jg-rest-ws/src/main/java/org/jgrades/rest/RestDocsConfig.java
|
package org.jgrades.rest;
import org.joda.time.DateTime;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
@Configuration
@EnableSwagger2
public class RestDocsConfig {
@Bean
public Docket jGradesRestApi() {
return new Docket(DocumentationType.SWAGGER_2)
.select()
.apis(RequestHandlerSelectors.any())
.paths(PathSelectors.any())
.build()
.pathMapping("/")
.directModelSubstitute(DateTime.class, Long.class)
.enableUrlTemplating(true);
}
}
|
package org.jgrades.rest;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.joda.time.LocalTime;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
@Configuration
@EnableSwagger2
public class RestDocsConfig {
@Bean
public Docket jGradesRestApi() {
return new Docket(DocumentationType.SWAGGER_2)
.select()
.apis(RequestHandlerSelectors.any())
.paths(PathSelectors.any())
.build()
.pathMapping("/")
.directModelSubstitute(DateTime.class, Long.class)
.directModelSubstitute(LocalDate.class, String.class)
.directModelSubstitute(LocalTime.class, String.class)
.enableUrlTemplating(true);
}
}
|
Improve joda objects model in rest
|
Improve joda objects model in rest
|
Java
|
apache-2.0
|
jgrades/jgrades,jgrades/jgrades,jgrades/jgrades
|
java
|
## Code Before:
package org.jgrades.rest;
import org.joda.time.DateTime;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
@Configuration
@EnableSwagger2
public class RestDocsConfig {
@Bean
public Docket jGradesRestApi() {
return new Docket(DocumentationType.SWAGGER_2)
.select()
.apis(RequestHandlerSelectors.any())
.paths(PathSelectors.any())
.build()
.pathMapping("/")
.directModelSubstitute(DateTime.class, Long.class)
.enableUrlTemplating(true);
}
}
## Instruction:
Improve joda objects model in rest
## Code After:
package org.jgrades.rest;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.joda.time.LocalTime;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
@Configuration
@EnableSwagger2
public class RestDocsConfig {
@Bean
public Docket jGradesRestApi() {
return new Docket(DocumentationType.SWAGGER_2)
.select()
.apis(RequestHandlerSelectors.any())
.paths(PathSelectors.any())
.build()
.pathMapping("/")
.directModelSubstitute(DateTime.class, Long.class)
.directModelSubstitute(LocalDate.class, String.class)
.directModelSubstitute(LocalTime.class, String.class)
.enableUrlTemplating(true);
}
}
|
...
package org.jgrades.rest;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.joda.time.LocalTime;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import springfox.documentation.builders.PathSelectors;
...
.build()
.pathMapping("/")
.directModelSubstitute(DateTime.class, Long.class)
.directModelSubstitute(LocalDate.class, String.class)
.directModelSubstitute(LocalTime.class, String.class)
.enableUrlTemplating(true);
}
...
|
0ae513c9ea37e04deb3c72d0c61ca480a8c62266
|
lpthw/ex24.py
|
lpthw/ex24.py
|
print "Let's practice everything."
print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs."
poem = """
\t the lovely world
wtih logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
|
print "Let's practice everything."
print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs."
poem = """
\t the lovely world
wtih logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
# Notice how 'beans' here was called 'jelly_beans' in the function
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
|
Comment for a slick little trick.
|
Comment for a slick little trick.
|
Python
|
mit
|
jaredmanning/learning,jaredmanning/learning
|
python
|
## Code Before:
print "Let's practice everything."
print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs."
poem = """
\t the lovely world
wtih logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
## Instruction:
Comment for a slick little trick.
## Code After:
print "Let's practice everything."
print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs."
poem = """
\t the lovely world
wtih logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
# Notice how 'beans' here was called 'jelly_beans' in the function
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
|
...
start_point = 10000
# Notice how 'beans' here was called 'jelly_beans' in the function
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
...
|
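Aside: the "slick little trick" the new comment in the record above points at is that a function returning a tuple can either be unpacked into separate names or handed straight to %-formatting, which expects a tuple on its right-hand side. A short sketch (Python 3 syntax here, unlike the Python 2 exercise in the record):

def secret_formula(started):
    jelly_beans = started * 500
    jars = jelly_beans // 1000
    crates = jars // 100
    return jelly_beans, jars, crates  # a 3-tuple

beans, jars, crates = secret_formula(10000)  # unpacked into three names
print("We'd have %d beans, %d jars, and %d crates." % (beans, jars, crates))
print("We'd have %d beans, %d jars, and %d crates." % secret_formula(1000))  # returned tuple used directly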
8771bbdba5b10a3b9fab2822eccdec64d221edb4
|
catalog/admin.py
|
catalog/admin.py
|
from django.contrib import admin
from .models import Author, Book, BookInstance, Genre, Language
# admin.site.register(Book)
# admin.site.register(Author)
admin.site.register(Genre)
# admin.site.register(BookInstance)
admin.site.register(Language)
# Define the admin class
class AuthorAdmin(admin.ModelAdmin):
list_display = ('last_name', 'first_name', 'date_of_birth', 'date_of_death')
fields = ['first_name', 'last_name', ('date_of_birth', 'date_of_death')]
# Register the admin class with the associated model
admin.site.register(Author, AuthorAdmin)
# Register the Admin classes for Book using the decorator
class BooksInstanceInline(admin.TabularInline):
model = BookInstance
@admin.register(Book)
class BookAdmin(admin.ModelAdmin):
list_display = ('title', 'author', 'display_genre')
inlines = [BooksInstanceInline]
# Register the Admin classes for BookInstance using the decorator
@admin.register(BookInstance)
class BookInstanceAdmin(admin.ModelAdmin):
list_filter = ('status', 'due_back')
fieldsets = (
(None, {
'fields': ('book', 'imprint', 'id')
}),
('Availability', {
'fields': ('status', 'due_back')
}),
)
|
from django.contrib import admin
from .models import Author, Book, BookInstance, Genre, Language
# admin.site.register(Book)
# admin.site.register(Author)
admin.site.register(Genre)
# admin.site.register(BookInstance)
admin.site.register(Language)
class AuthorsInstanceInline(admin.TabularInline):
model = Book
# Define the admin class
class AuthorAdmin(admin.ModelAdmin):
list_display = ('last_name', 'first_name', 'date_of_birth', 'date_of_death')
fields = ['first_name', 'last_name', ('date_of_birth', 'date_of_death')]
inlines = [AuthorsInstanceInline]
# Register the admin class with the associated model
admin.site.register(Author, AuthorAdmin)
# Register the Admin classes for Book using the decorator
class BooksInstanceInline(admin.TabularInline):
model = BookInstance
@admin.register(Book)
class BookAdmin(admin.ModelAdmin):
list_display = ('title', 'author', 'display_genre')
inlines = [BooksInstanceInline]
# Register the Admin classes for BookInstance using the decorator
@admin.register(BookInstance)
class BookInstanceAdmin(admin.ModelAdmin):
list_filter = ('status', 'due_back')
fieldsets = (
(None, {
'fields': ('book', 'imprint', 'id')
}),
('Availability', {
'fields': ('status', 'due_back')
}),
)
list_display = ('book', 'status', 'due_back', 'id')
|
Configure BookInstance list view and add an inline listing
|
Configure BookInstance list view and add an inline listing
|
Python
|
bsd-3-clause
|
pavlenk0/my-catalog,pavlenk0/my-catalog
|
python
|
## Code Before:
from django.contrib import admin
from .models import Author, Book, BookInstance, Genre, Language
# admin.site.register(Book)
# admin.site.register(Author)
admin.site.register(Genre)
# admin.site.register(BookInstance)
admin.site.register(Language)
# Define the admin class
class AuthorAdmin(admin.ModelAdmin):
list_display = ('last_name', 'first_name', 'date_of_birth', 'date_of_death')
fields = ['first_name', 'last_name', ('date_of_birth', 'date_of_death')]
# Register the admin class with the associated model
admin.site.register(Author, AuthorAdmin)
# Register the Admin classes for Book using the decorator
class BooksInstanceInline(admin.TabularInline):
model = BookInstance
@admin.register(Book)
class BookAdmin(admin.ModelAdmin):
list_display = ('title', 'author', 'display_genre')
inlines = [BooksInstanceInline]
# Register the Admin classes for BookInstance using the decorator
@admin.register(BookInstance)
class BookInstanceAdmin(admin.ModelAdmin):
list_filter = ('status', 'due_back')
fieldsets = (
(None, {
'fields': ('book', 'imprint', 'id')
}),
('Availability', {
'fields': ('status', 'due_back')
}),
)
## Instruction:
Configure BookInstance list view and add an inline listing
## Code After:
from django.contrib import admin
from .models import Author, Book, BookInstance, Genre, Language
# admin.site.register(Book)
# admin.site.register(Author)
admin.site.register(Genre)
# admin.site.register(BookInstance)
admin.site.register(Language)
class AuthorsInstanceInline(admin.TabularInline):
model = Book
# Define the admin class
class AuthorAdmin(admin.ModelAdmin):
list_display = ('last_name', 'first_name', 'date_of_birth', 'date_of_death')
fields = ['first_name', 'last_name', ('date_of_birth', 'date_of_death')]
inlines = [AuthorsInstanceInline]
# Register the admin class with the associated model
admin.site.register(Author, AuthorAdmin)
# Register the Admin classes for Book using the decorator
class BooksInstanceInline(admin.TabularInline):
model = BookInstance
@admin.register(Book)
class BookAdmin(admin.ModelAdmin):
list_display = ('title', 'author', 'display_genre')
inlines = [BooksInstanceInline]
# Register the Admin classes for BookInstance using the decorator
@admin.register(BookInstance)
class BookInstanceAdmin(admin.ModelAdmin):
list_filter = ('status', 'due_back')
fieldsets = (
(None, {
'fields': ('book', 'imprint', 'id')
}),
('Availability', {
'fields': ('status', 'due_back')
}),
)
list_display = ('book', 'status', 'due_back', 'id')
|
# ... existing code ...
admin.site.register(Language)
class AuthorsInstanceInline(admin.TabularInline):
model = Book
# Define the admin class
class AuthorAdmin(admin.ModelAdmin):
list_display = ('last_name', 'first_name', 'date_of_birth', 'date_of_death')
fields = ['first_name', 'last_name', ('date_of_birth', 'date_of_death')]
inlines = [AuthorsInstanceInline]
# Register the admin class with the associated model
# ... modified code ...
'fields': ('status', 'due_back')
}),
)
list_display = ('book', 'status', 'due_back', 'id')
# ... rest of the code ...
|
528b421efa1c787b6a58d76c863d3bb4a6f6e847
|
src/main/java/com/corwinjv/mobtotems/blocks/tiles/TotemLogic/WolfLogic.java
|
src/main/java/com/corwinjv/mobtotems/blocks/tiles/TotemLogic/WolfLogic.java
|
package com.corwinjv.mobtotems.blocks.tiles.TotemLogic;
import net.minecraft.item.ItemStack;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.List;
/**
* Created by CorwinJV on 1/25/2017.
*/
public class WolfLogic extends TotemLogic {
@Override
public List<ItemStack> getCost() {
List<ItemStack> cost = new ArrayList<>();
return cost;
}
@Nonnull
@Override
public EffectType getEffectType() {
return EffectType.EFFECT;
}
}
|
package com.corwinjv.mobtotems.blocks.tiles.TotemLogic;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.List;
/**
* Created by CorwinJV on 1/25/2017.
*/
public class WolfLogic extends TotemLogic {
@Override
public List<ItemStack> getCost() {
List<ItemStack> cost = new ArrayList<>();
cost.add(new ItemStack(Items.BONE, 4, 0));
return cost;
}
@Nonnull
@Override
public EffectType getEffectType() {
return EffectType.EFFECT;
}
}
|
Add cost to wolf totem
|
Add cost to wolf totem
|
Java
|
mit
|
CorwinJV/MobTotems
|
java
|
## Code Before:
package com.corwinjv.mobtotems.blocks.tiles.TotemLogic;
import net.minecraft.item.ItemStack;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.List;
/**
* Created by CorwinJV on 1/25/2017.
*/
public class WolfLogic extends TotemLogic {
@Override
public List<ItemStack> getCost() {
List<ItemStack> cost = new ArrayList<>();
return cost;
}
@Nonnull
@Override
public EffectType getEffectType() {
return EffectType.EFFECT;
}
}
## Instruction:
Add cost to wolf totem
## Code After:
package com.corwinjv.mobtotems.blocks.tiles.TotemLogic;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.List;
/**
* Created by CorwinJV on 1/25/2017.
*/
public class WolfLogic extends TotemLogic {
@Override
public List<ItemStack> getCost() {
List<ItemStack> cost = new ArrayList<>();
cost.add(new ItemStack(Items.BONE, 4, 0));
return cost;
}
@Nonnull
@Override
public EffectType getEffectType() {
return EffectType.EFFECT;
}
}
|
# ... existing code ...
package com.corwinjv.mobtotems.blocks.tiles.TotemLogic;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import javax.annotation.Nonnull;
# ... modified code ...
@Override
public List<ItemStack> getCost() {
List<ItemStack> cost = new ArrayList<>();
cost.add(new ItemStack(Items.BONE, 4, 0));
return cost;
}
# ... rest of the code ...
|
36213a31a1870cf38ec0ce3d208c6a2072e2b133
|
acapi/tests/test_client.py
|
acapi/tests/test_client.py
|
import os
import requests
import requests_mock
import unittest
from .. import Client
@requests_mock.Mocker()
class TestClient(unittest.TestCase):
"""Tests the Acquia Cloud API client class."""
req = None
"""
def setup(self, ):
" ""
Set up the tests with the mock requests handler.
" ""
session = requests.Session()
adapter = requests_mock.Adapter()
session.mount('mock', adapter)
"""
def test_find_credentials(self, m):
"""
Tests finding the credentials in environment variables
"""
os.environ['ACQUIA_CLOUD_API_USER'] = 'user'
os.environ['ACQUIA_CLOUD_API_TOKEN'] = 'token'
client = Client(cache=None)
(user, token) = client._Client__find_credentials()
self.assertEqual(user, 'user')
self.assertEqual(token, 'token')
def test_user(self, m):
email = '[email protected]'
m.register_uri('GET',
'https://cloudapi.acquia.com/v1/me.json',
json={"authenticated_as": email}
)
client = Client(email, 'token')
user = client.user().get()
self.assertEqual(user['authenticated_as'], email)
if __name__ == '__main__':
unittest.main()
|
import os
import requests
import requests_mock
import unittest
from .. import Client
from ..exceptions import AcquiaCloudException
@requests_mock.Mocker()
class TestClient(unittest.TestCase):
"""Tests the Acquia Cloud API client class."""
req = None
"""
def setup(self, ):
" ""
Set up the tests with the mock requests handler.
" ""
session = requests.Session()
adapter = requests_mock.Adapter()
session.mount('mock', adapter)
"""
def test_find_credentials(self, m):
"""
Tests finding the credentials in environment variables
"""
os.environ['ACQUIA_CLOUD_API_USER'] = 'user'
os.environ['ACQUIA_CLOUD_API_TOKEN'] = 'token'
client = Client(cache=None)
(user, token) = client._Client__find_credentials()
self.assertEqual(user, 'user')
self.assertEqual(token, 'token')
def test_find_credentials_none_set(self, m):
"""
Tests finding the credentials in environment variables with empty credentials
"""
os.environ['ACQUIA_CLOUD_API_USER'] = ''
os.environ['ACQUIA_CLOUD_API_TOKEN'] = ''
with self.assertRaises(AcquiaCloudException) as cm:
client = Client(cache=None)
self.assertEqual(str(cm.exception), 'Credentials not provided')
def test_user(self, m):
email = '[email protected]'
m.register_uri('GET',
'https://cloudapi.acquia.com/v1/me.json',
json={"authenticated_as": email}
)
client = Client(email, 'token')
user = client.user().get()
self.assertEqual(user['authenticated_as'], email)
if __name__ == '__main__':
unittest.main()
|
Add test for failing to find credentials
|
Add test for failing to find credentials
|
Python
|
mit
|
skwashd/python-acquia-cloud
|
python
|
## Code Before:
import os
import requests
import requests_mock
import unittest
from .. import Client
@requests_mock.Mocker()
class TestClient(unittest.TestCase):
"""Tests the Acquia Cloud API client class."""
req = None
"""
def setup(self, ):
" ""
Set up the tests with the mock requests handler.
" ""
session = requests.Session()
adapter = requests_mock.Adapter()
session.mount('mock', adapter)
"""
def test_find_credentials(self, m):
"""
Tests finding the credentials in environment variables
"""
os.environ['ACQUIA_CLOUD_API_USER'] = 'user'
os.environ['ACQUIA_CLOUD_API_TOKEN'] = 'token'
client = Client(cache=None)
(user, token) = client._Client__find_credentials()
self.assertEqual(user, 'user')
self.assertEqual(token, 'token')
def test_user(self, m):
email = '[email protected]'
m.register_uri('GET',
'https://cloudapi.acquia.com/v1/me.json',
json={"authenticated_as": email}
)
client = Client(email, 'token')
user = client.user().get()
self.assertEqual(user['authenticated_as'], email)
if __name__ == '__main__':
unittest.main()
## Instruction:
Add test for failing to find credentials
## Code After:
import os
import requests
import requests_mock
import unittest
from .. import Client
from ..exceptions import AcquiaCloudException
@requests_mock.Mocker()
class TestClient(unittest.TestCase):
"""Tests the Acquia Cloud API client class."""
req = None
"""
def setup(self, ):
" ""
Set up the tests with the mock requests handler.
" ""
session = requests.Session()
adapter = requests_mock.Adapter()
session.mount('mock', adapter)
"""
def test_find_credentials(self, m):
"""
Tests finding the credentials in environment variables
"""
os.environ['ACQUIA_CLOUD_API_USER'] = 'user'
os.environ['ACQUIA_CLOUD_API_TOKEN'] = 'token'
client = Client(cache=None)
(user, token) = client._Client__find_credentials()
self.assertEqual(user, 'user')
self.assertEqual(token, 'token')
def test_find_credentials_none_set(self, m):
"""
Tests finding the credentials in environment variables with empty credentials
"""
os.environ['ACQUIA_CLOUD_API_USER'] = ''
os.environ['ACQUIA_CLOUD_API_TOKEN'] = ''
with self.assertRaises(AcquiaCloudException) as cm:
client = Client(cache=None)
self.assertEqual(str(cm.exception), 'Credentials not provided')
def test_user(self, m):
email = '[email protected]'
m.register_uri('GET',
'https://cloudapi.acquia.com/v1/me.json',
json={"authenticated_as": email}
)
client = Client(email, 'token')
user = client.user().get()
self.assertEqual(user['authenticated_as'], email)
if __name__ == '__main__':
unittest.main()
|
...
import unittest
from .. import Client
from ..exceptions import AcquiaCloudException
@requests_mock.Mocker()
class TestClient(unittest.TestCase):
...
self.assertEqual(user, 'user')
self.assertEqual(token, 'token')
def test_find_credentials_none_set(self, m):
"""
Tests finding the credentials in environment variables with empty credentials
"""
os.environ['ACQUIA_CLOUD_API_USER'] = ''
os.environ['ACQUIA_CLOUD_API_TOKEN'] = ''
with self.assertRaises(AcquiaCloudException) as cm:
client = Client(cache=None)
self.assertEqual(str(cm.exception), 'Credentials not provided')
def test_user(self, m):
email = '[email protected]'
m.register_uri('GET',
...
|
90f0107d69caab084348ef0e4b38d5a33f14039f
|
dataprep-backend-common/src/main/java/org/talend/dataprep/exception/TDPException.java
|
dataprep-backend-common/src/main/java/org/talend/dataprep/exception/TDPException.java
|
package org.talend.dataprep.exception;
import java.io.IOException;
import java.io.Writer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
class TDPException extends RuntimeException {
private static final Logger LOGGER = LoggerFactory.getLogger(TDPException.class);
private final Messages code;
private final String message;
private Throwable cause;
public TDPException(Messages code, String message, Throwable cause) {
super(message, cause);
this.code = code;
this.message = message;
this.cause = cause;
}
public void writeTo(Writer writer) {
try {
JsonGenerator generator = (new JsonFactory()).createGenerator(writer);
generator.writeStartObject();
{
generator.writeStringField("code", code.getProduct() + '_' + code.getGroup() + '_' + code.getCode()); //$NON-NLS-1$
generator.writeStringField("message", message); //$NON-NLS-1$
if (cause != null) {
generator.writeStringField("cause", cause.getMessage()); //$NON-NLS-1$
}
}
generator.writeEndObject();
generator.flush();
} catch (IOException e) {
LOGGER.error("Unable to write exception to " + writer + ".", e);
}
}
}
|
package org.talend.dataprep.exception;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
class TDPException extends RuntimeException {
private static final Logger LOGGER = LoggerFactory.getLogger(TDPException.class);
private final Messages code;
private final String message;
private Throwable cause;
public TDPException(Messages code, String message, Throwable cause) {
super(message, cause);
this.code = code;
this.message = message;
this.cause = cause;
}
public void writeTo(Writer writer) {
try {
JsonGenerator generator = (new JsonFactory()).createGenerator(writer);
generator.writeStartObject();
{
generator.writeStringField("code", code.getProduct() + '_' + code.getGroup() + '_' + code.getCode()); //$NON-NLS-1$
generator.writeStringField("message", message); //$NON-NLS-1$
if (cause != null) {
generator.writeStringField("cause", cause.getMessage()); //$NON-NLS-1$
final StringWriter details = new StringWriter();
cause.printStackTrace(new PrintWriter(details));
generator.writeStringField("details", details.toString()); //$NON-NLS-1$
}
}
generator.writeEndObject();
generator.flush();
} catch (IOException e) {
LOGGER.error("Unable to write exception to " + writer + ".", e);
}
}
}
|
Include exception cause's stacktrace in JSON in case of error.
|
[Backend] Include exception cause's stacktrace in JSON in case of error.
|
Java
|
apache-2.0
|
Talend/data-prep,Talend/data-prep,Talend/data-prep
|
java
|
## Code Before:
package org.talend.dataprep.exception;
import java.io.IOException;
import java.io.Writer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
class TDPException extends RuntimeException {
private static final Logger LOGGER = LoggerFactory.getLogger(TDPException.class);
private final Messages code;
private final String message;
private Throwable cause;
public TDPException(Messages code, String message, Throwable cause) {
super(message, cause);
this.code = code;
this.message = message;
this.cause = cause;
}
public void writeTo(Writer writer) {
try {
JsonGenerator generator = (new JsonFactory()).createGenerator(writer);
generator.writeStartObject();
{
generator.writeStringField("code", code.getProduct() + '_' + code.getGroup() + '_' + code.getCode()); //$NON-NLS-1$
generator.writeStringField("message", message); //$NON-NLS-1$
if (cause != null) {
generator.writeStringField("cause", cause.getMessage()); //$NON-NLS-1$
}
}
generator.writeEndObject();
generator.flush();
} catch (IOException e) {
LOGGER.error("Unable to write exception to " + writer + ".", e);
}
}
}
## Instruction:
[Backend] Include exception cause's stacktrace in JSON in case of error.
## Code After:
package org.talend.dataprep.exception;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
class TDPException extends RuntimeException {
private static final Logger LOGGER = LoggerFactory.getLogger(TDPException.class);
private final Messages code;
private final String message;
private Throwable cause;
public TDPException(Messages code, String message, Throwable cause) {
super(message, cause);
this.code = code;
this.message = message;
this.cause = cause;
}
public void writeTo(Writer writer) {
try {
JsonGenerator generator = (new JsonFactory()).createGenerator(writer);
generator.writeStartObject();
{
generator.writeStringField("code", code.getProduct() + '_' + code.getGroup() + '_' + code.getCode()); //$NON-NLS-1$
generator.writeStringField("message", message); //$NON-NLS-1$
if (cause != null) {
generator.writeStringField("cause", cause.getMessage()); //$NON-NLS-1$
final StringWriter details = new StringWriter();
cause.printStackTrace(new PrintWriter(details));
generator.writeStringField("details", details.toString()); //$NON-NLS-1$
}
}
generator.writeEndObject();
generator.flush();
} catch (IOException e) {
LOGGER.error("Unable to write exception to " + writer + ".", e);
}
}
}
|
// ... existing code ...
package org.talend.dataprep.exception;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
import org.slf4j.Logger;
// ... modified code ...
generator.writeStringField("message", message); //$NON-NLS-1$
if (cause != null) {
generator.writeStringField("cause", cause.getMessage()); //$NON-NLS-1$
final StringWriter details = new StringWriter();
cause.printStackTrace(new PrintWriter(details));
generator.writeStringField("details", details.toString()); //$NON-NLS-1$
}
}
generator.writeEndObject();
// ... rest of the code ...
|
eb4308243a45769bd9b21bf02d9e63d1511923e0
|
src/test/java/org/example/prototype/example/ExampleQuery.java
|
src/test/java/org/example/prototype/example/ExampleQuery.java
|
package org.example.prototype.example;
import org.example.domain.Customer;
import org.example.domain.Order;
import org.example.domain.query.QCustomer;
import org.example.domain.query.QOrder;
import org.junit.Test;
import java.util.List;
/**
*/
public class ExampleQuery {
@Test
public void test() {
QCustomer qc = new QCustomer(5);
Customer rob = new QCustomer(2)
.select("id, name")
.status.equalTo(Customer.Status.GOOD)
.name.ilike("Asd")
.name.isNull()
.billingAddress.country.code.equalTo("NZ")
.contacts.email.endsWith("@foo.com")
.findUnique();
}
@Test
public void testOrder() {
List<Order> orders = new QOrder()
.customer.name.ilike("rob")
.orderBy()
.customer.name.asc()
.orderDate.asc()
.findList();
// .shippingAddress.city.ieq("auckla")
// .shippingAddress.country.code.eq("NZ")
// .status.equalTo(Order.Status.APPROVED)
// .orderDate.after(Date.valueOf("2015-01-20"))
// .findList();
}
}
|
package org.example.prototype.example;
import org.example.domain.Customer;
import org.example.domain.Order;
import org.example.domain.query.QCustomer;
import org.example.domain.query.QOrder;
import org.junit.Test;
import java.util.List;
/**
*/
public class ExampleQuery {
@Test
public void test() {
QCustomer qc = new QCustomer(5);
Customer rob = new QCustomer(2)
.select("id, name")
.id.greaterThan(42)
.status.equalTo(Customer.Status.GOOD)
.name.ilike("Asd")
.name.isNull()
.billingAddress.country.code.equalTo("NZ")
.contacts.email.endsWith("@foo.com")
.findUnique();
}
@Test
public void testOrder() {
List<Order> orders = new QOrder()
.customer.name.ilike("rob")
.orderBy()
.customer.name.asc()
.orderDate.asc()
.findList();
// .shippingAddress.city.ieq("auckla")
// .shippingAddress.country.code.eq("NZ")
// .status.equalTo(Order.Status.APPROVED)
// .orderDate.after(Date.valueOf("2015-01-20"))
// .findList();
}
}
|
Add @TypeQueryBean annotation into generation
|
Add @TypeQueryBean annotation into generation
|
Java
|
apache-2.0
|
ebean-orm/avaje-ebeanorm-typequery-generator,ebean-orm/avaje-ebeanorm-typequery-generator
|
java
|
## Code Before:
package org.example.prototype.example;
import org.example.domain.Customer;
import org.example.domain.Order;
import org.example.domain.query.QCustomer;
import org.example.domain.query.QOrder;
import org.junit.Test;
import java.util.List;
/**
*/
public class ExampleQuery {
@Test
public void test() {
QCustomer qc = new QCustomer(5);
Customer rob = new QCustomer(2)
.select("id, name")
.status.equalTo(Customer.Status.GOOD)
.name.ilike("Asd")
.name.isNull()
.billingAddress.country.code.equalTo("NZ")
.contacts.email.endsWith("@foo.com")
.findUnique();
}
@Test
public void testOrder() {
List<Order> orders = new QOrder()
.customer.name.ilike("rob")
.orderBy()
.customer.name.asc()
.orderDate.asc()
.findList();
// .shippingAddress.city.ieq("auckla")
// .shippingAddress.country.code.eq("NZ")
// .status.equalTo(Order.Status.APPROVED)
// .orderDate.after(Date.valueOf("2015-01-20"))
// .findList();
}
}
## Instruction:
Add @TypeQueryBean annotation into generation
## Code After:
package org.example.prototype.example;
import org.example.domain.Customer;
import org.example.domain.Order;
import org.example.domain.query.QCustomer;
import org.example.domain.query.QOrder;
import org.junit.Test;
import java.util.List;
/**
*/
public class ExampleQuery {
@Test
public void test() {
QCustomer qc = new QCustomer(5);
Customer rob = new QCustomer(2)
.select("id, name")
.id.greaterThan(42)
.status.equalTo(Customer.Status.GOOD)
.name.ilike("Asd")
.name.isNull()
.billingAddress.country.code.equalTo("NZ")
.contacts.email.endsWith("@foo.com")
.findUnique();
}
@Test
public void testOrder() {
List<Order> orders = new QOrder()
.customer.name.ilike("rob")
.orderBy()
.customer.name.asc()
.orderDate.asc()
.findList();
// .shippingAddress.city.ieq("auckla")
// .shippingAddress.country.code.eq("NZ")
// .status.equalTo(Order.Status.APPROVED)
// .orderDate.after(Date.valueOf("2015-01-20"))
// .findList();
}
}
|
// ... existing code ...
Customer rob = new QCustomer(2)
.select("id, name")
.id.greaterThan(42)
.status.equalTo(Customer.Status.GOOD)
.name.ilike("Asd")
.name.isNull()
// ... rest of the code ...
|
34274289f0cbfafbb1d762cad38a7225873d6850
|
matches/admin.py
|
matches/admin.py
|
from django.contrib import admin
from .models import Match
from .models import Tip
def delete_tips(modeladmin, request, queryset):
for match in queryset:
tips = Tip.object.filter(match = match)
for tip in tips:
tip.score = 0
tip.scoring_field = ""
tip.is_score_calculated = False
delete_tips.delete_tips = "Delete calculated scores for tips for these matches"
class MatchAdmin(admin.ModelAdmin):
actions = [make_published]
admin.site.register(Match, MatchAdmin)
admin.site.register(Tip)
|
from django.contrib import admin
from .models import Match
from .models import Tip
def delete_tips(modeladmin, request, queryset):
for match in queryset:
tips = Tip.object.filter(match = match)
for tip in tips:
tip.score = 0
tip.scoring_field = ""
tip.is_score_calculated = False
delete_tips.delete_tips = "Delete calculated scores for tips for these matches"
class MatchAdmin(admin.ModelAdmin):
actions = [delete_tips]
admin.site.register(Match, MatchAdmin)
admin.site.register(Tip)
|
Add action to zero out tips for given match
|
Add action to zero out tips for given match
|
Python
|
mit
|
leventebakos/football-ech,leventebakos/football-ech
|
python
|
## Code Before:
from django.contrib import admin
from .models import Match
from .models import Tip
def delete_tips(modeladmin, request, queryset):
for match in queryset:
tips = Tip.object.filter(match = match)
for tip in tips:
tip.score = 0
tip.scoring_field = ""
tip.is_score_calculated = False
delete_tips.delete_tips = "Delete calculated scores for tips for these matches"
class MatchAdmin(admin.ModelAdmin):
actions = [make_published]
admin.site.register(Match, MatchAdmin)
admin.site.register(Tip)
## Instruction:
Add action to zero out tips for given match
## Code After:
from django.contrib import admin
from .models import Match
from .models import Tip
def delete_tips(modeladmin, request, queryset):
for match in queryset:
tips = Tip.object.filter(match = match)
for tip in tips:
tip.score = 0
tip.scoring_field = ""
tip.is_score_calculated = False
delete_tips.delete_tips = "Delete calculated scores for tips for these matches"
class MatchAdmin(admin.ModelAdmin):
actions = [delete_tips]
admin.site.register(Match, MatchAdmin)
admin.site.register(Tip)
|
# ... existing code ...
delete_tips.delete_tips = "Delete calculated scores for tips for these matches"
class MatchAdmin(admin.ModelAdmin):
actions = [delete_tips]
admin.site.register(Match, MatchAdmin)
admin.site.register(Tip)
# ... rest of the code ...
|
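Aside: for comparison only, Django's documented shape for an admin action is a function taking (modeladmin, request, queryset), an explicit save() per modified instance so the changes persist, and a short_description attribute for the action dropdown label. A minimal sketch along those lines (standard Django conventions, not the project's code; model and field names follow the record above):

from django.contrib import admin
from .models import Match, Tip

def reset_tip_scores(modeladmin, request, queryset):
    for match in queryset:
        for tip in Tip.objects.filter(match=match):
            tip.score = 0
            tip.scoring_field = ""
            tip.is_score_calculated = False
            tip.save()  # changes persist only once each tip is saved
reset_tip_scores.short_description = "Reset calculated scores for selected matches"

class MatchAdmin(admin.ModelAdmin):
    actions = [reset_tip_scores]

admin.site.register(Match, MatchAdmin)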
db99f77edfb7318ee3b4a443a98c837611054515
|
utils/fields.py
|
utils/fields.py
|
import json
from django.contrib.postgres.forms.jsonb import InvalidJSONInput, JSONField
class JSONPrettyField(JSONField):
def __init__(self, *args, **kwargs):
self.__indent = kwargs.pop('indent', 2)
super().__init__(*args, **kwargs)
def prepare_value(self, value):
if isinstance(value, InvalidJSONInput):
return value
return json.dumps(value, indent=self.__indent, sort_keys=True, ensure_ascii=False)
|
import json
from django.contrib.postgres.forms.jsonb import InvalidJSONInput, JSONField
from django.forms import ValidationError
class JSONPrettyField(JSONField):
def __init__(self, *args, **kwargs):
self.__indent = kwargs.pop('indent', 2)
self.__dict_only = kwargs.pop('dict_only', False)
self.__list_only = kwargs.pop('list_only', False)
if self.__dict_only and self.__list_only:
raise ValueError('Only one of dict_only or list_only can be True')
super().__init__(*args, **kwargs)
def prepare_value(self, value):
if isinstance(value, InvalidJSONInput):
return value
return json.dumps(value, indent=self.__indent, sort_keys=True, ensure_ascii=False)
def validate(self, value):
if self.__dict_only and not isinstance(value, dict):
raise ValidationError('{} is not of type dict'.format(value))
if self.__list_only and not isinstance(value, list):
raise ValidationError('{} is not of type list'.format(value))
return value
|
Add list_only and dict_only to JSONPrettyField
|
Add list_only and dict_only to JSONPrettyField
|
Python
|
mit
|
bulv1ne/django-utils,bulv1ne/django-utils
|
python
|
## Code Before:
import json
from django.contrib.postgres.forms.jsonb import InvalidJSONInput, JSONField
class JSONPrettyField(JSONField):
def __init__(self, *args, **kwargs):
self.__indent = kwargs.pop('indent', 2)
super().__init__(*args, **kwargs)
def prepare_value(self, value):
if isinstance(value, InvalidJSONInput):
return value
return json.dumps(value, indent=self.__indent, sort_keys=True, ensure_ascii=False)
## Instruction:
Add list_only and dict_only to JSONPrettyField
## Code After:
import json
from django.contrib.postgres.forms.jsonb import InvalidJSONInput, JSONField
from django.forms import ValidationError
class JSONPrettyField(JSONField):
def __init__(self, *args, **kwargs):
self.__indent = kwargs.pop('indent', 2)
self.__dict_only = kwargs.pop('dict_only', False)
self.__list_only = kwargs.pop('list_only', False)
if self.__dict_only and self.__list_only:
raise ValueError('Only one of dict_only or list_only can be True')
super().__init__(*args, **kwargs)
def prepare_value(self, value):
if isinstance(value, InvalidJSONInput):
return value
return json.dumps(value, indent=self.__indent, sort_keys=True, ensure_ascii=False)
def validate(self, value):
if self.__dict_only and not isinstance(value, dict):
raise ValidationError('{} is not of type dict'.format(value))
if self.__list_only and not isinstance(value, list):
raise ValidationError('{} is not of type list'.format(value))
return value
|
// ... existing code ...
import json
from django.contrib.postgres.forms.jsonb import InvalidJSONInput, JSONField
from django.forms import ValidationError
class JSONPrettyField(JSONField):
def __init__(self, *args, **kwargs):
self.__indent = kwargs.pop('indent', 2)
self.__dict_only = kwargs.pop('dict_only', False)
self.__list_only = kwargs.pop('list_only', False)
if self.__dict_only and self.__list_only:
raise ValueError('Only one of dict_only or list_only can be True')
super().__init__(*args, **kwargs)
def prepare_value(self, value):
// ... modified code ...
if isinstance(value, InvalidJSONInput):
return value
return json.dumps(value, indent=self.__indent, sort_keys=True, ensure_ascii=False)
def validate(self, value):
if self.__dict_only and not isinstance(value, dict):
raise ValidationError('{} is not of type dict'.format(value))
if self.__list_only and not isinstance(value, list):
raise ValidationError('{} is not of type list'.format(value))
return value
// ... rest of the code ...
|
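Aside: a rough usage sketch of what the new dict_only/list_only flags in the record above enable (the form and data below are made up for illustration; the import path follows the record's utils/fields.py, and only one of the two flags may be set per field):

from django import forms
from utils.fields import JSONPrettyField

class ConfigForm(forms.Form):
    settings = JSONPrettyField(indent=4, dict_only=True)  # value must decode to a dict
    tags = JSONPrettyField(list_only=True)                # value must decode to a list

ok = ConfigForm(data={"settings": '{"a": 1}', "tags": '["x", "y"]'})
assert ok.is_valid()

bad = ConfigForm(data={"settings": '[1, 2, 3]', "tags": '["x"]'})
assert not bad.is_valid()  # dict_only rejects a JSON list via ValidationError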
41b1d36a9d5fcb0dd2f6da53a7a0d4604b21a0eb
|
tests/query_test/test_scan_range_lengths.py
|
tests/query_test/test_scan_range_lengths.py
|
import pytest
from copy import copy
from tests.common.test_vector import TestDimension
from tests.common.impala_test_suite import ImpalaTestSuite, ALL_NODES_ONLY
# We use very small scan ranges to exercise corner cases in the HDFS scanner more
# thoroughly. In particular, it will exercise:
# 1. scan range with no tuple
# 2. tuple that span across multiple scan ranges
MAX_SCAN_RANGE_LENGTHS = [1, 2, 5]
class TestScanRangeLengths(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestScanRangeLengths, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
TestDimension('max_scan_range_length', *MAX_SCAN_RANGE_LENGTHS))
def test_scan_ranges(self, vector):
if vector.get_value('table_format').file_format != 'text':
pytest.xfail(reason='IMP-636')
elif vector.get_value('table_format').compression_codec != 'none':
pytest.xfail(reason='IMPALA-122')
vector.get_value('exec_option')['max_scan_range_length'] =\
vector.get_value('max_scan_range_length')
self.run_test_case('QueryTest/hdfs-tiny-scan', vector)
|
import pytest
from copy import copy
from tests.common.test_vector import TestDimension
from tests.common.impala_test_suite import ImpalaTestSuite, ALL_NODES_ONLY
# We use very small scan ranges to exercise corner cases in the HDFS scanner more
# thoroughly. In particular, it will exercise:
# 1. scan range with no tuple
# 2. tuple that span across multiple scan ranges
MAX_SCAN_RANGE_LENGTHS = [1, 2, 5]
class TestScanRangeLengths(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestScanRangeLengths, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
TestDimension('max_scan_range_length', *MAX_SCAN_RANGE_LENGTHS))
def test_scan_ranges(self, vector):
if vector.get_value('table_format').file_format != 'text':
pytest.xfail(reason='IMP-636')
vector.get_value('exec_option')['max_scan_range_length'] =\
vector.get_value('max_scan_range_length')
self.run_test_case('QueryTest/hdfs-tiny-scan', vector)
|
Fix IMPALA-122: Lzo scanner with small scan ranges.
|
Fix IMPALA-122: Lzo scanner with small scan ranges.
Change-Id: I5226fd1a1aa368f5b291b78ad371363057ef574e
Reviewed-on: http://gerrit.ent.cloudera.com:8080/140
Reviewed-by: Skye Wanderman-Milne <[email protected]>
Reviewed-by: Nong Li <[email protected]>
Tested-by: Nong Li <[email protected]>
|
Python
|
apache-2.0
|
michaelhkw/incubator-impala,cloudera/Impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,cloudera/Impala,cloudera/Impala,cloudera/Impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,cloudera/Impala,cloudera/Impala,michaelhkw/incubator-impala,cloudera/Impala
|
python
|
## Code Before:
import pytest
from copy import copy
from tests.common.test_vector import TestDimension
from tests.common.impala_test_suite import ImpalaTestSuite, ALL_NODES_ONLY
# We use very small scan ranges to exercise corner cases in the HDFS scanner more
# thoroughly. In particular, it will exercise:
# 1. scan range with no tuple
# 2. tuple that span across multiple scan ranges
MAX_SCAN_RANGE_LENGTHS = [1, 2, 5]
class TestScanRangeLengths(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestScanRangeLengths, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
TestDimension('max_scan_range_length', *MAX_SCAN_RANGE_LENGTHS))
def test_scan_ranges(self, vector):
if vector.get_value('table_format').file_format != 'text':
pytest.xfail(reason='IMP-636')
elif vector.get_value('table_format').compression_codec != 'none':
pytest.xfail(reason='IMPALA-122')
vector.get_value('exec_option')['max_scan_range_length'] =\
vector.get_value('max_scan_range_length')
self.run_test_case('QueryTest/hdfs-tiny-scan', vector)
## Instruction:
Fix IMPALA-122: Lzo scanner with small scan ranges.
Change-Id: I5226fd1a1aa368f5b291b78ad371363057ef574e
Reviewed-on: http://gerrit.ent.cloudera.com:8080/140
Reviewed-by: Skye Wanderman-Milne <[email protected]>
Reviewed-by: Nong Li <[email protected]>
Tested-by: Nong Li <[email protected]>
## Code After:
import pytest
from copy import copy
from tests.common.test_vector import TestDimension
from tests.common.impala_test_suite import ImpalaTestSuite, ALL_NODES_ONLY
# We use very small scan ranges to exercise corner cases in the HDFS scanner more
# thoroughly. In particular, it will exercise:
# 1. scan range with no tuple
# 2. tuple that span across multiple scan ranges
MAX_SCAN_RANGE_LENGTHS = [1, 2, 5]
class TestScanRangeLengths(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestScanRangeLengths, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
TestDimension('max_scan_range_length', *MAX_SCAN_RANGE_LENGTHS))
def test_scan_ranges(self, vector):
if vector.get_value('table_format').file_format != 'text':
pytest.xfail(reason='IMP-636')
vector.get_value('exec_option')['max_scan_range_length'] =\
vector.get_value('max_scan_range_length')
self.run_test_case('QueryTest/hdfs-tiny-scan', vector)
|
// ... existing code ...
def test_scan_ranges(self, vector):
if vector.get_value('table_format').file_format != 'text':
pytest.xfail(reason='IMP-636')
vector.get_value('exec_option')['max_scan_range_length'] =\
vector.get_value('max_scan_range_length')
// ... rest of the code ...
|
ce3668fd60044c328af41b358a66a0e6741db1fb
|
boot/espressif/hal/include/esp_log.h
|
boot/espressif/hal/include/esp_log.h
|
/*
* Copyright (c) 2021 Espressif Systems (Shanghai) Co., Ltd.
*
* SPDX-License-Identifier: Apache-2.0
*/
#pragma once
#include <mcuboot_config/mcuboot_logging.h>
#define ESP_LOGE(tag, fmt, ...) MCUBOOT_LOG_ERR(fmt, ##__VA_ARGS__)
#define ESP_LOGW(tag, fmt, ...) MCUBOOT_LOG_WRN(fmt, ##__VA_ARGS__)
#define ESP_LOGI(tag, fmt, ...) MCUBOOT_LOG_INF(fmt, ##__VA_ARGS__)
#define ESP_LOGD(tag, fmt, ...) MCUBOOT_LOG_DBG(fmt, ##__VA_ARGS__)
|
/*
* SPDX-FileCopyrightText: 2021 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
#pragma once
#include <mcuboot_config/mcuboot_logging.h>
#define ESP_LOGE(tag, fmt, ...) MCUBOOT_LOG_ERR("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_LOGW(tag, fmt, ...) MCUBOOT_LOG_WRN("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_LOGI(tag, fmt, ...) MCUBOOT_LOG_INF("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_LOGD(tag, fmt, ...) MCUBOOT_LOG_DBG("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_LOGV(tag, fmt, ...) MCUBOOT_LOG_DBG("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_EARLY_LOGE(tag, fmt, ...) MCUBOOT_LOG_ERR("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_EARLY_LOGW(tag, fmt, ...) MCUBOOT_LOG_WRN("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_EARLY_LOGI(tag, fmt, ...) MCUBOOT_LOG_INF("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_EARLY_LOGD(tag, fmt, ...) MCUBOOT_LOG_DBG("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_EARLY_LOGV(tag, fmt, ...) MCUBOOT_LOG_DBG("[%s] " fmt, tag, ##__VA_ARGS__)
|
Use "TAG" field from ESP_LOG* macros from IDF libraries
|
espressif: Use "TAG" field from ESP_LOG* macros from IDF libraries
Signed-off-by: Gustavo Henrique Nihei <[email protected]>
|
C
|
apache-2.0
|
runtimeco/mcuboot,ATmobica/mcuboot,ATmobica/mcuboot,ATmobica/mcuboot,runtimeco/mcuboot,ATmobica/mcuboot,runtimeco/mcuboot,runtimeco/mcuboot,runtimeco/mcuboot,ATmobica/mcuboot
|
c
|
## Code Before:
/*
* Copyright (c) 2021 Espressif Systems (Shanghai) Co., Ltd.
*
* SPDX-License-Identifier: Apache-2.0
*/
#pragma once
#include <mcuboot_config/mcuboot_logging.h>
#define ESP_LOGE(tag, fmt, ...) MCUBOOT_LOG_ERR(fmt, ##__VA_ARGS__)
#define ESP_LOGW(tag, fmt, ...) MCUBOOT_LOG_WRN(fmt, ##__VA_ARGS__)
#define ESP_LOGI(tag, fmt, ...) MCUBOOT_LOG_INF(fmt, ##__VA_ARGS__)
#define ESP_LOGD(tag, fmt, ...) MCUBOOT_LOG_DBG(fmt, ##__VA_ARGS__)
## Instruction:
espressif: Use "TAG" field from ESP_LOG* macros from IDF libraries
Signed-off-by: Gustavo Henrique Nihei <[email protected]>
## Code After:
/*
* SPDX-FileCopyrightText: 2021 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
#pragma once
#include <mcuboot_config/mcuboot_logging.h>
#define ESP_LOGE(tag, fmt, ...) MCUBOOT_LOG_ERR("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_LOGW(tag, fmt, ...) MCUBOOT_LOG_WRN("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_LOGI(tag, fmt, ...) MCUBOOT_LOG_INF("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_LOGD(tag, fmt, ...) MCUBOOT_LOG_DBG("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_LOGV(tag, fmt, ...) MCUBOOT_LOG_DBG("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_EARLY_LOGE(tag, fmt, ...) MCUBOOT_LOG_ERR("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_EARLY_LOGW(tag, fmt, ...) MCUBOOT_LOG_WRN("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_EARLY_LOGI(tag, fmt, ...) MCUBOOT_LOG_INF("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_EARLY_LOGD(tag, fmt, ...) MCUBOOT_LOG_DBG("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_EARLY_LOGV(tag, fmt, ...) MCUBOOT_LOG_DBG("[%s] " fmt, tag, ##__VA_ARGS__)
|
# ... existing code ...
/*
* SPDX-FileCopyrightText: 2021 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
# ... modified code ...
#pragma once
#include <mcuboot_config/mcuboot_logging.h>
#define ESP_LOGE(tag, fmt, ...) MCUBOOT_LOG_ERR("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_LOGW(tag, fmt, ...) MCUBOOT_LOG_WRN("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_LOGI(tag, fmt, ...) MCUBOOT_LOG_INF("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_LOGD(tag, fmt, ...) MCUBOOT_LOG_DBG("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_LOGV(tag, fmt, ...) MCUBOOT_LOG_DBG("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_EARLY_LOGE(tag, fmt, ...) MCUBOOT_LOG_ERR("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_EARLY_LOGW(tag, fmt, ...) MCUBOOT_LOG_WRN("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_EARLY_LOGI(tag, fmt, ...) MCUBOOT_LOG_INF("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_EARLY_LOGD(tag, fmt, ...) MCUBOOT_LOG_DBG("[%s] " fmt, tag, ##__VA_ARGS__)
#define ESP_EARLY_LOGV(tag, fmt, ...) MCUBOOT_LOG_DBG("[%s] " fmt, tag, ##__VA_ARGS__)
# ... rest of the code ...
|
8e53b65b5f28a02f8ee980b9f53a57e7cdd077bd
|
main.py
|
main.py
|
import places
from character import Character
import actions
import options
from multiple_choice import MultipleChoice
def combat(character):
"""
takes in a character, returns outcome of fight
"""
return actions.Attack(character.person).get_outcome(character)
def main():
"""
The goal is to have the main function operate as follows:
Set up the initial state
Display the initial message
Display the initial options
Choose an action
Get an outcome
Display results of the outcomes
Outcome changes game state
"""
character = Character()
character.place = places.tavern
choices = MultipleChoice()
options.set_initial_actions(choices)
print("\n---The St. George Game---\n")
print("You are in a tavern. The local assassins hate you.")
while character.alive and character.alone and not character.lose:
action = choices.choose_action()
if not character.threatened or action.combat_action:
outcome = action.get_outcome(character)
else:
outcome = combat(character)
if not character.alive:
break
outcome.execute()
options.add_actions(choices, character, outcome)
choices.generate_actions(character)
if __name__ == "__main__":
main()
|
import places
from character import Character
import actions
import options
from multiple_choice import MultipleChoice
def main():
"""
The goal is to have the main function operate as follows:
Set up the initial state
Display the initial message
Display the initial options
Choose an action
Get an outcome
Display results of the outcomes
Outcome changes game state
"""
character = Character(place=places.tavern)
choices = MultipleChoice()
options.set_initial_actions(choices)
print("\n---The St. George Game---\n")
print("You are in a tavern. The local assassins hate you.")
while character.alive and character.alone and not character.lose:
action = choices.choose_action()
if not character.threatened or action.combat_action:
outcome = action.get_outcome(character)
else:
outcome = actions.Attack(character.person).get_outcome(character)
outcome.execute()
options.add_actions(choices, character, outcome)
choices.generate_actions(character)
if __name__ == "__main__":
main()
|
Refactor combat code to be more concise
|
Refactor combat code to be more concise
|
Python
|
apache-2.0
|
SageBerg/St.GeorgeGame,SageBerg/St.GeorgeGame,SageBerg/St.GeorgeGame,SageBerg/St.GeorgeGame
|
python
|
## Code Before:
import places
from character import Character
import actions
import options
from multiple_choice import MultipleChoice
def combat(character):
"""
takes in a character, returns outcome of fight
"""
return actions.Attack(character.person).get_outcome(character)
def main():
"""
The goal is to have the main function operate as follows:
Set up the initial state
Display the initial message
Display the initial options
Choose an action
Get an outcome
Display results of the outcomes
Outcome changes game state
"""
character = Character()
character.place = places.tavern
choices = MultipleChoice()
options.set_initial_actions(choices)
print("\n---The St. George Game---\n")
print("You are in a tavern. The local assassins hate you.")
while character.alive and character.alone and not character.lose:
action = choices.choose_action()
if not character.threatened or action.combat_action:
outcome = action.get_outcome(character)
else:
outcome = combat(character)
if not character.alive:
break
outcome.execute()
options.add_actions(choices, character, outcome)
choices.generate_actions(character)
if __name__ == "__main__":
main()
## Instruction:
Refactor combat code to be more concise
## Code After:
import places
from character import Character
import actions
import options
from multiple_choice import MultipleChoice
def main():
"""
The goal is to have the main function operate as follows:
Set up the initial state
Display the initial message
Display the initial options
Choose an action
Get an outcome
Display results of the outcomes
Outcome changes game state
"""
character = Character(place=places.tavern)
choices = MultipleChoice()
options.set_initial_actions(choices)
print("\n---The St. George Game---\n")
print("You are in a tavern. The local assassins hate you.")
while character.alive and character.alone and not character.lose:
action = choices.choose_action()
if not character.threatened or action.combat_action:
outcome = action.get_outcome(character)
else:
outcome = actions.Attack(character.person).get_outcome(character)
outcome.execute()
options.add_actions(choices, character, outcome)
choices.generate_actions(character)
if __name__ == "__main__":
main()
|
...
import actions
import options
from multiple_choice import MultipleChoice
def main():
...
Display results of the outcomes
Outcome changes game state
"""
character = Character(place=places.tavern)
choices = MultipleChoice()
options.set_initial_actions(choices)
print("\n---The St. George Game---\n")
...
if not character.threatened or action.combat_action:
outcome = action.get_outcome(character)
else:
outcome = actions.Attack(character.person).get_outcome(character)
outcome.execute()
options.add_actions(choices, character, outcome)
choices.generate_actions(character)
...
|
2ed4bea13d44007854be4260cc20341ae8b71420
|
genomics/sparkcaller/src/main/java/com/github/sparkcaller/variantdiscovery/VQSRRecalibrationApplier.java
|
genomics/sparkcaller/src/main/java/com/github/sparkcaller/variantdiscovery/VQSRRecalibrationApplier.java
|
package com.github.sparkcaller.variantdiscovery;
import com.github.sparkcaller.BaseGATKProgram;
import com.github.sparkcaller.Utils;
import scala.Tuple2;
import java.io.File;
public class VQSRRecalibrationApplier extends BaseGATKProgram {
public VQSRRecalibrationApplier(String pathToReference, String extraArgs, String coresPerNode) {
super("ApplyRecalibration", extraArgs);
setReference(pathToReference);
addArgument("-nt", coresPerNode);
}
public static String constructRecalFilename(String vcfToRecalibrate, String mode) {
return Utils.removeExtenstion(vcfToRecalibrate, "vcf") + "-recalib-" + mode + ".vcf";
}
public File applyRecalibration(File vcfToRecalibrate, Tuple2<File, File> recalibrationFiles, String mode) {
File recalibrationFile = recalibrationFiles._1;
File tranchesFile = recalibrationFiles._2;
changeArgument("-mode", mode);
changeArgument("-input", vcfToRecalibrate.getPath());
changeArgument("-tranchesFile", tranchesFile.getPath());
changeArgument("-recalFile", recalibrationFile.getPath());
File recalibratedOutput = new File(constructRecalFilename(vcfToRecalibrate.getPath(), mode));
setOutputFile(recalibratedOutput.getPath());
return recalibratedOutput;
}
}
|
package com.github.sparkcaller.variantdiscovery;
import com.github.sparkcaller.BaseGATKProgram;
import com.github.sparkcaller.Utils;
import scala.Tuple2;
import java.io.File;
/*
* Applies the recalibration targets generated by VQSRTarget generator.
*
* See:
* https://software.broadinstitute.org/gatk/gatkdocs/org_broadinstitute_gatk_tools_walkers_variantrecalibration_ApplyRecalibration.php
*
* For more documentation.
*/
public class VQSRRecalibrationApplier extends BaseGATKProgram {
public VQSRRecalibrationApplier(String pathToReference, String extraArgs, String coresPerNode) {
super("ApplyRecalibration", extraArgs);
setReference(pathToReference);
addArgument("-nt", coresPerNode);
}
public static String constructRecalFilename(String vcfToRecalibrate, String mode) {
return Utils.removeExtenstion(vcfToRecalibrate, "vcf") + "-recalib-" + mode + ".vcf";
}
public File applyRecalibration(File vcfToRecalibrate, Tuple2<File, File> recalibrationFiles, String mode) {
File recalibrationFile = recalibrationFiles._1;
File tranchesFile = recalibrationFiles._2;
changeArgument("-mode", mode);
changeArgument("-input", vcfToRecalibrate.getPath());
changeArgument("-tranchesFile", tranchesFile.getPath());
changeArgument("-recalFile", recalibrationFile.getPath());
File recalibratedOutput = new File(constructRecalFilename(vcfToRecalibrate.getPath(), mode));
setOutputFile(recalibratedOutput.getPath());
return recalibratedOutput;
}
}
|
Add a brief explanation of what the class does.
|
[Genomics]: Add a brief explanation of what the class does.
|
Java
|
mit
|
UNINETT/daas-apps,UNINETT/daas-apps
|
java
|
## Code Before:
package com.github.sparkcaller.variantdiscovery;
import com.github.sparkcaller.BaseGATKProgram;
import com.github.sparkcaller.Utils;
import scala.Tuple2;
import java.io.File;
public class VQSRRecalibrationApplier extends BaseGATKProgram {
public VQSRRecalibrationApplier(String pathToReference, String extraArgs, String coresPerNode) {
super("ApplyRecalibration", extraArgs);
setReference(pathToReference);
addArgument("-nt", coresPerNode);
}
public static String constructRecalFilename(String vcfToRecalibrate, String mode) {
return Utils.removeExtenstion(vcfToRecalibrate, "vcf") + "-recalib-" + mode + ".vcf";
}
public File applyRecalibration(File vcfToRecalibrate, Tuple2<File, File> recalibrationFiles, String mode) {
File recalibrationFile = recalibrationFiles._1;
File tranchesFile = recalibrationFiles._2;
changeArgument("-mode", mode);
changeArgument("-input", vcfToRecalibrate.getPath());
changeArgument("-tranchesFile", tranchesFile.getPath());
changeArgument("-recalFile", recalibrationFile.getPath());
File recalibratedOutput = new File(constructRecalFilename(vcfToRecalibrate.getPath(), mode));
setOutputFile(recalibratedOutput.getPath());
return recalibratedOutput;
}
}
## Instruction:
[Genomics]: Add a brief explanation of what the class does.
## Code After:
package com.github.sparkcaller.variantdiscovery;
import com.github.sparkcaller.BaseGATKProgram;
import com.github.sparkcaller.Utils;
import scala.Tuple2;
import java.io.File;
/*
* Applies the recalibration targets generated by VQSRTarget generator.
*
* See:
* https://software.broadinstitute.org/gatk/gatkdocs/org_broadinstitute_gatk_tools_walkers_variantrecalibration_ApplyRecalibration.php
*
* For more documentation.
*/
public class VQSRRecalibrationApplier extends BaseGATKProgram {
public VQSRRecalibrationApplier(String pathToReference, String extraArgs, String coresPerNode) {
super("ApplyRecalibration", extraArgs);
setReference(pathToReference);
addArgument("-nt", coresPerNode);
}
public static String constructRecalFilename(String vcfToRecalibrate, String mode) {
return Utils.removeExtenstion(vcfToRecalibrate, "vcf") + "-recalib-" + mode + ".vcf";
}
public File applyRecalibration(File vcfToRecalibrate, Tuple2<File, File> recalibrationFiles, String mode) {
File recalibrationFile = recalibrationFiles._1;
File tranchesFile = recalibrationFiles._2;
changeArgument("-mode", mode);
changeArgument("-input", vcfToRecalibrate.getPath());
changeArgument("-tranchesFile", tranchesFile.getPath());
changeArgument("-recalFile", recalibrationFile.getPath());
File recalibratedOutput = new File(constructRecalFilename(vcfToRecalibrate.getPath(), mode));
setOutputFile(recalibratedOutput.getPath());
return recalibratedOutput;
}
}
|
// ... existing code ...
import scala.Tuple2;
import java.io.File;
/*
* Applies the recalibration targets generated by VQSRTarget generator.
*
* See:
* https://software.broadinstitute.org/gatk/gatkdocs/org_broadinstitute_gatk_tools_walkers_variantrecalibration_ApplyRecalibration.php
*
* For more documentation.
*/
public class VQSRRecalibrationApplier extends BaseGATKProgram {
public VQSRRecalibrationApplier(String pathToReference, String extraArgs, String coresPerNode) {
// ... rest of the code ...
|
e7cc9a045b72673583b9a813d83f5e7a280979cf
|
trunk/MutabilityDetector/trunk/MutabilityDetector/src/main/benchmarks/org/mutabilitydetector/benchmarks/settermethod/ImmutableButSetsPrivateFieldOfInstanceOfSelf.java
|
trunk/MutabilityDetector/trunk/MutabilityDetector/src/main/benchmarks/org/mutabilitydetector/benchmarks/settermethod/ImmutableButSetsPrivateFieldOfInstanceOfSelf.java
|
/*
* Mutability Detector
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* Further licensing information for this project can be found in
* license/LICENSE.txt
*/
package org.mutabilitydetector.benchmarks.settermethod;
@SuppressWarnings("unused")
public class ImmutableButSetsPrivateFieldOfInstanceOfSelf {
private int myField = 0;
private ImmutableButSetsPrivateFieldOfInstanceOfSelf fieldOfSelfType = null;
public ImmutableButSetsPrivateFieldOfInstanceOfSelf setPrivateFieldOnInstanceOfSelf() {
ImmutableButSetsPrivateFieldOfInstanceOfSelf i = new ImmutableButSetsPrivateFieldOfInstanceOfSelf();
this.hashCode();
i.myField = 10;
this.hashCode();
i.myField = 11;
return i;
}
}
class MutableBySettingFieldOnThisInstance {
@SuppressWarnings("unused")
private int myField = 0;
public void setMyField(int newMyField) {
this.myField = newMyField;
}
}
|
/*
* Mutability Detector
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* Further licensing information for this project can be found in
* license/LICENSE.txt
*/
package org.mutabilitydetector.benchmarks.settermethod;
@SuppressWarnings("unused")
public class ImmutableButSetsPrivateFieldOfInstanceOfSelf {
private int myField = 0;
private ImmutableButSetsPrivateFieldOfInstanceOfSelf fieldOfSelfType = null;
public ImmutableButSetsPrivateFieldOfInstanceOfSelf setPrivateFieldOnInstanceOfSelf() {
ImmutableButSetsPrivateFieldOfInstanceOfSelf i = new ImmutableButSetsPrivateFieldOfInstanceOfSelf();
this.hashCode();
i.myField = 10;
this.hashCode();
i.myField = 11;
return i;
}
}
class MutableBySettingFieldOnThisInstanceAndOtherInstance {
@SuppressWarnings("unused")
private int myField = 0;
public void setMyField(int newMyField, MutableBySettingFieldOnThisInstanceAndOtherInstance otherInstance) {
this.myField = newMyField;
otherInstance.myField = 42;
}
}
|
Update benchmark for setting fields of this type.
|
Update benchmark for setting fields of this type.
git-svn-id: ed609ce04ec9e3c0bc25e071e87814dd6d976548@269 c7a0535c-eda6-11de-83d8-6d5adf01d787
|
Java
|
apache-2.0
|
MutabilityDetector/MutabilityDetector,MutabilityDetector/MutabilityDetector
|
java
|
## Code Before:
/*
* Mutability Detector
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* Further licensing information for this project can be found in
* license/LICENSE.txt
*/
package org.mutabilitydetector.benchmarks.settermethod;
@SuppressWarnings("unused")
public class ImmutableButSetsPrivateFieldOfInstanceOfSelf {
private int myField = 0;
private ImmutableButSetsPrivateFieldOfInstanceOfSelf fieldOfSelfType = null;
public ImmutableButSetsPrivateFieldOfInstanceOfSelf setPrivateFieldOnInstanceOfSelf() {
ImmutableButSetsPrivateFieldOfInstanceOfSelf i = new ImmutableButSetsPrivateFieldOfInstanceOfSelf();
this.hashCode();
i.myField = 10;
this.hashCode();
i.myField = 11;
return i;
}
}
class MutableBySettingFieldOnThisInstance {
@SuppressWarnings("unused")
private int myField = 0;
public void setMyField(int newMyField) {
this.myField = newMyField;
}
}
## Instruction:
Update benchmark for setting fields of this type.
git-svn-id: ed609ce04ec9e3c0bc25e071e87814dd6d976548@269 c7a0535c-eda6-11de-83d8-6d5adf01d787
## Code After:
/*
* Mutability Detector
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* Further licensing information for this project can be found in
* license/LICENSE.txt
*/
package org.mutabilitydetector.benchmarks.settermethod;
@SuppressWarnings("unused")
public class ImmutableButSetsPrivateFieldOfInstanceOfSelf {
private int myField = 0;
private ImmutableButSetsPrivateFieldOfInstanceOfSelf fieldOfSelfType = null;
public ImmutableButSetsPrivateFieldOfInstanceOfSelf setPrivateFieldOnInstanceOfSelf() {
ImmutableButSetsPrivateFieldOfInstanceOfSelf i = new ImmutableButSetsPrivateFieldOfInstanceOfSelf();
this.hashCode();
i.myField = 10;
this.hashCode();
i.myField = 11;
return i;
}
}
class MutableBySettingFieldOnThisInstanceAndOtherInstance {
@SuppressWarnings("unused")
private int myField = 0;
public void setMyField(int newMyField, MutableBySettingFieldOnThisInstanceAndOtherInstance otherInstance) {
this.myField = newMyField;
otherInstance.myField = 42;
}
}
|
# ... existing code ...
}
class MutableBySettingFieldOnThisInstanceAndOtherInstance {
@SuppressWarnings("unused")
private int myField = 0;
public void setMyField(int newMyField, MutableBySettingFieldOnThisInstanceAndOtherInstance otherInstance) {
this.myField = newMyField;
otherInstance.myField = 42;
}
}
# ... rest of the code ...
|
7c3c0fac58822bcfa7bbd69de5ce46b36f84740c
|
regulations/generator/link_flattener.py
|
regulations/generator/link_flattener.py
|
import re
# <a> followed by another <a> without any intervening </a>s
link_inside_link_regex = re.compile(
ur"(?P<outer_link><a ((?!</a>).)*)(<a ((?!</a>).)*>"
ur"(?P<internal_content>((?!</a>).)*)</a>)",
re.IGNORECASE | re.DOTALL)
def flatten_links(text):
"""
Fix <a> elements that have embedded <a> elements by
replacing the internal <a> element with its content.
Assumes that the text does not span multiple lines and that
the <a> tags are lowercase.
"""
while True:
text, sub_count = link_inside_link_regex.subn(
ur"\g<outer_link>\g<internal_content>", text)
if sub_count == 0:
return text # Return only when no more subs possible
|
import re
# <a> followed by another <a> without any intervening </a>s
# outer_link - partial outer element up to the inner link
# inner_content - content of the inner_link
link_inside_link_regex = re.compile(
ur"(?P<outer_link><a ((?!</a>).)*)<a .*?>(?P<inner_content>.*?)</a>",
re.IGNORECASE | re.DOTALL)
def flatten_links(text):
"""
Fix <a> elements that have embedded <a> elements by
replacing the internal <a> element with its content.
"""
while True:
text, sub_count = link_inside_link_regex.subn(
ur"\g<outer_link>\g<inner_content>", text)
if sub_count == 0:
return text
|
Simplify regex using non-greedy qualifier
|
Simplify regex using non-greedy qualifier
|
Python
|
cc0-1.0
|
18F/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site,18F/regulations-site,tadhg-ohiggins/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site,18F/regulations-site,18F/regulations-site,eregs/regulations-site,eregs/regulations-site,tadhg-ohiggins/regulations-site
|
python
|
## Code Before:
import re
# <a> followed by another <a> without any intervening </a>s
link_inside_link_regex = re.compile(
ur"(?P<outer_link><a ((?!</a>).)*)(<a ((?!</a>).)*>"
ur"(?P<internal_content>((?!</a>).)*)</a>)",
re.IGNORECASE | re.DOTALL)
def flatten_links(text):
"""
Fix <a> elements that have embedded <a> elements by
replacing the internal <a> element with its content.
Assumes that the text does not span multiple lines and that
the <a> tags are lowercase.
"""
while True:
text, sub_count = link_inside_link_regex.subn(
ur"\g<outer_link>\g<internal_content>", text)
if sub_count == 0:
return text # Return only when no more subs possible
## Instruction:
Simplify regex using non-greedy qualifier
## Code After:
import re
# <a> followed by another <a> without any intervening </a>s
# outer_link - partial outer element up to the inner link
# inner_content - content of the inner_link
link_inside_link_regex = re.compile(
ur"(?P<outer_link><a ((?!</a>).)*)<a .*?>(?P<inner_content>.*?)</a>",
re.IGNORECASE | re.DOTALL)
def flatten_links(text):
"""
Fix <a> elements that have embedded <a> elements by
replacing the internal <a> element with its content.
"""
while True:
text, sub_count = link_inside_link_regex.subn(
ur"\g<outer_link>\g<inner_content>", text)
if sub_count == 0:
return text
|
// ... existing code ...
import re
# <a> followed by another <a> without any intervening </a>s
# outer_link - partial outer element up to the inner link
# inner_content - content of the inner_link
link_inside_link_regex = re.compile(
ur"(?P<outer_link><a ((?!</a>).)*)<a .*?>(?P<inner_content>.*?)</a>",
re.IGNORECASE | re.DOTALL)
// ... modified code ...
"""
Fix <a> elements that have embedded <a> elements by
replacing the internal <a> element with its content.
"""
while True:
text, sub_count = link_inside_link_regex.subn(
ur"\g<outer_link>\g<inner_content>", text)
if sub_count == 0:
return text
// ... rest of the code ...
|
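A quick illustration of why the non-greedy rewrite in the record above is enough: `.*?` stops at the first `</a>` after the nested opening tag, so the tempered `((?!</a>).)*` construction is only still needed for the outer link prefix. The sketch below re-runs the same pattern and loop under Python 3 (the Python 2 `ur''` prefix becomes a plain raw string); the sample HTML is made up for illustration.

import re

# Same pattern as in the record above, minus the Python 2 'ur' prefix.
link_inside_link = re.compile(
    r"(?P<outer_link><a ((?!</a>).)*)<a .*?>(?P<inner_content>.*?)</a>",
    re.IGNORECASE | re.DOTALL)

def flatten_links(text):
    # Keep substituting until no nested <a> remains, as in the subn() loop.
    while True:
        text, count = link_inside_link.subn(
            r"\g<outer_link>\g<inner_content>", text)
        if count == 0:
            return text

sample = '<a href="/outer">see <a href="/inner">this</a> here</a>'
print(flatten_links(sample))
# prints: <a href="/outer">see this here</a>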
fa1953b4f475c7a23fb99def73aef6b4059d4f54
|
src/jmh/java/org/apache/poi/benchmark/suite/TestBenchmarks.java
|
src/jmh/java/org/apache/poi/benchmark/suite/TestBenchmarks.java
|
package org.apache.poi.benchmark.suite;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Setup;
import java.io.IOException;
public class TestBenchmarks extends BaseBenchmark {
@Setup
public void setUp() throws IOException {
compileAll();
}
@Benchmark
public void benchmarkTestMain() throws IOException {
testMain();
}
@Benchmark
public void benchmarkTestScratchpad() throws IOException {
testScratchpad();
}
@Benchmark
public void benchmarkTestOOXML() throws IOException {
testOOXML();
}
@Benchmark
public void benchmarkTestOOXMLLite() throws IOException {
testOOXMLLite();
}
@Benchmark
public void benchmarkTestExcelant() throws IOException {
testExcelant();
}
// exclude for now as it always fails and I could not find out why
// @Benchmark
public void benchmarkTestIntegration() throws IOException {
testIntegration();
}
}
|
package org.apache.poi.benchmark.suite;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Setup;
import java.io.IOException;
public class TestBenchmarks extends BaseBenchmark {
@Setup
public void setUp() throws IOException {
compileAll();
}
@Benchmark
public void benchmarkTestMain() throws IOException {
testMain();
}
@Benchmark
public void benchmarkTestScratchpad() throws IOException {
testScratchpad();
}
@Benchmark
public void benchmarkTestOOXML() throws IOException {
testOOXML();
}
@Benchmark
public void benchmarkTestOOXMLLite() throws IOException {
testOOXMLLite();
}
@Benchmark
public void benchmarkTestExcelant() throws IOException {
testExcelant();
}
@Benchmark
public void benchmarkTestIntegration() throws IOException {
testIntegration();
}
}
|
Enable integration-test to start investigating why it fails
|
Enable integration-test to start investigating why it fails
|
Java
|
apache-2.0
|
centic9/poi-benchmark,centic9/poi-benchmark,centic9/poi-benchmark
|
java
|
## Code Before:
package org.apache.poi.benchmark.suite;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Setup;
import java.io.IOException;
public class TestBenchmarks extends BaseBenchmark {
@Setup
public void setUp() throws IOException {
compileAll();
}
@Benchmark
public void benchmarkTestMain() throws IOException {
testMain();
}
@Benchmark
public void benchmarkTestScratchpad() throws IOException {
testScratchpad();
}
@Benchmark
public void benchmarkTestOOXML() throws IOException {
testOOXML();
}
@Benchmark
public void benchmarkTestOOXMLLite() throws IOException {
testOOXMLLite();
}
@Benchmark
public void benchmarkTestExcelant() throws IOException {
testExcelant();
}
// exclude for now as it always fails and I could not find out why
// @Benchmark
public void benchmarkTestIntegration() throws IOException {
testIntegration();
}
}
## Instruction:
Enable integration-test to start investigating why it fails
## Code After:
package org.apache.poi.benchmark.suite;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Setup;
import java.io.IOException;
public class TestBenchmarks extends BaseBenchmark {
@Setup
public void setUp() throws IOException {
compileAll();
}
@Benchmark
public void benchmarkTestMain() throws IOException {
testMain();
}
@Benchmark
public void benchmarkTestScratchpad() throws IOException {
testScratchpad();
}
@Benchmark
public void benchmarkTestOOXML() throws IOException {
testOOXML();
}
@Benchmark
public void benchmarkTestOOXMLLite() throws IOException {
testOOXMLLite();
}
@Benchmark
public void benchmarkTestExcelant() throws IOException {
testExcelant();
}
@Benchmark
public void benchmarkTestIntegration() throws IOException {
testIntegration();
}
}
|
...
testExcelant();
}
@Benchmark
public void benchmarkTestIntegration() throws IOException {
testIntegration();
}
...
|
5d17fe6dc3714cffe951b90be20ca011c32ee220
|
mobile/src/main/java/uk/co/czcz/speedreader/MainActivity.java
|
mobile/src/main/java/uk/co/czcz/speedreader/MainActivity.java
|
package uk.co.czcz.speedreader;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (getSupportFragmentManager().findFragmentById(R.id.fragment_container) == null)
{
// Initial run of this activity, so lets display a fragment
getSupportFragmentManager().beginTransaction().add(new SpeedReadingFragment(), SpeedReadingFragment.TAG).commit();
}
}
}
|
package uk.co.czcz.speedreader;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (getSupportFragmentManager().findFragmentById(R.id.fragment_container) == null)
{
displayInitialSpeedReadingFragment();
}
}
private void displayInitialSpeedReadingFragment() {
getSupportFragmentManager().beginTransaction().add(new SpeedReadingFragment(), SpeedReadingFragment.TAG).commit();
}
}
|
Refactor to make it obvious what's going on.
|
Refactor to make it obvious what's going on.
|
Java
|
apache-2.0
|
ElFeesho/SpeedReader
|
java
|
## Code Before:
package uk.co.czcz.speedreader;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (getSupportFragmentManager().findFragmentById(R.id.fragment_container) == null)
{
// Initial run of this activity, so lets display a fragment
getSupportFragmentManager().beginTransaction().add(new SpeedReadingFragment(), SpeedReadingFragment.TAG).commit();
}
}
}
## Instruction:
Refactor to make it obvious what's going on.
## Code After:
package uk.co.czcz.speedreader;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (getSupportFragmentManager().findFragmentById(R.id.fragment_container) == null)
{
displayInitialSpeedReadingFragment();
}
}
private void displayInitialSpeedReadingFragment() {
getSupportFragmentManager().beginTransaction().add(new SpeedReadingFragment(), SpeedReadingFragment.TAG).commit();
}
}
|
// ... existing code ...
if (getSupportFragmentManager().findFragmentById(R.id.fragment_container) == null)
{
displayInitialSpeedReadingFragment();
}
}
private void displayInitialSpeedReadingFragment() {
getSupportFragmentManager().beginTransaction().add(new SpeedReadingFragment(), SpeedReadingFragment.TAG).commit();
}
}
// ... rest of the code ...
|
faa5072cffecb1f37e7415f58a527863311cd583
|
Speedcivilization/src/org/fountanio/world/World.java
|
Speedcivilization/src/org/fountanio/world/World.java
|
package org.fountanio.world;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import org.fountanio.world.Tile;
import org.fountanio.juancode.out.Main;
public class World {
private Tile[] world;
public final int MAX_WIDTH = 120;
public final int MAX_HEIGHT = 40;
private int tick = 0;
public void loadWorld(String path) throws IOException {
File file = new File(path);
Reader re = new FileReader(file);
BufferedReader reader = new BufferedReader(re);
if (file.exists()) {
Main.getConsole().println("Reading map...");
while (reader.ready()) {
String parse[] = reader.readLine().trim().split(",");
tick++;
}
Main.getConsole().println("Done!");
re.close();
reader.close();
} else {
Main.getConsole().errorln(path + " was not found!");
}
}
public void render() {
}
}
|
package org.fountanio.world;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import org.fountanio.world.Tile;
import org.fountanio.juancode.out.Main;
public class World {
private Tile[] world;
public static final int MAX_WIDTH = 120;
public static final int MAX_HEIGHT = 40;
private int tick = 0;
public void loadWorld(String path) throws IOException {
File file = new File(path);
Reader re = new FileReader(file);
BufferedReader reader = new BufferedReader(re);
if (file.exists()) {
Main.getConsole().println("Reading map...");
while (reader.ready()) {
String parse[] = reader.readLine().trim().split(",");
tick++;
}
Main.getConsole().println("Done!");
re.close();
reader.close();
} else {
Main.getConsole().errorln(path + " was not found!");
}
}
public void render() {
// lil wot
}
}
|
Make MAX_WIDTH and MAX_HEIGHT static
|
Make MAX_WIDTH and MAX_HEIGHT static
|
Java
|
apache-2.0
|
JavaCakess/Speedcivilization
|
java
|
## Code Before:
package org.fountanio.world;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import org.fountanio.world.Tile;
import org.fountanio.juancode.out.Main;
public class World {
private Tile[] world;
public final int MAX_WIDTH = 120;
public final int MAX_HEIGHT = 40;
private int tick = 0;
public void loadWorld(String path) throws IOException {
File file = new File(path);
Reader re = new FileReader(file);
BufferedReader reader = new BufferedReader(re);
if (file.exists()) {
Main.getConsole().println("Reading map...");
while (reader.ready()) {
String parse[] = reader.readLine().trim().split(",");
tick++;
}
Main.getConsole().println("Done!");
re.close();
reader.close();
} else {
Main.getConsole().errorln(path + " was not found!");
}
}
public void render() {
}
}
## Instruction:
Make MAX_WIDTH and MAX_HEIGHT static
## Code After:
package org.fountanio.world;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import org.fountanio.world.Tile;
import org.fountanio.juancode.out.Main;
public class World {
private Tile[] world;
public static final int MAX_WIDTH = 120;
public static final int MAX_HEIGHT = 40;
private int tick = 0;
public void loadWorld(String path) throws IOException {
File file = new File(path);
Reader re = new FileReader(file);
BufferedReader reader = new BufferedReader(re);
if (file.exists()) {
Main.getConsole().println("Reading map...");
while (reader.ready()) {
String parse[] = reader.readLine().trim().split(",");
tick++;
}
Main.getConsole().println("Done!");
re.close();
reader.close();
} else {
Main.getConsole().errorln(path + " was not found!");
}
}
public void render() {
// lil wot
}
}
|
...
public class World {
private Tile[] world;
public static final int MAX_WIDTH = 120;
public static final int MAX_HEIGHT = 40;
private int tick = 0;
public void loadWorld(String path) throws IOException {
...
}
public void render() {
// lil wot
}
}
...
|
6cedfb17afbb3a869336d23cefdfcae1a65754f9
|
tests/test_check.py
|
tests/test_check.py
|
import unittest
from binaryornot import check
class TestIsBinary(unittest.TestCase):
def setUp(self):
pass
def test_is_binary(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
import unittest
from binaryornot.check import is_binary
class TestIsBinary(unittest.TestCase):
def test_css(self):
self.assertFalse(is_binary('tests/files/bootstrap-glyphicons.css'))
def test_json(self):
self.assertFalse(is_binary('tests/files/cookiecutter.json'))
def test_eot(self):
self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.eot'))
def test_otf(self):
self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.otf'))
def test_svg(self):
self.assertFalse(is_binary('tests/files/glyphiconshalflings-regular.svg'))
def test_ttf(self):
self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.ttf'))
def test_woff(self):
self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.woff'))
def test_txt(self):
self.assertFalse(is_binary('tests/files/robots.txt'))
if __name__ == '__main__':
unittest.main()
|
Add lots of miserably failing tests.
|
Add lots of miserably failing tests.
|
Python
|
bsd-3-clause
|
pombredanne/binaryornot,0k/binaryornot,pombredanne/binaryornot,pombredanne/binaryornot,audreyr/binaryornot,audreyr/binaryornot,hackebrot/binaryornot,hackebrot/binaryornot,0k/binaryornot,audreyr/binaryornot,hackebrot/binaryornot
|
python
|
## Code Before:
import unittest
from binaryornot import check
class TestIsBinary(unittest.TestCase):
def setUp(self):
pass
def test_is_binary(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
## Instruction:
Add lots of miserably failing tests.
## Code After:
import unittest
from binaryornot.check import is_binary
class TestIsBinary(unittest.TestCase):
def test_css(self):
self.assertFalse(is_binary('tests/files/bootstrap-glyphicons.css'))
def test_json(self):
self.assertFalse(is_binary('tests/files/cookiecutter.json'))
def test_eot(self):
self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.eot'))
def test_otf(self):
self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.otf'))
def test_svg(self):
self.assertFalse(is_binary('tests/files/glyphiconshalflings-regular.svg'))
def test_ttf(self):
self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.ttf'))
def test_woff(self):
self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.woff'))
def test_txt(self):
self.assertFalse(is_binary('tests/files/robots.txt'))
if __name__ == '__main__':
unittest.main()
|
// ... existing code ...
import unittest
from binaryornot.check import is_binary
class TestIsBinary(unittest.TestCase):
def test_css(self):
self.assertFalse(is_binary('tests/files/bootstrap-glyphicons.css'))
def test_json(self):
self.assertFalse(is_binary('tests/files/cookiecutter.json'))
def test_eot(self):
self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.eot'))
def test_otf(self):
self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.otf'))
def test_svg(self):
self.assertFalse(is_binary('tests/files/glyphiconshalflings-regular.svg'))
def test_ttf(self):
self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.ttf'))
def test_woff(self):
self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.woff'))
def test_txt(self):
self.assertFalse(is_binary('tests/files/robots.txt'))
if __name__ == '__main__':
unittest.main()
// ... rest of the code ...
|
d3a9657b7318327a59c3eee08a25f1e5c4ba4edf
|
django_casscache.py
|
django_casscache.py
|
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
|
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
def noop_make_key(key, *args, **kwargs):
"""
For use with KEY_FUNCTION, to not alter the key name at all.
"""
return key
|
Add a method to noop the make_key in Django
|
Add a method to noop the make_key in Django
|
Python
|
bsd-3-clause
|
mattrobenolt/django-casscache
|
python
|
## Code Before:
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
## Instruction:
Add a method to noop the make_key in Django
## Code After:
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
def noop_make_key(key, *args, **kwargs):
"""
For use with KEY_FUNCTION, to not alter the key name at all.
"""
return key
|
...
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
def noop_make_key(key, *args, **kwargs):
"""
For use with KEY_FUNCTION, to not alter the key name at all.
"""
return key
...
|
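The `noop_make_key` helper above only takes effect once Django is pointed at it: `KEY_FUNCTION` in the `CACHES` setting accepts a dotted path to a function taking `(key, key_prefix, version)`, and the default implementation prefixes every key with `KEY_PREFIX` and the cache version. A rough sketch of wiring it up, assuming the module is importable as `django_casscache`; the location, keyspace and column family values are placeholders rather than anything prescribed by the backend.

# settings.py -- illustrative configuration for the backend defined above
CACHES = {
    "default": {
        "BACKEND": "django_casscache.CasscacheCache",
        "LOCATION": "127.0.0.1:9160",                      # placeholder Cassandra node
        "KEY_FUNCTION": "django_casscache.noop_make_key",  # store keys exactly as given
        "OPTIONS": {
            "keyspace": "app_cache",        # popped by the _cache property
            "columnfamily": "cache_rows",   # popped by the _cache property
        },
    },
}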
c2aee38958e9e4fca9545a54c6306a3bdb012f05
|
src/main/java/org/openlmis/fulfillment/service/ResourceNames.java
|
src/main/java/org/openlmis/fulfillment/service/ResourceNames.java
|
/*
* This program is part of the OpenLMIS logistics management information system platform software.
* Copyright © 2017 VillageReach
*
* This program is free software: you can redistribute it and/or modify it under the terms
* of the GNU Affero General Public License as published by the Free Software Foundation, either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License for more details. You should have received a copy of
* the GNU Affero General Public License along with this program. If not, see
* http://www.gnu.org/licenses. For additional information contact [email protected].
*/
package org.openlmis.fulfillment.service;
import org.apache.commons.lang3.StringUtils;
public class ResourceNames {
private ResourceNames() {}
public static final String SEPARATOR = "/";
public static final String BASE_PATH = "/api";
public static final String USERS = "users";
public static String getUsersPath() {
return getPath(USERS);
}
private static String getPath(String resourseName) {
return StringUtils.joinWith(SEPARATOR, BASE_PATH, resourseName) + SEPARATOR;
}
}
|
/*
* This program is part of the OpenLMIS logistics management information system platform software.
* Copyright © 2017 VillageReach
*
* This program is free software: you can redistribute it and/or modify it under the terms
* of the GNU Affero General Public License as published by the Free Software Foundation, either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License for more details. You should have received a copy of
* the GNU Affero General Public License along with this program. If not, see
* http://www.gnu.org/licenses. For additional information contact [email protected].
*/
package org.openlmis.fulfillment.service;
import org.apache.commons.lang3.StringUtils;
public class ResourceNames {
public static final String SEPARATOR = "/";
public static final String BASE_PATH = "/api";
public static final String USERS = "users";
private ResourceNames() {}
public static String getUsersPath() {
return getPath(USERS);
}
private static String getPath(String resourseName) {
return StringUtils.joinWith(SEPARATOR, BASE_PATH, resourseName) + SEPARATOR;
}
}
|
Move variable to comply with Java Code Conventions.
|
OLMIS-3608: Move variable to comply with Java Code Conventions.
|
Java
|
agpl-3.0
|
OpenLMIS/openlmis-fulfillment,OpenLMIS/openlmis-fulfillment,OpenLMIS/openlmis-fulfillment,OpenLMIS/openlmis-fulfillment
|
java
|
## Code Before:
/*
* This program is part of the OpenLMIS logistics management information system platform software.
* Copyright © 2017 VillageReach
*
* This program is free software: you can redistribute it and/or modify it under the terms
* of the GNU Affero General Public License as published by the Free Software Foundation, either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License for more details. You should have received a copy of
* the GNU Affero General Public License along with this program. If not, see
* http://www.gnu.org/licenses. For additional information contact [email protected].
*/
package org.openlmis.fulfillment.service;
import org.apache.commons.lang3.StringUtils;
public class ResourceNames {
private ResourceNames() {}
public static final String SEPARATOR = "/";
public static final String BASE_PATH = "/api";
public static final String USERS = "users";
public static String getUsersPath() {
return getPath(USERS);
}
private static String getPath(String resourseName) {
return StringUtils.joinWith(SEPARATOR, BASE_PATH, resourseName) + SEPARATOR;
}
}
## Instruction:
OLMIS-3608: Move variable to comply with Java Code Conventions.
## Code After:
/*
* This program is part of the OpenLMIS logistics management information system platform software.
* Copyright © 2017 VillageReach
*
* This program is free software: you can redistribute it and/or modify it under the terms
* of the GNU Affero General Public License as published by the Free Software Foundation, either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License for more details. You should have received a copy of
* the GNU Affero General Public License along with this program. If not, see
* http://www.gnu.org/licenses. For additional information contact [email protected].
*/
package org.openlmis.fulfillment.service;
import org.apache.commons.lang3.StringUtils;
public class ResourceNames {
public static final String SEPARATOR = "/";
public static final String BASE_PATH = "/api";
public static final String USERS = "users";
private ResourceNames() {}
public static String getUsersPath() {
return getPath(USERS);
}
private static String getPath(String resourseName) {
return StringUtils.joinWith(SEPARATOR, BASE_PATH, resourseName) + SEPARATOR;
}
}
|
# ... existing code ...
import org.apache.commons.lang3.StringUtils;
public class ResourceNames {
public static final String SEPARATOR = "/";
public static final String BASE_PATH = "/api";
public static final String USERS = "users";
private ResourceNames() {}
public static String getUsersPath() {
return getPath(USERS);
# ... rest of the code ...
|
4725a80e6a02a08ef6081eac9261cb420bdc1fee
|
django_countries/templatetags/countries.py
|
django_countries/templatetags/countries.py
|
import django
from django import template
from django_countries.fields import Country, countries
register = template.Library()
if django.VERSION < (1, 9):
# Support older versions without implicit assignment support in simple_tag.
simple_tag = register.assignment_tag
else:
simple_tag = register.simple_tag
@simple_tag
def get_country(code):
return Country(code=code)
@simple_tag
def get_countries():
return list(countries)
|
import django
from django import template
from django_countries.fields import Country, countries
register = template.Library()
@register.simple_tag
def get_country(code):
return Country(code=code)
@register.simple_tag
def get_countries():
return list(countries)
|
Remove Django 1.9 simple_tag reference
|
Remove Django 1.9 simple_tag reference
|
Python
|
mit
|
SmileyChris/django-countries
|
python
|
## Code Before:
import django
from django import template
from django_countries.fields import Country, countries
register = template.Library()
if django.VERSION < (1, 9):
# Support older versions without implicit assignment support in simple_tag.
simple_tag = register.assignment_tag
else:
simple_tag = register.simple_tag
@simple_tag
def get_country(code):
return Country(code=code)
@simple_tag
def get_countries():
return list(countries)
## Instruction:
Remove Django 1.9 simple_tag reference
## Code After:
import django
from django import template
from django_countries.fields import Country, countries
register = template.Library()
@register.simple_tag
def get_country(code):
return Country(code=code)
@register.simple_tag
def get_countries():
return list(countries)
|
# ... existing code ...
import django
from django import template
from django_countries.fields import Country, countries
register = template.Library()
@register.simple_tag
def get_country(code):
return Country(code=code)
@register.simple_tag
def get_countries():
return list(countries)
# ... rest of the code ...
|
504d515f03fffdc999cc21a9615ea659a4e29f3b
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='Frappy',
version='0.1',
description='Framework for creating Web APIs in Python',
author='Luke Lee',
author_email='[email protected]',
url='http://github.com/durden/frappy',
packages=['frappy', 'frappy.services', 'frappy.core']
)
|
from distutils.core import setup
setup(name='Frappy',
version='0.1',
description='Framework for creating Web APIs in Python',
author='Luke Lee',
author_email='[email protected]',
url='http://github.com/durden/frappy',
packages=['frappy', 'frappy.services', 'frappy.core',
'frappy.services.twitter']
)
|
Add twitter services to install since they work again
|
Add twitter services to install since they work again
|
Python
|
mit
|
durden/frappy
|
python
|
## Code Before:
from distutils.core import setup
setup(name='Frappy',
version='0.1',
description='Framework for creating Web APIs in Python',
author='Luke Lee',
author_email='[email protected]',
url='http://github.com/durden/frappy',
packages=['frappy', 'frappy.services', 'frappy.core']
)
## Instruction:
Add twitter services to install since they work again
## Code After:
from distutils.core import setup
setup(name='Frappy',
version='0.1',
description='Framework for creating Web APIs in Python',
author='Luke Lee',
author_email='[email protected]',
url='http://github.com/durden/frappy',
packages=['frappy', 'frappy.services', 'frappy.core',
'frappy.services.twitter']
)
|
# ... existing code ...
author='Luke Lee',
author_email='[email protected]',
url='http://github.com/durden/frappy',
packages=['frappy', 'frappy.services', 'frappy.core',
'frappy.services.twitter']
)
# ... rest of the code ...
|
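The change above shows the usual failure mode of hand-maintained package lists: distutils installs only the packages that are spelled out, so `frappy.services.twitter` was silently missing until it was added here. A sketch of the common way to sidestep that, assuming the project could switch from distutils to setuptools (the exclude patterns are illustrative):

# Hypothetical setuptools variant: discover every package containing
# an __init__.py instead of listing subpackages by hand.
from setuptools import setup, find_packages

setup(
    name='Frappy',
    version='0.1',
    description='Framework for creating Web APIs in Python',
    packages=find_packages(exclude=['tests', 'tests.*']),
)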
1b992df4b7e8a36a5836b05217861cb1a7c62f8b
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(name='dltk',
version='0.1',
description='Deep Learning Toolkit for Medical Image Analysis',
author='DLTK Contributors',
url='https://dltk.github.io',
packages=find_packages(exclude=['_docs', 'contrib', 'data', 'examples']),
keywords='machine learning tensorflow deep learning biomedical imaging',
license='Apache License 2.0',
classifiers=['Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'],
install_requires=['numpy>=1.12.1', 'scipy>=0.19.0', 'pandas>=0.19.0', 'matplotlib>=1.5.3',
'scikit-image>=0.13.0', 'tensorflow-gpu>=1.1.0', 'SimpleITK>=1.0.0', 'jupyter>=1.0.0']
)
|
from setuptools import setup, find_packages
setup(name='dltk',
version='0.1',
description='Deep Learning Toolkit for Medical Image Analysis',
author='DLTK Contributors',
url='https://dltk.github.io',
packages=find_packages(exclude=['_docs', 'contrib', 'data', 'examples']),
keywords='machine learning tensorflow deep learning biomedical imaging',
license='Apache License 2.0',
classifiers=['Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'],
install_requires=['numpy>=1.12.1', 'scipy>=0.19.0', 'pandas>=0.19.0', 'matplotlib>=1.5.3',
'scikit-image>=0.13.0', 'tensorflow-gpu>=1.1.0', 'SimpleITK>=1.0.0', 'jupyter>=1.0.0'],
extras_require={'doc': ['sphinx', 'sphinx-rtd-theme', 'recommonmark']}
)
|
Add optional dependencies for the docs
|
Add optional dependencies for the docs
|
Python
|
apache-2.0
|
DLTK/DLTK
|
python
|
## Code Before:
from setuptools import setup, find_packages
setup(name='dltk',
version='0.1',
description='Deep Learning Toolkit for Medical Image Analysis',
author='DLTK Contributors',
url='https://dltk.github.io',
packages=find_packages(exclude=['_docs', 'contrib', 'data', 'examples']),
keywords='machine learning tensorflow deep learning biomedical imaging',
license='Apache License 2.0',
classifiers=['Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'],
install_requires=['numpy>=1.12.1', 'scipy>=0.19.0', 'pandas>=0.19.0', 'matplotlib>=1.5.3',
'scikit-image>=0.13.0', 'tensorflow-gpu>=1.1.0', 'SimpleITK>=1.0.0', 'jupyter>=1.0.0']
)
## Instruction:
Add optional dependencies for the docs
## Code After:
from setuptools import setup, find_packages
setup(name='dltk',
version='0.1',
description='Deep Learning Toolkit for Medical Image Analysis',
author='DLTK Contributors',
url='https://dltk.github.io',
packages=find_packages(exclude=['_docs', 'contrib', 'data', 'examples']),
keywords='machine learning tensorflow deep learning biomedical imaging',
license='Apache License 2.0',
classifiers=['Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'],
install_requires=['numpy>=1.12.1', 'scipy>=0.19.0', 'pandas>=0.19.0', 'matplotlib>=1.5.3',
'scikit-image>=0.13.0', 'tensorflow-gpu>=1.1.0', 'SimpleITK>=1.0.0', 'jupyter>=1.0.0'],
extras_require={'doc': ['sphinx', 'sphinx-rtd-theme', 'recommonmark']}
)
|
// ... existing code ...
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'],
install_requires=['numpy>=1.12.1', 'scipy>=0.19.0', 'pandas>=0.19.0', 'matplotlib>=1.5.3',
'scikit-image>=0.13.0', 'tensorflow-gpu>=1.1.0', 'SimpleITK>=1.0.0', 'jupyter>=1.0.0'],
extras_require={'doc': ['sphinx', 'sphinx-rtd-theme', 'recommonmark']}
)
// ... rest of the code ...
|
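For context on the record above: `extras_require` groups dependencies under an optional name, so the Sphinx toolchain is only pulled in when the extra is requested, for example `pip install dltk[doc]` or `pip install -e .[doc]` from a checkout, while a plain install keeps skipping it. A stripped-down sketch of the same pattern with hypothetical package metadata:

# Minimal extras_require example; the package name and pins are placeholders.
from setuptools import setup

setup(
    name="example-pkg",
    version="0.1",
    install_requires=["numpy>=1.12.1"],
    extras_require={
        "doc": ["sphinx", "sphinx-rtd-theme", "recommonmark"],  # pip install example-pkg[doc]
    },
)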
29fde077a72da2b2729e88455dc03dbd4540f1a4
|
api/src/main/java/org/motechproject/nms/api/web/contract/mobileAcademy/CourseVersionResponse.java
|
api/src/main/java/org/motechproject/nms/api/web/contract/mobileAcademy/CourseVersionResponse.java
|
package org.motechproject.nms.api.web.contract.mobileAcademy;
/**
* API response object for getting course version
*/
public class CourseVersionResponse {
private int courseVersion;
public CourseVersionResponse() {
}
public CourseVersionResponse(int courseVersion) {
this.courseVersion = courseVersion;
}
public Integer getCourseVersion() {
return courseVersion;
}
public void setCourseVersion(Integer courseVersion) {
this.courseVersion = courseVersion;
}
}
|
package org.motechproject.nms.api.web.contract.mobileAcademy;
/**
* API response object for getting course version
*/
public class CourseVersionResponse {
private long courseVersion;
public CourseVersionResponse() {
}
public CourseVersionResponse(long courseVersion) {
this.courseVersion = courseVersion;
}
public Long getCourseVersion() {
return courseVersion;
}
public void setCourseVersion(Long courseVersion) {
this.courseVersion = courseVersion;
}
}
|
Fix a bug in MA tests. Casting version.getMillis to an int resulted in the integer rolling over and returning a negative number
|
Fix a bug in MA tests. Casting version.getMillis to an int resulted in the integer rolling over
and returning a negative number
|
Java
|
bsd-3-clause
|
ngraczewski/mim,motech-implementations/mim,motech-implementations/mim,motech-implementations/mim,ngraczewski/mim,ngraczewski/mim,motech-implementations/mim
|
java
|
## Code Before:
package org.motechproject.nms.api.web.contract.mobileAcademy;
/**
* API response object for getting course version
*/
public class CourseVersionResponse {
private int courseVersion;
public CourseVersionResponse() {
}
public CourseVersionResponse(int courseVersion) {
this.courseVersion = courseVersion;
}
public Integer getCourseVersion() {
return courseVersion;
}
public void setCourseVersion(Integer courseVersion) {
this.courseVersion = courseVersion;
}
}
## Instruction:
Fix a bug in MA tests. Casting version.getMillis to an int resulted in the integer rolling over
and returning a negative number
## Code After:
package org.motechproject.nms.api.web.contract.mobileAcademy;
/**
* API response object for getting course version
*/
public class CourseVersionResponse {
private long courseVersion;
public CourseVersionResponse() {
}
public CourseVersionResponse(long courseVersion) {
this.courseVersion = courseVersion;
}
public Long getCourseVersion() {
return courseVersion;
}
public void setCourseVersion(Long courseVersion) {
this.courseVersion = courseVersion;
}
}
|
# ... existing code ...
*/
public class CourseVersionResponse {
private long courseVersion;
public CourseVersionResponse() {
}
public CourseVersionResponse(long courseVersion) {
this.courseVersion = courseVersion;
}
public Long getCourseVersion() {
return courseVersion;
}
public void setCourseVersion(Long courseVersion) {
this.courseVersion = courseVersion;
}
}
# ... rest of the code ...
|
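The commit message above describes a 32-bit overflow: epoch milliseconds exceeded `Integer.MAX_VALUE` long ago, so casting them to an `int` truncates the value and can flip its sign, which is why the course version moved from `int` to `long`. The snippet below is a small illustration of that truncation written in Python (simulating a two's-complement 32-bit cast); it is not code from the MIM repository.

```python
# Illustration of why casting epoch milliseconds to a 32-bit integer misbehaves.
# Simulates a two's-complement (int) cast; not code from the repository.
import time

def to_int32(value: int) -> int:
    """Truncate an integer to a signed 32-bit value, mimicking a Java (int) cast."""
    return ((value + 2**31) % 2**32) - 2**31

millis = int(time.time() * 1000)   # epoch milliseconds, far larger than 2**31 - 1
print(millis)                      # the real value
print(to_int32(millis))            # the truncated value: wrong, and negative
                                   # whenever the low 32 bits are >= 2**31
```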
28b95d48a51e8cfb1c3807b7fb175c4b89dbbf67
|
src/libreset/avl.c
|
src/libreset/avl.c
|
/*
*
*
* forward declarations
*
*
*/
static void
destroy_subtree(
struct avl_el* node //!< A node to destroy
);
/*
*
*
* interface implementation
*
*
*/
struct avl*
avl_alloc(void) {
return calloc(1, sizeof(struct avl));
}
int
avl_destroy(
struct avl* avl //!< The avl tree
) {
if (avl && avl->root) {
destroy_subtree(avl->root);
}
free(avl);
return 1;
}
/*
*
*
* implementation of internal functions
*
*
*/
static void
destroy_subtree(
struct avl_el* node //!< A node to destroy
) {
if (node->l) {
destroy_subtree(node->l);
}
if (node->r) {
destroy_subtree(node->r);
}
free(node);
}
|
/*
*
*
* forward declarations
*
*
*/
static void
destroy_subtree(
struct avl_el* node //!< A node to destroy
);
/**
* Rotate a node counter-clockwise
*
* @return new root or NULL, if the rotation could not be performed
*/
static struct avl_el*
rotate_left(
struct avl_el* node //!< The node to rotate
);
/**
* Rotate a node clockwise
*
* @return new root or NULL, if the rotation could not be performed
*/
static struct avl_el*
rotate_right(
struct avl_el* node //!< The node to rotate
);
/*
*
*
* interface implementation
*
*
*/
struct avl*
avl_alloc(void) {
return calloc(1, sizeof(struct avl));
}
int
avl_destroy(
struct avl* avl //!< The avl tree
) {
if (avl && avl->root) {
destroy_subtree(avl->root);
}
free(avl);
return 1;
}
/*
*
*
* implementation of internal functions
*
*
*/
static void
destroy_subtree(
struct avl_el* node //!< A node to destroy
) {
if (node->l) {
destroy_subtree(node->l);
}
if (node->r) {
destroy_subtree(node->r);
}
free(node);
}
static struct avl_el*
rotate_left(
struct avl_el* node
) {
return 0;
}
static struct avl_el*
rotate_right(
struct avl_el* node
) {
return 0;
}
|
Add declaration and stubs for rotations
|
Add declaration and stubs for rotations
|
C
|
lgpl-2.1
|
waysome/libreset,waysome/libreset
|
c
|
## Code Before:
/*
*
*
* forward declarations
*
*
*/
static void
destroy_subtree(
struct avl_el* node //!< A node to destroy
);
/*
*
*
* interface implementation
*
*
*/
struct avl*
avl_alloc(void) {
return calloc(1, sizeof(struct avl));
}
int
avl_destroy(
struct avl* avl //!< The avl tree
) {
if (avl && avl->root) {
destroy_subtree(avl->root);
}
free(avl);
return 1;
}
/*
*
*
* implementation of internal functions
*
*
*/
static void
destroy_subtree(
struct avl_el* node //!< A node to destroy
) {
if (node->l) {
destroy_subtree(node->l);
}
if (node->r) {
destroy_subtree(node->r);
}
free(node);
}
## Instruction:
Add declaration and stubs for rotations
## Code After:
/*
*
*
* forward declarations
*
*
*/
static void
destroy_subtree(
struct avl_el* node //!< A node to destroy
);
/**
* Rotate a node counter-clockwise
*
* @return new root or NULL, if the rotation could not be performed
*/
static struct avl_el*
rotate_left(
struct avl_el* node //!< The node to rotate
);
/**
* Rotate a node clockwise
*
* @return new root or NULL, if the rotation could not be performed
*/
static struct avl_el*
rotate_right(
struct avl_el* node //!< The node to rotate
);
/*
*
*
* interface implementation
*
*
*/
struct avl*
avl_alloc(void) {
return calloc(1, sizeof(struct avl));
}
int
avl_destroy(
struct avl* avl //!< The avl tree
) {
if (avl && avl->root) {
destroy_subtree(avl->root);
}
free(avl);
return 1;
}
/*
*
*
* implementation of internal functions
*
*
*/
static void
destroy_subtree(
struct avl_el* node //!< A node to destroy
) {
if (node->l) {
destroy_subtree(node->l);
}
if (node->r) {
destroy_subtree(node->r);
}
free(node);
}
static struct avl_el*
rotate_left(
struct avl_el* node
) {
return 0;
}
static struct avl_el*
rotate_right(
struct avl_el* node
) {
return 0;
}
|
# ... existing code ...
static void
destroy_subtree(
struct avl_el* node //!< A node to destroy
);
/**
* Rotate a node counter-clockwise
*
* @return new root or NULL, if the rotation could not be performed
*/
static struct avl_el*
rotate_left(
struct avl_el* node //!< The node to rotate
);
/**
* Rotate a node clockwise
*
* @return new root or NULL, if the rotation could not be performed
*/
static struct avl_el*
rotate_right(
struct avl_el* node //!< The node to rotate
);
# ... modified code ...
free(node);
}
static struct avl_el*
rotate_left(
struct avl_el* node
) {
return 0;
}
static struct avl_el*
rotate_right(
struct avl_el* node
) {
return 0;
}
# ... rest of the code ...
|
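The record above only adds forward declarations and empty stubs for the two rotations. As a language-neutral reference for what such stubs eventually implement, here is a short Python sketch of left and right rotation on a minimal binary-tree node; it illustrates the general technique and is not the libreset implementation (which also has to maintain the AVL bookkeeping carried by its structs).

```python
# Generic sketch of AVL-style rotations on a minimal binary-tree node.
# This mirrors what rotate_left/rotate_right are expected to do; it is not
# code from libreset and omits any balance/height bookkeeping.
class Node:
    def __init__(self, key, left=None, right=None):
        self.key = key
        self.l = left
        self.r = right

def rotate_left(node):
    """Counter-clockwise rotation; returns the new subtree root, or None if impossible."""
    if node is None or node.r is None:
        return None
    new_root = node.r
    node.r = new_root.l    # old root adopts the new root's left subtree
    new_root.l = node      # old root becomes the new root's left child
    return new_root

def rotate_right(node):
    """Clockwise rotation; returns the new subtree root, or None if impossible."""
    if node is None or node.l is None:
        return None
    new_root = node.l
    node.l = new_root.r
    new_root.r = node
    return new_root

# Rotating left around 1 in the right-leaning chain 1 -> 2 -> 3 yields 2 as root.
root = rotate_left(Node(1, right=Node(2, right=Node(3))))
print(root.key, root.l.key, root.r.key)   # 2 1 3
```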
5b34711e3f7b2c183ebb4be504c0914f8c55f6e3
|
setup.py
|
setup.py
|
import sys
from setuptools import setup
extra = {}
if sys.version_info >= (3,):
extra['use_2to3'] = True
setup(name='mockito-edgeware',
version='1.0.0',
packages=['mockito', 'mockito_test', 'mockito_util'],
url='https://github.com/edgeware/mockito-python',
download_url='http://pypi.edgeware.tv/simple/mockito-edgeware',
maintainer='Mockito Maintainers',
maintainer_email='[email protected]',
license='MIT',
description='Spying framework',
long_description='Mockito is a spying framework based on Java library with the same name.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Testing',
'Programming Language :: Python :: 2'
'Programming Language :: Python :: 3'
],
test_suite = 'nose.collector',
py_modules = ['distribute_setup'],
setup_requires = ['nose'],
**extra
)
|
import sys
from setuptools import setup
extra = {}
if sys.version_info >= (3,):
extra['use_2to3'] = True
setup(name='mockito',
version='0.5.1-edgeware',
packages=['mockito', 'mockito_test', 'mockito_util'],
url='https://github.com/edgeware/mockito-python',
download_url='http://pypi.edgeware.tv/simple/mockito',
maintainer='Mockito Maintainers',
maintainer_email='[email protected]',
license='MIT',
description='Spying framework',
long_description=('Mockito is a spying framework based on Java library'
'with the same name.'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Testing',
'Programming Language :: Python :: 2'
'Programming Language :: Python :: 3'
],
test_suite='nose.collector',
py_modules=['distribute_setup'],
setup_requires=['nose'],
**extra)
|
Use version suffix instead of renaming
|
Use version suffix instead of renaming
|
Python
|
mit
|
edgeware/mockito-python,edgeware/mockito-python
|
python
|
## Code Before:
import sys
from setuptools import setup
extra = {}
if sys.version_info >= (3,):
extra['use_2to3'] = True
setup(name='mockito-edgeware',
version='1.0.0',
packages=['mockito', 'mockito_test', 'mockito_util'],
url='https://github.com/edgeware/mockito-python',
download_url='http://pypi.edgeware.tv/simple/mockito-edgeware',
maintainer='Mockito Maintainers',
maintainer_email='[email protected]',
license='MIT',
description='Spying framework',
long_description='Mockito is a spying framework based on Java library with the same name.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Testing',
'Programming Language :: Python :: 2'
'Programming Language :: Python :: 3'
],
test_suite = 'nose.collector',
py_modules = ['distribute_setup'],
setup_requires = ['nose'],
**extra
)
## Instruction:
Use version suffix instead of renaming
## Code After:
import sys
from setuptools import setup
extra = {}
if sys.version_info >= (3,):
extra['use_2to3'] = True
setup(name='mockito',
version='0.5.1-edgeware',
packages=['mockito', 'mockito_test', 'mockito_util'],
url='https://github.com/edgeware/mockito-python',
download_url='http://pypi.edgeware.tv/simple/mockito',
maintainer='Mockito Maintainers',
maintainer_email='[email protected]',
license='MIT',
description='Spying framework',
long_description=('Mockito is a spying framework based on Java library'
'with the same name.'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Testing',
'Programming Language :: Python :: 2'
'Programming Language :: Python :: 3'
],
test_suite='nose.collector',
py_modules=['distribute_setup'],
setup_requires=['nose'],
**extra)
|
...
extra = {}
if sys.version_info >= (3,):
extra['use_2to3'] = True
setup(name='mockito',
version='0.5.1-edgeware',
packages=['mockito', 'mockito_test', 'mockito_util'],
url='https://github.com/edgeware/mockito-python',
download_url='http://pypi.edgeware.tv/simple/mockito',
maintainer='Mockito Maintainers',
maintainer_email='[email protected]',
license='MIT',
description='Spying framework',
long_description=('Mockito is a spying framework based on Java library'
'with the same name.'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
...
'Programming Language :: Python :: 2'
'Programming Language :: Python :: 3'
],
test_suite='nose.collector',
py_modules=['distribute_setup'],
setup_requires=['nose'],
**extra)
...
|
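A side note on the version string chosen above (an observation about packaging conventions, not part of the original record): a vendor suffix attached with a hyphen, as in `0.5.1-edgeware`, is not always accepted by modern PEP 440 parsers, whereas a local version label introduced with `+` is. The hedged sketch below uses the third-party `packaging` library, assuming it is installed, to show how the two forms are treated.

```python
# Hedged illustration: expressing a vendor suffix as a PEP 440 local version
# label. Requires the third-party "packaging" distribution.
from packaging.version import InvalidVersion, Version

for candidate in ("0.5.1+edgeware", "0.5.1-edgeware"):
    try:
        print(candidate, "->", Version(candidate))
    except InvalidVersion as exc:
        print(candidate, "-> rejected:", exc)
```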
d391f6fe8371b045cd684841da59984e5b28b1b3
|
plata/product/producer/models.py
|
plata/product/producer/models.py
|
from datetime import datetime
from django.db import models
from django.db.models import Sum, signals
from django.utils.translation import ugettext_lazy as _
from plata.product.models import Product
class ProducerManager(models.Manager):
def active(self):
return self.filter(is_active=True)
class Producer(models.Model):
is_active = models.BooleanField(_('is active'), default=True)
name = models.CharField(_('name'), max_length=100)
slug = models.SlugField(_('slug'), unique=True)
ordering = models.PositiveIntegerField(_('ordering'), default=0)
description = models.TextField(_('description'), blank=True)
class Meta:
app_label = 'product'
ordering = ['ordering', 'name']
verbose_name = _('producer')
verbose_name_plural = _('producers')
def __unicode__(self):
return self.name
Product.add_to_class('producer', models.ForeignKey(Producer,
related_name='products', verbose_name=_('producer')))
|
from datetime import datetime
from django.db import models
from django.db.models import Sum, signals
from django.utils.translation import ugettext_lazy as _
from plata.product.models import Product
class ProducerManager(models.Manager):
def active(self):
return self.filter(is_active=True)
class Producer(models.Model):
is_active = models.BooleanField(_('is active'), default=True)
name = models.CharField(_('name'), max_length=100)
slug = models.SlugField(_('slug'), unique=True)
ordering = models.PositiveIntegerField(_('ordering'), default=0)
description = models.TextField(_('description'), blank=True)
class Meta:
app_label = 'product'
ordering = ['ordering', 'name']
verbose_name = _('producer')
verbose_name_plural = _('producers')
def __unicode__(self):
return self.name
Product.add_to_class('producer', models.ForeignKey(Producer, blank=True, null=True,
related_name='products', verbose_name=_('producer')))
|
Revert "It shouldn't be that hard to define a producer, really"
|
Revert "It shouldn't be that hard to define a producer, really"
Sometimes it is.
This reverts commit 883c518d8844bd006d6abc783b315aea01d59b69.
|
Python
|
bsd-3-clause
|
allink/plata,armicron/plata,stefanklug/plata,armicron/plata,armicron/plata
|
python
|
## Code Before:
from datetime import datetime
from django.db import models
from django.db.models import Sum, signals
from django.utils.translation import ugettext_lazy as _
from plata.product.models import Product
class ProducerManager(models.Manager):
def active(self):
return self.filter(is_active=True)
class Producer(models.Model):
is_active = models.BooleanField(_('is active'), default=True)
name = models.CharField(_('name'), max_length=100)
slug = models.SlugField(_('slug'), unique=True)
ordering = models.PositiveIntegerField(_('ordering'), default=0)
description = models.TextField(_('description'), blank=True)
class Meta:
app_label = 'product'
ordering = ['ordering', 'name']
verbose_name = _('producer')
verbose_name_plural = _('producers')
def __unicode__(self):
return self.name
Product.add_to_class('producer', models.ForeignKey(Producer,
related_name='products', verbose_name=_('producer')))
## Instruction:
Revert "It shouldn't be that hard to define a producer, really"
Sometimes it is.
This reverts commit 883c518d8844bd006d6abc783b315aea01d59b69.
## Code After:
from datetime import datetime
from django.db import models
from django.db.models import Sum, signals
from django.utils.translation import ugettext_lazy as _
from plata.product.models import Product
class ProducerManager(models.Manager):
def active(self):
return self.filter(is_active=True)
class Producer(models.Model):
is_active = models.BooleanField(_('is active'), default=True)
name = models.CharField(_('name'), max_length=100)
slug = models.SlugField(_('slug'), unique=True)
ordering = models.PositiveIntegerField(_('ordering'), default=0)
description = models.TextField(_('description'), blank=True)
class Meta:
app_label = 'product'
ordering = ['ordering', 'name']
verbose_name = _('producer')
verbose_name_plural = _('producers')
def __unicode__(self):
return self.name
Product.add_to_class('producer', models.ForeignKey(Producer, blank=True, null=True,
related_name='products', verbose_name=_('producer')))
|
...
return self.name
Product.add_to_class('producer', models.ForeignKey(Producer, blank=True, null=True,
related_name='products', verbose_name=_('producer')))
...
|
305d04fc0841035bf744480261017c14ae3045b0
|
syntax_makefile.py
|
syntax_makefile.py
|
import wx.stc
ident = "makefile"
name = "Makefile"
extensions = ["Makefile", "*.mk"]
lexer = wx.stc.STC_LEX_MAKEFILE
indent = 8
use_tabs = True
stylespecs = (
(wx.stc.STC_STYLE_DEFAULT, ""),
)
keywords = ""
|
import wx.stc
ident = "makefile"
name = "Makefile"
extensions = ["*Makefile", "*makefile", "*.mk"]
lexer = wx.stc.STC_LEX_MAKEFILE
indent = 8
use_tabs = True
stylespecs = (
(wx.stc.STC_STYLE_DEFAULT, ""),
)
keywords = ""
|
Make files ending in makefile or Makefile.
|
Make files ending in makefile or Makefile.
|
Python
|
mit
|
shaurz/devo
|
python
|
## Code Before:
import wx.stc
ident = "makefile"
name = "Makefile"
extensions = ["Makefile", "*.mk"]
lexer = wx.stc.STC_LEX_MAKEFILE
indent = 8
use_tabs = True
stylespecs = (
(wx.stc.STC_STYLE_DEFAULT, ""),
)
keywords = ""
## Instruction:
Make files ending in makefile or Makefile.
## Code After:
import wx.stc
ident = "makefile"
name = "Makefile"
extensions = ["*Makefile", "*makefile", "*.mk"]
lexer = wx.stc.STC_LEX_MAKEFILE
indent = 8
use_tabs = True
stylespecs = (
(wx.stc.STC_STYLE_DEFAULT, ""),
)
keywords = ""
|
...
ident = "makefile"
name = "Makefile"
extensions = ["*Makefile", "*makefile", "*.mk"]
lexer = wx.stc.STC_LEX_MAKEFILE
indent = 8
use_tabs = True
...
|
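The change above widens the file-name patterns this syntax definition matches. As a quick illustration of how such wildcard patterns behave, assuming fnmatch-style glob matching against bare file names (a common convention for editor syntax selection, not necessarily exactly what Devo does), the snippet below checks a few candidate names against the new list.

```python
# Illustration only: which file names the new pattern list matches, assuming
# fnmatch-style globbing. This is not the editor's own matching code.
from fnmatch import fnmatch

patterns = ["*Makefile", "*makefile", "*.mk"]
candidates = ["Makefile", "makefile", "GNUmakefile", "build.mk", "Makefile.am"]

for name in candidates:
    matched = any(fnmatch(name, pattern) for pattern in patterns)
    print(f"{name}: {'matches' if matched else 'no match'}")
```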
7c10f4511e64bedad079e39834d6b845cfb942cc
|
setup.py
|
setup.py
|
from setuptools import setup
VERSION = '0.1.0'
setup(
name='shelly',
description="Standalone tools to make the shell better.",
long_description="""
Shelly makes processing line-by-line data in the shell easier, by providing
access to useful functional programming primitives and interactive tools.
""",
url="http://bitbucket.org/larsyencken/shelly/",
version=VERSION,
author="Lars Yencken",
author_email="[email protected]",
license="BSD",
scripts=[
'drop',
'take',
'groupby',
'random',
'max',
'trickle',
'min',
'range',
],
)
|
from setuptools import setup
VERSION = '0.1.0'
setup(
name='shelly',
description="Standalone tools to make the shell better.",
long_description="""
Shelly makes processing line-by-line data in the shell easier, by providing
access to useful functional programming primitives and interactive tools.
""",
url="http://bitbucket.org/larsyencken/shelly/",
version=VERSION,
author="Lars Yencken",
author_email="[email protected]",
license="BSD",
scripts=[
'drop',
'exists',
'groupby',
'max',
'min',
'random',
'range',
'subsample',
'take',
'trickle',
],
)
|
Update the list of scripts to install.
|
Update the list of scripts to install.
|
Python
|
isc
|
larsyencken/shelly
|
python
|
## Code Before:
from setuptools import setup
VERSION = '0.1.0'
setup(
name='shelly',
description="Standalone tools to make the shell better.",
long_description="""
Shelly makes processing line-by-line data in the shell easier, by providing
access to useful functional programming primitives and interactive tools.
""",
url="http://bitbucket.org/larsyencken/shelly/",
version=VERSION,
author="Lars Yencken",
author_email="[email protected]",
license="BSD",
scripts=[
'drop',
'take',
'groupby',
'random',
'max',
'trickle',
'min',
'range',
],
)
## Instruction:
Update the list of scripts to install.
## Code After:
from setuptools import setup
VERSION = '0.1.0'
setup(
name='shelly',
description="Standalone tools to make the shell better.",
long_description="""
Shelly makes processing line-by-line data in the shell easier, by providing
access to useful functional programming primitives and interactive tools.
""",
url="http://bitbucket.org/larsyencken/shelly/",
version=VERSION,
author="Lars Yencken",
author_email="[email protected]",
license="BSD",
scripts=[
'drop',
'exists',
'groupby',
'max',
'min',
'random',
'range',
'subsample',
'take',
'trickle',
],
)
|
// ... existing code ...
license="BSD",
scripts=[
'drop',
'exists',
'groupby',
'max',
'min',
'random',
'range',
'subsample',
'take',
'trickle',
],
)
// ... rest of the code ...
|
69de2261c30a8bab1ac4d0749cf32baec49e0cc4
|
webapp/byceps/blueprints/board/views.py
|
webapp/byceps/blueprints/board/views.py
|
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get(id)
return {'topic': topic}
|
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get_or_404(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get_or_404(id)
return {'topic': topic}
|
Throw 404 if category/topic with given id is not found.
|
Throw 404 if category/topic with given id is not found.
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps
|
python
|
## Code Before:
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get(id)
return {'topic': topic}
## Instruction:
Throw 404 if category/topic with given id is not found.
## Code After:
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get_or_404(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get_or_404(id)
return {'topic': topic}
|
// ... existing code ...
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get_or_404(id)
return {'category': category}
// ... modified code ...
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get_or_404(id)
return {'topic': topic}
// ... rest of the code ...
|
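The switch from `get` to `get_or_404` above makes a missing id surface as an HTTP 404 response instead of handing `None` to the template. Below is a rough, framework-free sketch of what such a helper does; the names are illustrative and this is not the Flask-SQLAlchemy implementation the view relies on.

```python
# Rough sketch of the get-or-404 pattern: look an object up by primary key and
# signal "not found" with an exception the web layer turns into a 404 response.
# Names are illustrative; this is not Flask-SQLAlchemy's implementation.
class NotFound(Exception):
    """Maps to an HTTP 404 response in the web layer."""

def get_or_404(query, ident):
    obj = query.get(ident)
    if obj is None:
        raise NotFound(f"no record with id {ident!r}")
    return obj

# Tiny stand-in "query" so the sketch runs on its own.
class DictQuery:
    def __init__(self, rows):
        self._rows = rows

    def get(self, ident):
        return self._rows.get(ident)

topics = DictQuery({1: "welcome thread"})
print(get_or_404(topics, 1))       # "welcome thread"
try:
    get_or_404(topics, 99)
except NotFound as exc:
    print("404:", exc)
```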
5e4d38512a9e6f3c0ff098f599f0851ca0e99e5b
|
python/src/com/jetbrains/python/codeInsight/PyGotoTargetRendererProvider.java
|
python/src/com/jetbrains/python/codeInsight/PyGotoTargetRendererProvider.java
|
package com.jetbrains.python.codeInsight;
import com.intellij.codeInsight.navigation.GotoTargetRendererProvider;
import com.intellij.ide.util.PsiElementListCellRenderer;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiNamedElement;
import com.jetbrains.python.psi.PyElement;
/**
* @author yole
*/
public class PyGotoTargetRendererProvider implements GotoTargetRendererProvider {
public PsiElementListCellRenderer getRenderer(final PsiElement[] elements) {
for(PsiElement element: elements) {
if (!(element instanceof PyElement) || !(element instanceof PsiNamedElement)) return null;
}
return new PyElementListCellRenderer();
}
}
|
package com.jetbrains.python.codeInsight;
import com.intellij.codeInsight.navigation.GotoTargetRendererProvider;
import com.intellij.ide.util.PsiElementListCellRenderer;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiNamedElement;
import com.jetbrains.python.psi.PyElement;
/**
* @author yole
*/
public class PyGotoTargetRendererProvider implements GotoTargetRendererProvider {
public PsiElementListCellRenderer getRenderer(final PsiElement element) {
if (element instanceof PyElement && element instanceof PsiNamedElement) return new PyElementListCellRenderer();
return null;
}
}
|
Create Test: refactoring + small fixes
|
Create Test: refactoring + small fixes
|
Java
|
apache-2.0
|
wreckJ/intellij-community,mglukhikh/intellij-community,petteyg/intellij-community,SerCeMan/intellij-community,ahb0327/intellij-community,TangHao1987/intellij-community,gnuhub/intellij-community,vladmm/intellij-community,allotria/intellij-community,asedunov/intellij-community,robovm/robovm-studio,Lekanich/intellij-community,allotria/intellij-community,ryano144/intellij-community,amith01994/intellij-community,ivan-fedorov/intellij-community,ivan-fedorov/intellij-community,orekyuu/intellij-community,muntasirsyed/intellij-community,retomerz/intellij-community,slisson/intellij-community,supersven/intellij-community,dslomov/intellij-community,Distrotech/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,gnuhub/intellij-community,caot/intellij-community,hurricup/intellij-community,clumsy/intellij-community,dslomov/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,michaelgallacher/intellij-community,izonder/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,robovm/robovm-studio,ryano144/intellij-community,FHannes/intellij-community,xfournet/intellij-community,diorcety/intellij-community,gnuhub/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,holmes/intellij-community,fnouama/intellij-community,blademainer/intellij-community,gnuhub/intellij-community,wreckJ/intellij-community,signed/intellij-community,kool79/intellij-community,adedayo/intellij-community,hurricup/intellij-community,FHannes/intellij-community,ryano144/intellij-community,petteyg/intellij-community,fnouama/intellij-community,vvv1559/intellij-community,Distrotech/intellij-community,youdonghai/intellij-community,orekyuu/intellij-community,signed/intellij-community,holmes/intellij-community,TangHao1987/intellij-community,vladmm/intellij-community,ftomassetti/intellij-community,tmpgit/intellij-community,tmpgit/intellij-community,MER-GROUP/intellij-community,diorcety/intellij-community,samthor/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,izonder/intellij-community,supersven/intellij-community,MichaelNedzelsky/intellij-community,ivan-fedorov/intellij-community,fitermay/intellij-community,ahb0327/intellij-community,diorcety/intellij-community,diorcety/intellij-community,holmes/intellij-community,ivan-fedorov/intellij-community,dslomov/intellij-community,da1z/intellij-community,ftomassetti/intellij-community,allotria/intellij-community,Lekanich/intellij-community,gnuhub/intellij-community,semonte/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,semonte/intellij-community,allotria/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,kdwink/intellij-community,retomerz/intellij-community,allotria/intellij-community,FHannes/intellij-community,blademainer/intellij-community,holmes/intellij-community,amith01994/intellij-community,kdwink/intellij-community,apixandru/intellij-community,MER-GROUP/intellij-community,ol-loginov/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community,caot/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,mglukhikh/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,dslomov/intellij-community,petteyg/intellij-community,izonder/inte
llij-community,hurricup/intellij-community,blademainer/intellij-community,fnouama/intellij-community,semonte/intellij-community,fnouama/intellij-community,diorcety/intellij-community,jagguli/intellij-community,muntasirsyed/intellij-community,Lekanich/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,TangHao1987/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,kdwink/intellij-community,vvv1559/intellij-community,SerCeMan/intellij-community,apixandru/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,ahb0327/intellij-community,wreckJ/intellij-community,fengbaicanhe/intellij-community,samthor/intellij-community,caot/intellij-community,robovm/robovm-studio,caot/intellij-community,tmpgit/intellij-community,ThiagoGarciaAlves/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,kdwink/intellij-community,SerCeMan/intellij-community,robovm/robovm-studio,caot/intellij-community,FHannes/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,slisson/intellij-community,kool79/intellij-community,michaelgallacher/intellij-community,kool79/intellij-community,dslomov/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,kool79/intellij-community,suncycheng/intellij-community,izonder/intellij-community,ahb0327/intellij-community,fengbaicanhe/intellij-community,ryano144/intellij-community,slisson/intellij-community,caot/intellij-community,clumsy/intellij-community,amith01994/intellij-community,robovm/robovm-studio,vladmm/intellij-community,jagguli/intellij-community,wreckJ/intellij-community,adedayo/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,caot/intellij-community,samthor/intellij-community,youdonghai/intellij-community,izonder/intellij-community,ol-loginov/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,SerCeMan/intellij-community,orekyuu/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,diorcety/intellij-community,pwoodworth/intellij-community,dslomov/intellij-community,pwoodworth/intellij-community,gnuhub/intellij-community,lucafavatella/intellij-community,fnouama/intellij-community,wreckJ/intellij-community,salguarnieri/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,petteyg/intellij-community,holmes/intellij-community,ahb0327/intellij-community,ftomassetti/intellij-community,ryano144/intellij-community,gnuhub/intellij-community,kdwink/intellij-community,SerCeMan/intellij-community,ftomassetti/intellij-community,ftomassetti/intellij-community,retomerz/intellij-community,petteyg/intellij-community,slisson/intellij-community,clumsy/intellij-community,pwoodworth/intellij-community,xfournet/intellij-community,dslomov/intellij-community,holmes/intellij-community,ivan-fedorov/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,idea4
bsd/idea4bsd,fitermay/intellij-community,retomerz/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,MichaelNedzelsky/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,ftomassetti/intellij-community,adedayo/intellij-community,slisson/intellij-community,gnuhub/intellij-community,michaelgallacher/intellij-community,salguarnieri/intellij-community,gnuhub/intellij-community,retomerz/intellij-community,ftomassetti/intellij-community,signed/intellij-community,Lekanich/intellij-community,ibinti/intellij-community,retomerz/intellij-community,caot/intellij-community,asedunov/intellij-community,MER-GROUP/intellij-community,alphafoobar/intellij-community,fnouama/intellij-community,petteyg/intellij-community,FHannes/intellij-community,Lekanich/intellij-community,petteyg/intellij-community,holmes/intellij-community,kool79/intellij-community,FHannes/intellij-community,ibinti/intellij-community,nicolargo/intellij-community,kdwink/intellij-community,da1z/intellij-community,pwoodworth/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,Distrotech/intellij-community,semonte/intellij-community,xfournet/intellij-community,jagguli/intellij-community,supersven/intellij-community,kool79/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,tmpgit/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,clumsy/intellij-community,ftomassetti/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,pwoodworth/intellij-community,muntasirsyed/intellij-community,FHannes/intellij-community,clumsy/intellij-community,amith01994/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,lucafavatella/intellij-community,Distrotech/intellij-community,amith01994/intellij-community,nicolargo/intellij-community,michaelgallacher/intellij-community,akosyakov/intellij-community,adedayo/intellij-community,akosyakov/intellij-community,vladmm/intellij-community,allotria/intellij-community,MER-GROUP/intellij-community,Lekanich/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,xfournet/intellij-community,ahb0327/intellij-community,salguarnieri/intellij-community,da1z/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,dslomov/intellij-community,amith01994/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,dslomov/intellij-community,pwoodworth/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,petteyg/intellij-community,Lekanich/intellij-community,salguarnieri/intellij-community,caot/intellij-community,blademainer/intellij-community,vladmm/intellij-community,akosyakov/intellij-community,ivan-fedorov/intellij-community,adedayo/intellij-community,fengbaicanhe/intellij-c
ommunity,orekyuu/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,samthor/intellij-community,asedunov/intellij-community,MichaelNedzelsky/intellij-community,orekyuu/intellij-community,supersven/intellij-community,adedayo/intellij-community,hurricup/intellij-community,MER-GROUP/intellij-community,samthor/intellij-community,holmes/intellij-community,semonte/intellij-community,ibinti/intellij-community,allotria/intellij-community,clumsy/intellij-community,retomerz/intellij-community,signed/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,samthor/intellij-community,da1z/intellij-community,vvv1559/intellij-community,SerCeMan/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,clumsy/intellij-community,youdonghai/intellij-community,nicolargo/intellij-community,xfournet/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,vladmm/intellij-community,kdwink/intellij-community,mglukhikh/intellij-community,SerCeMan/intellij-community,asedunov/intellij-community,ryano144/intellij-community,supersven/intellij-community,robovm/robovm-studio,kdwink/intellij-community,salguarnieri/intellij-community,izonder/intellij-community,ahb0327/intellij-community,MER-GROUP/intellij-community,mglukhikh/intellij-community,slisson/intellij-community,ibinti/intellij-community,vladmm/intellij-community,nicolargo/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,caot/intellij-community,robovm/robovm-studio,fitermay/intellij-community,samthor/intellij-community,akosyakov/intellij-community,alphafoobar/intellij-community,gnuhub/intellij-community,ol-loginov/intellij-community,slisson/intellij-community,akosyakov/intellij-community,kool79/intellij-community,semonte/intellij-community,holmes/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,vladmm/intellij-community,fnouama/intellij-community,slisson/intellij-community,kdwink/intellij-community,idea4bsd/idea4bsd,amith01994/intellij-community,fnouama/intellij-community,SerCeMan/intellij-community,blademainer/intellij-community,lucafavatella/intellij-community,izonder/intellij-community,alphafoobar/intellij-community,MER-GROUP/intellij-community,clumsy/intellij-community,supersven/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,fitermay/intellij-community,robovm/robovm-studio,blademainer/intellij-community,samthor/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,fengbaicanhe/intellij-community,jagguli/intellij-community,ryano144/intellij-community,ryano144/intellij-community,supersven/intellij-community,kool79/intellij-community,robovm/robovm-studio,xfournet/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,adedayo/intellij-community,youdonghai/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,apixandru/intellij-community,da1z/intellij-community,petteyg/intellij-community,SerCeMan/intellij-community,da1z/intellij-community,FHannes/intellij-community,apixandru/intellij-community,asedunov/intellij-community,alphafoobar/intellij-community,lucafavatella/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,signe
d/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,xfournet/intellij-community,apixandru/intellij-community,retomerz/intellij-community,semonte/intellij-community,da1z/intellij-community,ol-loginov/intellij-community,fnouama/intellij-community,kool79/intellij-community,hurricup/intellij-community,signed/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,pwoodworth/intellij-community,vladmm/intellij-community,michaelgallacher/intellij-community,Lekanich/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,apixandru/intellij-community,hurricup/intellij-community,pwoodworth/intellij-community,signed/intellij-community,hurricup/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,supersven/intellij-community,adedayo/intellij-community,hurricup/intellij-community,blademainer/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,vladmm/intellij-community,ftomassetti/intellij-community,Distrotech/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,alphafoobar/intellij-community,fengbaicanhe/intellij-community,ThiagoGarciaAlves/intellij-community,tmpgit/intellij-community,petteyg/intellij-community,signed/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,signed/intellij-community,asedunov/intellij-community,orekyuu/intellij-community,muntasirsyed/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,FHannes/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,retomerz/intellij-community,retomerz/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,fnouama/intellij-community,vvv1559/intellij-community,signed/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,izonder/intellij-community,TangHao1987/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,suncycheng/intellij-community,akosyakov/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,TangHao1987/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,robovm/robovm-studio,ryano144/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,holmes/intellij-community,mglukhikh/intellij-community,ivan-fedorov/intellij-community,TangHao1987/intellij-community,TangHao1987/intellij-community,dslomov/intellij-community,holmes/intellij-community,diorcety/intellij-community,pwoodworth/intellij-community,slisson/intellij-community,ThiagoGarciaAlves/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,hurricup/intellij-community,diorcety/intellij-community,apixandru/intellij-community,apixandru/intellij-community,petteyg/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,asedunov/intellij-community,FHannes/intellij-community,ibinti/intellij-community,Distrotech/intellij-community
,slisson/intellij-community,da1z/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,fnouama/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,asedunov/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,gnuhub/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,slisson/intellij-community,FHannes/intellij-community,supersven/intellij-community,diorcety/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,jagguli/intellij-community,apixandru/intellij-community,kool79/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,nicolargo/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,holmes/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,fnouama/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,supersven/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,robovm/robovm-studio,signed/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,ol-loginov/intellij-community,retomerz/intellij-community,jagguli/intellij-community,kool79/intellij-community,clumsy/intellij-community,amith01994/intellij-community,adedayo/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,hurricup/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,semonte/intellij-community,Distrotech/intellij-community,samthor/intellij-community,youdonghai/intellij-community,muntasirsyed/intellij-community,fitermay/intellij-community,salguarnieri/intellij-community,signed/intellij-community,amith01994/intellij-community,da1z/intellij-community,tmpgit/intellij-community,diorcety/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,supersven/intellij-community,fitermay/intellij-community,ivan-fedorov/intellij-community,jagguli/intellij-community,akosyakov/intellij-community,lucafavatella/intellij-community,slisson/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,jagguli/intellij-community,MichaelNedzelsky/intellij-community,da1z/intellij-community,semonte/intellij-community,FHannes/intellij-community,alphafoobar/intellij-community,fengbaicanhe/intellij-community,amith01994/intellij-community,izonder/intellij-community,Distrotech/intellij-community,caot/intellij-community,ibinti/intellij-community,izonder/intellij-community,vvv1559/intellij-community,da1z/intellij-community,kdwink/intellij-community,kdwink/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,samthor/intellij-community
|
java
|
## Code Before:
package com.jetbrains.python.codeInsight;
import com.intellij.codeInsight.navigation.GotoTargetRendererProvider;
import com.intellij.ide.util.PsiElementListCellRenderer;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiNamedElement;
import com.jetbrains.python.psi.PyElement;
/**
* @author yole
*/
public class PyGotoTargetRendererProvider implements GotoTargetRendererProvider {
public PsiElementListCellRenderer getRenderer(final PsiElement[] elements) {
for(PsiElement element: elements) {
if (!(element instanceof PyElement) || !(element instanceof PsiNamedElement)) return null;
}
return new PyElementListCellRenderer();
}
}
## Instruction:
Create Test: refactoring + small fixes
## Code After:
package com.jetbrains.python.codeInsight;
import com.intellij.codeInsight.navigation.GotoTargetRendererProvider;
import com.intellij.ide.util.PsiElementListCellRenderer;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiNamedElement;
import com.jetbrains.python.psi.PyElement;
/**
* @author yole
*/
public class PyGotoTargetRendererProvider implements GotoTargetRendererProvider {
public PsiElementListCellRenderer getRenderer(final PsiElement element) {
if (element instanceof PyElement && element instanceof PsiNamedElement) return new PyElementListCellRenderer();
return null;
}
}
|
...
* @author yole
*/
public class PyGotoTargetRendererProvider implements GotoTargetRendererProvider {
public PsiElementListCellRenderer getRenderer(final PsiElement element) {
if (element instanceof PyElement && element instanceof PsiNamedElement) return new PyElementListCellRenderer();
return null;
}
}
...
|
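The refactoring above changes the provider contract from rendering a whole array of targets to offering a renderer for a single element, or declining. The sketch below shows that provider pattern in generic Python; it is purely illustrative and does not use the IntelliJ APIs.

```python
# Generic sketch of a per-element renderer provider: asked about one element,
# it either returns a renderer or declines with None. Purely illustrative;
# not the IntelliJ API.
class NamedThing:
    def __init__(self, name):
        self.name = name

class NamedThingRenderer:
    def render(self, element):
        return f"<{element.name}>"

def get_renderer(element):
    # Decline anything that is not a NamedThing carrying a non-empty name.
    if isinstance(element, NamedThing) and element.name:
        return NamedThingRenderer()
    return None

for candidate in (NamedThing("topic_view"), object()):
    renderer = get_renderer(candidate)
    print(renderer.render(candidate) if renderer else "no renderer")
```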