commit
stringlengths 40
40
| old_file
stringlengths 4
234
| new_file
stringlengths 4
234
| old_contents
stringlengths 10
3.01k
| new_contents
stringlengths 19
3.38k
| subject
stringlengths 16
736
| message
stringlengths 17
2.63k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
82.6k
| config
stringclasses 4
values | content
stringlengths 134
4.41k
| fuzzy_diff
stringlengths 29
3.44k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
9d85b5d5a0a1124ddb64411516a2e8fb1394e408
|
hello-world/src/test/java/HelloWorldTest.java
|
hello-world/src/test/java/HelloWorldTest.java
|
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class HelloWorldTest {
@Test
public void helloNoName() {
assertEquals("Hello, World!", HelloWorld.hello(""));
assertEquals("Hello, World!", HelloWorld.hello(null));
}
@Test
public void helloSampleName() {
assertEquals("Hello, Alice!", HelloWorld.hello("Alice"));
}
@Test
public void helloAnotherSampleName() throws Exception {
assertEquals("Hello, Bob!", HelloWorld.hello("Bob"));
}
}
|
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class HelloWorldTest {
@Test
public void helloNoName() {
assertEquals("Hello, World!", HelloWorld.hello(""));
assertEquals("Hello, World!", HelloWorld.hello(null));
}
@Test
public void helloSampleName() {
assertEquals("Hello, Alice!", HelloWorld.hello("Alice"));
}
@Test
public void helloAnotherSampleName() {
assertEquals("Hello, Bob!", HelloWorld.hello("Bob"));
}
}
|
Remove needless Exception from method signature
|
Remove needless Exception from method signature
Fixes https://github.com/winterchord/xjava/commit/d09fb713d19abbcbdaaa979adb59aa7fc94532fd#commitcomment-14962172
The original PR (https://github.com/exercism/xjava/pull/69) that this issue
could have been fixed in was merged before I could fix this issue.
|
Java
|
mit
|
exercism/xjava,FridaTveit/xjava,FridaTveit/xjava,jtigger/xjava,nickRm/xjava,jtigger/xjava,exercism/xjava,nickRm/xjava,jmluy/xjava,jmluy/xjava
|
java
|
## Code Before:
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class HelloWorldTest {
@Test
public void helloNoName() {
assertEquals("Hello, World!", HelloWorld.hello(""));
assertEquals("Hello, World!", HelloWorld.hello(null));
}
@Test
public void helloSampleName() {
assertEquals("Hello, Alice!", HelloWorld.hello("Alice"));
}
@Test
public void helloAnotherSampleName() throws Exception {
assertEquals("Hello, Bob!", HelloWorld.hello("Bob"));
}
}
## Instruction:
Remove needless Exception from method signature
Fixes https://github.com/winterchord/xjava/commit/d09fb713d19abbcbdaaa979adb59aa7fc94532fd#commitcomment-14962172
The original PR (https://github.com/exercism/xjava/pull/69) that this issue
could have been fixed in was merged before I could fix this issue.
## Code After:
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class HelloWorldTest {
@Test
public void helloNoName() {
assertEquals("Hello, World!", HelloWorld.hello(""));
assertEquals("Hello, World!", HelloWorld.hello(null));
}
@Test
public void helloSampleName() {
assertEquals("Hello, Alice!", HelloWorld.hello("Alice"));
}
@Test
public void helloAnotherSampleName() {
assertEquals("Hello, Bob!", HelloWorld.hello("Bob"));
}
}
|
// ... existing code ...
}
@Test
public void helloAnotherSampleName() {
assertEquals("Hello, Bob!", HelloWorld.hello("Bob"));
}
}
// ... rest of the code ...
|
44766469827fb3a0966b1370022834da87d8b3e8
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="sandsnake",
license='Apache License 2.0',
version="0.1.0",
description="Manage activity indexes for objects.",
long_description=open('README.rst', 'r').read(),
author="Numan Sachwani",
author_email="[email protected]",
url="https://github.com/numan/sandsnake",
packages=find_packages(exclude=['tests']),
test_suite='nose.collector',
install_requires=[
'nydus>=0.10.5',
'redis>=2.7.2',
'python-dateutil==1.5',
],
tests_require=[
'nose>=1.0',
],
classifiers=[
"Intended Audience :: Developers",
'Intended Audience :: System Administrators',
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
],
)
|
from setuptools import setup, find_packages
setup(
name="sandsnake",
license='Apache License 2.0',
version="0.1.0",
description="Sorted indexes backed by redis.",
long_description=open('README.rst', 'r').read(),
author="Numan Sachwani",
author_email="[email protected]",
url="https://github.com/numan/sandsnake",
packages=find_packages(exclude=['tests']),
test_suite='nose.collector',
install_requires=[
'nydus>=0.10.5',
'redis>=2.7.2',
'python-dateutil==1.5',
],
tests_require=[
'nose>=1.0',
],
classifiers=[
"Intended Audience :: Developers",
'Intended Audience :: System Administrators',
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
],
)
|
Update description to describe a more general case
|
Update description to describe a more general case
|
Python
|
apache-2.0
|
numan/sandsnake
|
python
|
## Code Before:
from setuptools import setup, find_packages
setup(
name="sandsnake",
license='Apache License 2.0',
version="0.1.0",
description="Manage activity indexes for objects.",
long_description=open('README.rst', 'r').read(),
author="Numan Sachwani",
author_email="[email protected]",
url="https://github.com/numan/sandsnake",
packages=find_packages(exclude=['tests']),
test_suite='nose.collector',
install_requires=[
'nydus>=0.10.5',
'redis>=2.7.2',
'python-dateutil==1.5',
],
tests_require=[
'nose>=1.0',
],
classifiers=[
"Intended Audience :: Developers",
'Intended Audience :: System Administrators',
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
],
)
## Instruction:
Update description to describe a more general case
## Code After:
from setuptools import setup, find_packages
setup(
name="sandsnake",
license='Apache License 2.0',
version="0.1.0",
description="Sorted indexes backed by redis.",
long_description=open('README.rst', 'r').read(),
author="Numan Sachwani",
author_email="[email protected]",
url="https://github.com/numan/sandsnake",
packages=find_packages(exclude=['tests']),
test_suite='nose.collector',
install_requires=[
'nydus>=0.10.5',
'redis>=2.7.2',
'python-dateutil==1.5',
],
tests_require=[
'nose>=1.0',
],
classifiers=[
"Intended Audience :: Developers",
'Intended Audience :: System Administrators',
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
],
)
|
...
name="sandsnake",
license='Apache License 2.0',
version="0.1.0",
description="Sorted indexes backed by redis.",
long_description=open('README.rst', 'r').read(),
author="Numan Sachwani",
author_email="[email protected]",
...
|
a718c85a2d55da2372278dcd9ae8977fd53197c3
|
ios/KontaktBeacons.h
|
ios/KontaktBeacons.h
|
#import "RCTBridgeModule.h"
#else
#import <React/RCTBridgeModule.h>
#endif
#if __has_include("RCTEventEmitter.h")
#import "RCTEventEmitter.h"
#else
#import <React/RCTEventEmitter.h>
#endif
@interface KontaktBeacons : RCTEventEmitter <RCTBridgeModule>
@end
|
#import <React/RCTBridgeModule.h>
#else
#import "RCTBridgeModule.h"
#endif
#if __has_include(<React/RCTEventEmitter.h>)
#import <React/RCTEventEmitter.h>
#else
#import "RCTEventEmitter.h"
#endif
@interface KontaktBeacons : RCTEventEmitter <RCTBridgeModule>
@end
|
Fix imports for React Native 0.48.x.
|
Fix imports for React Native 0.48.x.
|
C
|
mit
|
Artirigo/react-native-kontaktio,Artirigo/react-native-kontaktio,Artirigo/react-native-kontaktio,Artirigo/react-native-kontaktio,Artirigo/react-native-kontaktio
|
c
|
## Code Before:
#import "RCTBridgeModule.h"
#else
#import <React/RCTBridgeModule.h>
#endif
#if __has_include("RCTEventEmitter.h")
#import "RCTEventEmitter.h"
#else
#import <React/RCTEventEmitter.h>
#endif
@interface KontaktBeacons : RCTEventEmitter <RCTBridgeModule>
@end
## Instruction:
Fix imports for React Native 0.48.x.
## Code After:
#import <React/RCTBridgeModule.h>
#else
#import "RCTBridgeModule.h"
#endif
#if __has_include(<React/RCTEventEmitter.h>)
#import <React/RCTEventEmitter.h>
#else
#import "RCTEventEmitter.h"
#endif
@interface KontaktBeacons : RCTEventEmitter <RCTBridgeModule>
@end
|
...
#import <React/RCTBridgeModule.h>
#else
#import "RCTBridgeModule.h"
#endif
#if __has_include(<React/RCTEventEmitter.h>)
#import <React/RCTEventEmitter.h>
#else
#import "RCTEventEmitter.h"
#endif
...
|
8ea90a83318e4c1cb01b773435ef4861a459ac0f
|
indra/sources/utils.py
|
indra/sources/utils.py
|
"""Processor for remote INDRA JSON files."""
import requests
from typing import List
from ..statements import Statement, stmts_from_json
__all__ = [
'RemoteProcessor',
]
class RemoteProcessor:
"""A processor for INDRA JSON file to be retrieved by URL.
Parameters
----------
url :
The URL of the INDRA JSON file to load
"""
#: The URL of the data
url: str
#: A list of statements
statements: List[Statement]
def __init__(self, url: str):
self.url = url
self.statements = []
def extract_statements(self) -> List[Statement]:
"""Extract statements from the remote JSON file."""
res = requests.get(self.url)
res.raise_for_status()
self.statements = stmts_from_json(res.json())
return self.statements
|
"""Processor for remote INDRA JSON files."""
from collections import Counter
import requests
from typing import List
from ..statements import Statement, stmts_from_json
__all__ = [
'RemoteProcessor',
]
class RemoteProcessor:
"""A processor for INDRA JSON file to be retrieved by URL.
Parameters
----------
url :
The URL of the INDRA JSON file to load
"""
#: The URL of the data
url: str
def __init__(self, url: str):
self.url = url
self._statements = None
@property
def statements(self) -> List[Statement]:
"""The extracted statements."""
if self._statements is None:
self.extract_statements()
return self._statements
def extract_statements(self) -> List[Statement]:
"""Extract statements from the remote JSON file."""
res = requests.get(self.url)
res.raise_for_status()
self._statements = stmts_from_json(res.json())
return self._statements
def print_summary(self) -> None:
"""print a summary of the statements."""
from tabulate import tabulate
print(tabulate(
Counter(
statement.__class__.__name__
for statement in self.statements
).most_common(),
headers=["Statement Type", "Count"],
))
|
Implement autoloading and summary function
|
Implement autoloading and summary function
|
Python
|
bsd-2-clause
|
sorgerlab/indra,sorgerlab/indra,sorgerlab/belpy,bgyori/indra,bgyori/indra,bgyori/indra,johnbachman/indra,sorgerlab/belpy,sorgerlab/indra,johnbachman/indra,sorgerlab/belpy,johnbachman/indra
|
python
|
## Code Before:
"""Processor for remote INDRA JSON files."""
import requests
from typing import List
from ..statements import Statement, stmts_from_json
__all__ = [
'RemoteProcessor',
]
class RemoteProcessor:
"""A processor for INDRA JSON file to be retrieved by URL.
Parameters
----------
url :
The URL of the INDRA JSON file to load
"""
#: The URL of the data
url: str
#: A list of statements
statements: List[Statement]
def __init__(self, url: str):
self.url = url
self.statements = []
def extract_statements(self) -> List[Statement]:
"""Extract statements from the remote JSON file."""
res = requests.get(self.url)
res.raise_for_status()
self.statements = stmts_from_json(res.json())
return self.statements
## Instruction:
Implement autoloading and summary function
## Code After:
"""Processor for remote INDRA JSON files."""
from collections import Counter
import requests
from typing import List
from ..statements import Statement, stmts_from_json
__all__ = [
'RemoteProcessor',
]
class RemoteProcessor:
"""A processor for INDRA JSON file to be retrieved by URL.
Parameters
----------
url :
The URL of the INDRA JSON file to load
"""
#: The URL of the data
url: str
def __init__(self, url: str):
self.url = url
self._statements = None
@property
def statements(self) -> List[Statement]:
"""The extracted statements."""
if self._statements is None:
self.extract_statements()
return self._statements
def extract_statements(self) -> List[Statement]:
"""Extract statements from the remote JSON file."""
res = requests.get(self.url)
res.raise_for_status()
self._statements = stmts_from_json(res.json())
return self._statements
def print_summary(self) -> None:
"""print a summary of the statements."""
from tabulate import tabulate
print(tabulate(
Counter(
statement.__class__.__name__
for statement in self.statements
).most_common(),
headers=["Statement Type", "Count"],
))
|
// ... existing code ...
"""Processor for remote INDRA JSON files."""
from collections import Counter
import requests
from typing import List
// ... modified code ...
#: The URL of the data
url: str
def __init__(self, url: str):
self.url = url
self._statements = None
@property
def statements(self) -> List[Statement]:
"""The extracted statements."""
if self._statements is None:
self.extract_statements()
return self._statements
def extract_statements(self) -> List[Statement]:
"""Extract statements from the remote JSON file."""
res = requests.get(self.url)
res.raise_for_status()
self._statements = stmts_from_json(res.json())
return self._statements
def print_summary(self) -> None:
"""print a summary of the statements."""
from tabulate import tabulate
print(tabulate(
Counter(
statement.__class__.__name__
for statement in self.statements
).most_common(),
headers=["Statement Type", "Count"],
))
// ... rest of the code ...
|
f78118a2fbbb28bfb52aa497c4736f5fce0eb0b3
|
src/main/kotlin/org/rust/lang/refactoring/RsNamesValidator.kt
|
src/main/kotlin/org/rust/lang/refactoring/RsNamesValidator.kt
|
package org.rust.lang.refactoring
import com.intellij.lang.refactoring.NamesValidator
import com.intellij.openapi.project.Project
import com.intellij.psi.tree.IElementType
import org.rust.lang.core.lexer.RsLexer
import org.rust.lang.core.psi.RS_KEYWORDS
import org.rust.lang.core.psi.RsElementTypes.IDENTIFIER
import org.rust.lang.core.psi.RsElementTypes.QUOTE_IDENTIFIER
class RsNamesValidator : NamesValidator {
override fun isKeyword(name: String, project: Project?): Boolean =
isKeyword(name, project, true)
fun isKeyword(name: String, @Suppress("UNUSED_PARAMETER") project: Project?, withPrimitives: Boolean): Boolean =
getLexerType(name) in RS_KEYWORDS || (withPrimitives && name in PrimitiveTypes)
override fun isIdentifier(name: String, project: Project?): Boolean =
isIdentifier(name, project, true)
fun isIdentifier(name: String, @Suppress("UNUSED_PARAMETER") project: Project?, withPrimitives: Boolean): Boolean =
when (getLexerType(name)) {
IDENTIFIER -> !withPrimitives || name !in PrimitiveTypes
QUOTE_IDENTIFIER -> true
else -> false
}
private fun getLexerType(text: String): IElementType? {
val lexer = RsLexer()
lexer.start(text)
if (lexer.tokenEnd == text.length) {
return lexer.tokenType
} else {
return null
}
}
companion object {
val PrimitiveTypes = arrayOf(
"bool",
"char",
"i8",
"i16",
"i32",
"i64",
"u8",
"u16",
"u32",
"u64",
"isize",
"usize",
"f32",
"f64",
"str"
)
val PredefinedLifetimes = arrayOf("'static")
}
}
|
package org.rust.lang.refactoring
import com.intellij.lang.refactoring.NamesValidator
import com.intellij.openapi.project.Project
import com.intellij.psi.tree.IElementType
import org.rust.lang.core.lexer.RsLexer
import org.rust.lang.core.psi.RS_KEYWORDS
import org.rust.lang.core.psi.RsElementTypes.IDENTIFIER
import org.rust.lang.core.psi.RsElementTypes.QUOTE_IDENTIFIER
class RsNamesValidator : NamesValidator {
override fun isKeyword(name: String, project: Project?): Boolean = getLexerType(name) in RS_KEYWORDS
override fun isIdentifier(name: String, project: Project?): Boolean =
when (getLexerType(name)) {
IDENTIFIER, QUOTE_IDENTIFIER -> true
else -> false
}
private fun getLexerType(text: String): IElementType? {
val lexer = RsLexer()
lexer.start(text)
return if (lexer.tokenEnd == text.length) lexer.tokenType else null
}
companion object {
val PredefinedLifetimes = arrayOf("'static")
}
}
|
Allow to use primitive type names in renaming
|
REN: Allow to use primitive type names in renaming
|
Kotlin
|
mit
|
d9n/intellij-rust,d9n/intellij-rust,anton-okolelov/intellij-rust,alygin/intellij-rust,intellij-rust/intellij-rust,intellij-rust/intellij-rust,intellij-rust/intellij-rust,d9n/intellij-rust,Undin/intellij-rust,himikof/intellij-rust,alygin/intellij-rust,intellij-rust/intellij-rust,himikof/intellij-rust,intellij-rust/intellij-rust,Undin/intellij-rust,himikof/intellij-rust,Undin/intellij-rust,alygin/intellij-rust,Undin/intellij-rust,d9n/intellij-rust,alygin/intellij-rust,anton-okolelov/intellij-rust,intellij-rust/intellij-rust,Undin/intellij-rust,himikof/intellij-rust,anton-okolelov/intellij-rust,anton-okolelov/intellij-rust,Undin/intellij-rust,alygin/intellij-rust,himikof/intellij-rust,anton-okolelov/intellij-rust,d9n/intellij-rust
|
kotlin
|
## Code Before:
package org.rust.lang.refactoring
import com.intellij.lang.refactoring.NamesValidator
import com.intellij.openapi.project.Project
import com.intellij.psi.tree.IElementType
import org.rust.lang.core.lexer.RsLexer
import org.rust.lang.core.psi.RS_KEYWORDS
import org.rust.lang.core.psi.RsElementTypes.IDENTIFIER
import org.rust.lang.core.psi.RsElementTypes.QUOTE_IDENTIFIER
class RsNamesValidator : NamesValidator {
override fun isKeyword(name: String, project: Project?): Boolean =
isKeyword(name, project, true)
fun isKeyword(name: String, @Suppress("UNUSED_PARAMETER") project: Project?, withPrimitives: Boolean): Boolean =
getLexerType(name) in RS_KEYWORDS || (withPrimitives && name in PrimitiveTypes)
override fun isIdentifier(name: String, project: Project?): Boolean =
isIdentifier(name, project, true)
fun isIdentifier(name: String, @Suppress("UNUSED_PARAMETER") project: Project?, withPrimitives: Boolean): Boolean =
when (getLexerType(name)) {
IDENTIFIER -> !withPrimitives || name !in PrimitiveTypes
QUOTE_IDENTIFIER -> true
else -> false
}
private fun getLexerType(text: String): IElementType? {
val lexer = RsLexer()
lexer.start(text)
if (lexer.tokenEnd == text.length) {
return lexer.tokenType
} else {
return null
}
}
companion object {
val PrimitiveTypes = arrayOf(
"bool",
"char",
"i8",
"i16",
"i32",
"i64",
"u8",
"u16",
"u32",
"u64",
"isize",
"usize",
"f32",
"f64",
"str"
)
val PredefinedLifetimes = arrayOf("'static")
}
}
## Instruction:
REN: Allow to use primitive type names in renaming
## Code After:
package org.rust.lang.refactoring
import com.intellij.lang.refactoring.NamesValidator
import com.intellij.openapi.project.Project
import com.intellij.psi.tree.IElementType
import org.rust.lang.core.lexer.RsLexer
import org.rust.lang.core.psi.RS_KEYWORDS
import org.rust.lang.core.psi.RsElementTypes.IDENTIFIER
import org.rust.lang.core.psi.RsElementTypes.QUOTE_IDENTIFIER
class RsNamesValidator : NamesValidator {
override fun isKeyword(name: String, project: Project?): Boolean = getLexerType(name) in RS_KEYWORDS
override fun isIdentifier(name: String, project: Project?): Boolean =
when (getLexerType(name)) {
IDENTIFIER, QUOTE_IDENTIFIER -> true
else -> false
}
private fun getLexerType(text: String): IElementType? {
val lexer = RsLexer()
lexer.start(text)
return if (lexer.tokenEnd == text.length) lexer.tokenType else null
}
companion object {
val PredefinedLifetimes = arrayOf("'static")
}
}
|
// ... existing code ...
import org.rust.lang.core.psi.RsElementTypes.QUOTE_IDENTIFIER
class RsNamesValidator : NamesValidator {
override fun isKeyword(name: String, project: Project?): Boolean = getLexerType(name) in RS_KEYWORDS
override fun isIdentifier(name: String, project: Project?): Boolean =
when (getLexerType(name)) {
IDENTIFIER, QUOTE_IDENTIFIER -> true
else -> false
}
// ... modified code ...
private fun getLexerType(text: String): IElementType? {
val lexer = RsLexer()
lexer.start(text)
return if (lexer.tokenEnd == text.length) lexer.tokenType else null
}
companion object {
val PredefinedLifetimes = arrayOf("'static")
}
}
// ... rest of the code ...
|
b26ce5b5ff778208314bfd21014f88ee24917d7a
|
ideas/views.py
|
ideas/views.py
|
from .models import Idea
from .serializers import IdeaSerializer
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
@api_view(['GET',])
def idea_list(request):
if request.method == 'GET':
ideas = Idea.objects.all()
serializer = IdeaSerializer(ideas, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def results(request):
if request.method == 'GET':
ideas_ordered = Idea.objects.order_by('-votes')
serializer = IdeaSerializer(ideas_ordered, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['POST',])
def vote(request):
if request.method == 'POST':
idea = Idea.objects.get(pk=request.data)
idea.votes += 1
idea.save()
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
|
from .models import Idea
from .serializers import IdeaSerializer
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
@api_view(['GET',])
def idea_list(request):
if request.method == 'GET':
ideas = Idea.objects.all()
serializer = IdeaSerializer(ideas, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def idea(request, pk):
if request.method == 'GET':
idea = Idea.objects.get(pk=pk)
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def results(request):
if request.method == 'GET':
ideas_ordered = Idea.objects.order_by('-votes')
serializer = IdeaSerializer(ideas_ordered, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['POST',])
def vote(request, pk):
if request.method == 'POST':
idea = Idea.objects.get(pk=pk)
idea.votes += 1
idea.save()
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
|
Add GET for idea and refactor vote
|
Add GET for idea and refactor vote
|
Python
|
mit
|
neosergio/vote_hackatrix_backend
|
python
|
## Code Before:
from .models import Idea
from .serializers import IdeaSerializer
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
@api_view(['GET',])
def idea_list(request):
if request.method == 'GET':
ideas = Idea.objects.all()
serializer = IdeaSerializer(ideas, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def results(request):
if request.method == 'GET':
ideas_ordered = Idea.objects.order_by('-votes')
serializer = IdeaSerializer(ideas_ordered, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['POST',])
def vote(request):
if request.method == 'POST':
idea = Idea.objects.get(pk=request.data)
idea.votes += 1
idea.save()
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
## Instruction:
Add GET for idea and refactor vote
## Code After:
from .models import Idea
from .serializers import IdeaSerializer
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
@api_view(['GET',])
def idea_list(request):
if request.method == 'GET':
ideas = Idea.objects.all()
serializer = IdeaSerializer(ideas, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def idea(request, pk):
if request.method == 'GET':
idea = Idea.objects.get(pk=pk)
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def results(request):
if request.method == 'GET':
ideas_ordered = Idea.objects.order_by('-votes')
serializer = IdeaSerializer(ideas_ordered, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['POST',])
def vote(request, pk):
if request.method == 'POST':
idea = Idea.objects.get(pk=pk)
idea.votes += 1
idea.save()
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
|
# ... existing code ...
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def idea(request, pk):
if request.method == 'GET':
idea = Idea.objects.get(pk=pk)
serializer = IdeaSerializer(idea)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['GET',])
def results(request):
if request.method == 'GET':
ideas_ordered = Idea.objects.order_by('-votes')
# ... modified code ...
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['POST',])
def vote(request, pk):
if request.method == 'POST':
idea = Idea.objects.get(pk=pk)
idea.votes += 1
idea.save()
serializer = IdeaSerializer(idea)
# ... rest of the code ...
|
926c4662c7b3059503bd0a22ee9624bb39ab40fd
|
sharepa/__init__.py
|
sharepa/__init__.py
|
from sharepa.search import ShareSearch, basic_search # noqa
from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa
from sharepa.helpers import source_agg, source_counts
|
from sharepa.search import ShareSearch, basic_search # noqa
from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa
|
Remove helper functions from sharepa init
|
Remove helper functions from sharepa init
|
Python
|
mit
|
samanehsan/sharepa,erinspace/sharepa,CenterForOpenScience/sharepa,fabianvf/sharepa
|
python
|
## Code Before:
from sharepa.search import ShareSearch, basic_search # noqa
from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa
from sharepa.helpers import source_agg, source_counts
## Instruction:
Remove helper functions from sharepa init
## Code After:
from sharepa.search import ShareSearch, basic_search # noqa
from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa
|
...
from sharepa.search import ShareSearch, basic_search # noqa
from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa
...
|
a925c19b85fcd3a2b6d08d253d3c8d1ef3c7b02f
|
core/migrations/0008_auto_20151029_0953.py
|
core/migrations/0008_auto_20151029_0953.py
|
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.contenttypes.management import update_all_contenttypes
def add_impersonate_permission(apps, schema_editor):
update_all_contenttypes() # Fixes tests
ContentType = apps.get_model('contenttypes.ContentType')
Permission = apps.get_model('auth.Permission')
content_type = ContentType.objects.get(app_label='auth', model='user')
Permission.objects.create(content_type=content_type,
codename='impersonate',
name='Can impersonate other user')
def delete_impersonate_permission(apps, schema_editor):
apps.get_model('auth.Permission').objects.get(
codename='impersonate').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20150319_0929'),
]
operations = [
migrations.RunPython(add_impersonate_permission,
delete_impersonate_permission),
]
|
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.contenttypes.management import update_all_contenttypes
def add_impersonate_permission(apps, schema_editor):
update_all_contenttypes() # Fixes tests
ContentType = apps.get_model('contenttypes.ContentType')
Permission = apps.get_model('auth.Permission')
content_type = ContentType.objects.using(
schema_editor.connection.alias).get(app_label='auth', model='user')
Permission.objects.using(schema_editor.connection.alias).create(
content_type=content_type,
codename='impersonate',
name='Can impersonate other user'
)
def delete_impersonate_permission(apps, schema_editor):
perm = apps.get_model('auth.Permission').objects.using(
schema_editor.connection.alias).get(codename='impersonate')
perm.delete(using=schema_editor.connection.alias)
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20150319_0929'),
]
operations = [
migrations.RunPython(add_impersonate_permission,
delete_impersonate_permission),
]
|
Update latest migration to use the database provided to the migrate management command
|
Update latest migration to use the database provided to the migrate management command
|
Python
|
agpl-3.0
|
tic-ull/portal-del-investigador,tic-ull/portal-del-investigador,tic-ull/portal-del-investigador,tic-ull/portal-del-investigador
|
python
|
## Code Before:
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.contenttypes.management import update_all_contenttypes
def add_impersonate_permission(apps, schema_editor):
update_all_contenttypes() # Fixes tests
ContentType = apps.get_model('contenttypes.ContentType')
Permission = apps.get_model('auth.Permission')
content_type = ContentType.objects.get(app_label='auth', model='user')
Permission.objects.create(content_type=content_type,
codename='impersonate',
name='Can impersonate other user')
def delete_impersonate_permission(apps, schema_editor):
apps.get_model('auth.Permission').objects.get(
codename='impersonate').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20150319_0929'),
]
operations = [
migrations.RunPython(add_impersonate_permission,
delete_impersonate_permission),
]
## Instruction:
Update latest migration to use the database provided to the migrate management command
## Code After:
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.contenttypes.management import update_all_contenttypes
def add_impersonate_permission(apps, schema_editor):
update_all_contenttypes() # Fixes tests
ContentType = apps.get_model('contenttypes.ContentType')
Permission = apps.get_model('auth.Permission')
content_type = ContentType.objects.using(
schema_editor.connection.alias).get(app_label='auth', model='user')
Permission.objects.using(schema_editor.connection.alias).create(
content_type=content_type,
codename='impersonate',
name='Can impersonate other user'
)
def delete_impersonate_permission(apps, schema_editor):
perm = apps.get_model('auth.Permission').objects.using(
schema_editor.connection.alias).get(codename='impersonate')
perm.delete(using=schema_editor.connection.alias)
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20150319_0929'),
]
operations = [
migrations.RunPython(add_impersonate_permission,
delete_impersonate_permission),
]
|
// ... existing code ...
update_all_contenttypes() # Fixes tests
ContentType = apps.get_model('contenttypes.ContentType')
Permission = apps.get_model('auth.Permission')
content_type = ContentType.objects.using(
schema_editor.connection.alias).get(app_label='auth', model='user')
Permission.objects.using(schema_editor.connection.alias).create(
content_type=content_type,
codename='impersonate',
name='Can impersonate other user'
)
def delete_impersonate_permission(apps, schema_editor):
perm = apps.get_model('auth.Permission').objects.using(
schema_editor.connection.alias).get(codename='impersonate')
perm.delete(using=schema_editor.connection.alias)
class Migration(migrations.Migration):
// ... rest of the code ...
|
02349b71dda26147f756385a109603f576b199c5
|
src/test/java/com/phsshp/MetricsReporterTest.java
|
src/test/java/com/phsshp/MetricsReporterTest.java
|
package com.phsshp;
import com.phsshp.testutils.matchers.MetricsMatcher;
import org.junit.Test;
import java.io.File;
import java.util.Arrays;
import java.util.List;
import static com.phsshp.testutils.matchers.MetricsMatcher.metricsMatching;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
public class MetricsReporterTest {
@Test
public void reportMetricsForJavaFiles() throws Exception {
// TODO: crappy test for now
List<File> files = Arrays.asList(
new File("src/test/resources/test-project/SomeFile.java"),
new File("src/test/resources/test-project/pkg1/AnotherInPackage1.java"));
List<Metrics> metrics = new MetricsReporter().report(files);
assertThat(metrics.size(), is(2));
assertThat(metrics.get(0).getFile().getName(), equalTo("SomeFile.java"));
assertThat(metrics.get(0).getValue(), equalTo(1));
assertThat(metrics.get(1).getFile().getName(), equalTo("AnotherInPackage1.java"));
assertThat(metrics.get(1).getValue(), equalTo(3));
assertThat(metrics.get(0), metricsMatching("SomeFile.java", 1));
}
}
|
package com.phsshp;
import org.junit.Test;
import java.io.File;
import java.util.Arrays;
import java.util.List;
import static com.phsshp.testutils.matchers.MetricsMatcher.metricsMatching;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
public class MetricsReporterTest {
@Test
public void reportMetricsForJavaFiles() throws Exception {
List<File> files = Arrays.asList(
new File("src/test/resources/test-project/SomeFile.java"),
new File("src/test/resources/test-project/pkg1/AnotherInPackage1.java"));
List<Metrics> metrics = new MetricsReporter().report(files);
assertThat(metrics, contains(
metricsMatching("SomeFile.java", 1),
metricsMatching("AnotherInPackage1.java", 3)));
}
}
|
Refactor test to use matchers
|
Refactor test to use matchers
|
Java
|
mit
|
phss/java-code-metrics
|
java
|
## Code Before:
package com.phsshp;
import com.phsshp.testutils.matchers.MetricsMatcher;
import org.junit.Test;
import java.io.File;
import java.util.Arrays;
import java.util.List;
import static com.phsshp.testutils.matchers.MetricsMatcher.metricsMatching;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
public class MetricsReporterTest {
@Test
public void reportMetricsForJavaFiles() throws Exception {
// TODO: crappy test for now
List<File> files = Arrays.asList(
new File("src/test/resources/test-project/SomeFile.java"),
new File("src/test/resources/test-project/pkg1/AnotherInPackage1.java"));
List<Metrics> metrics = new MetricsReporter().report(files);
assertThat(metrics.size(), is(2));
assertThat(metrics.get(0).getFile().getName(), equalTo("SomeFile.java"));
assertThat(metrics.get(0).getValue(), equalTo(1));
assertThat(metrics.get(1).getFile().getName(), equalTo("AnotherInPackage1.java"));
assertThat(metrics.get(1).getValue(), equalTo(3));
assertThat(metrics.get(0), metricsMatching("SomeFile.java", 1));
}
}
## Instruction:
Refactor test to use matchers
## Code After:
package com.phsshp;
import org.junit.Test;
import java.io.File;
import java.util.Arrays;
import java.util.List;
import static com.phsshp.testutils.matchers.MetricsMatcher.metricsMatching;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
public class MetricsReporterTest {
@Test
public void reportMetricsForJavaFiles() throws Exception {
List<File> files = Arrays.asList(
new File("src/test/resources/test-project/SomeFile.java"),
new File("src/test/resources/test-project/pkg1/AnotherInPackage1.java"));
List<Metrics> metrics = new MetricsReporter().report(files);
assertThat(metrics, contains(
metricsMatching("SomeFile.java", 1),
metricsMatching("AnotherInPackage1.java", 3)));
}
}
|
# ... existing code ...
package com.phsshp;
import org.junit.Test;
import java.io.File;
# ... modified code ...
import static com.phsshp.testutils.matchers.MetricsMatcher.metricsMatching;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
public class MetricsReporterTest {
@Test
public void reportMetricsForJavaFiles() throws Exception {
List<File> files = Arrays.asList(
new File("src/test/resources/test-project/SomeFile.java"),
new File("src/test/resources/test-project/pkg1/AnotherInPackage1.java"));
...
List<Metrics> metrics = new MetricsReporter().report(files);
assertThat(metrics, contains(
metricsMatching("SomeFile.java", 1),
metricsMatching("AnotherInPackage1.java", 3)));
}
}
# ... rest of the code ...
|
2e4b3f3dc8e0f949700c810912e32a2dffa2def3
|
ttag/__init__.py
|
ttag/__init__.py
|
from ttag.args import Arg, BasicArg, BooleanArg, ConstantArg, DateArg, \
DateTimeArg, IntegerArg, IsInstanceArg, KeywordsArg, ModelInstanceArg, \
StringArg, TimeArg
from ttag.core import Tag
from ttag.exceptions import TagArgumentMissing, TagValidationError
VERSION = (1, 0, 'alpha')
def get_version(number_only=False):
version = [str(VERSION[0])]
number = True
for bit in VERSION[1:]:
if not isinstance(bit, int):
if number_only:
break
number = False
version.append(number and '.' or '-')
version.append(str(bit))
return ''.join(version)
|
try:
from ttag.args import Arg, BasicArg, BooleanArg, ConstantArg, DateArg, \
DateTimeArg, IntegerArg, IsInstanceArg, KeywordsArg, \
ModelInstanceArg, StringArg, TimeArg
from ttag.core import Tag
from ttag.exceptions import TagArgumentMissing, TagValidationError
except ImportError:
# This allows setup.py to skip import errors which may occur if ttag is
# being installed at the same time as Django.
pass
VERSION = (1, 0, 'alpha', 2)
def get_version(number_only=False):
version = [str(VERSION[0])]
number = True
for bit in VERSION[1:]:
if not isinstance(bit, int):
if number_only:
break
number = False
version.append(number and '.' or '-')
version.append(str(bit))
return ''.join(version)
|
Work around an error if ttag is installed at the same time as Django
|
Work around an error if ttag is installed at the same time as Django
|
Python
|
bsd-3-clause
|
caktus/django-ttag,caktus/django-ttag,matuu/django-ttag,matuu/django-ttag,lincolnloop/django-ttag,lincolnloop/django-ttag
|
python
|
## Code Before:
from ttag.args import Arg, BasicArg, BooleanArg, ConstantArg, DateArg, \
DateTimeArg, IntegerArg, IsInstanceArg, KeywordsArg, ModelInstanceArg, \
StringArg, TimeArg
from ttag.core import Tag
from ttag.exceptions import TagArgumentMissing, TagValidationError
VERSION = (1, 0, 'alpha')
def get_version(number_only=False):
version = [str(VERSION[0])]
number = True
for bit in VERSION[1:]:
if not isinstance(bit, int):
if number_only:
break
number = False
version.append(number and '.' or '-')
version.append(str(bit))
return ''.join(version)
## Instruction:
Work around an error if ttag is installed at the same time as Django
## Code After:
try:
from ttag.args import Arg, BasicArg, BooleanArg, ConstantArg, DateArg, \
DateTimeArg, IntegerArg, IsInstanceArg, KeywordsArg, \
ModelInstanceArg, StringArg, TimeArg
from ttag.core import Tag
from ttag.exceptions import TagArgumentMissing, TagValidationError
except ImportError:
# This allows setup.py to skip import errors which may occur if ttag is
# being installed at the same time as Django.
pass
VERSION = (1, 0, 'alpha', 2)
def get_version(number_only=False):
version = [str(VERSION[0])]
number = True
for bit in VERSION[1:]:
if not isinstance(bit, int):
if number_only:
break
number = False
version.append(number and '.' or '-')
version.append(str(bit))
return ''.join(version)
|
...
try:
from ttag.args import Arg, BasicArg, BooleanArg, ConstantArg, DateArg, \
DateTimeArg, IntegerArg, IsInstanceArg, KeywordsArg, \
ModelInstanceArg, StringArg, TimeArg
from ttag.core import Tag
from ttag.exceptions import TagArgumentMissing, TagValidationError
except ImportError:
# This allows setup.py to skip import errors which may occur if ttag is
# being installed at the same time as Django.
pass
VERSION = (1, 0, 'alpha', 2)
def get_version(number_only=False):
...
|
301593cbbd25e8cfce3998450f4954ce0610f23e
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='python-ev3',
version='0.1',
description='Python library of Lego EV3',
author='Gong Yi',
author_email='[email protected]',
url='https://github.com/topikachu/python-ev3',
packages=['ev3', 'ev3.rawdevice'],
)
|
from distutils.core import setup
setup(name='python-ev3',
version='0.1',
description='Python library of Lego EV3',
author='Gong Yi',
author_email='[email protected]',
url='https://github.com/topikachu/python-ev3',
packages=['ev3', 'ev3.rawdevice', 'ev3.motor', 'ev3.sensor']
)
|
Update list of packages to install.
|
Update list of packages to install.
|
Python
|
apache-2.0
|
MaxNoe/python-ev3,topikachu/python-ev3,evz/python-ev3,MaxNoe/python-ev3,topikachu/python-ev3,evz/python-ev3
|
python
|
## Code Before:
from distutils.core import setup
setup(name='python-ev3',
version='0.1',
description='Python library of Lego EV3',
author='Gong Yi',
author_email='[email protected]',
url='https://github.com/topikachu/python-ev3',
packages=['ev3', 'ev3.rawdevice'],
)
## Instruction:
Update list of packages to install.
## Code After:
from distutils.core import setup
setup(name='python-ev3',
version='0.1',
description='Python library of Lego EV3',
author='Gong Yi',
author_email='[email protected]',
url='https://github.com/topikachu/python-ev3',
packages=['ev3', 'ev3.rawdevice', 'ev3.motor', 'ev3.sensor']
)
|
...
author='Gong Yi',
author_email='[email protected]',
url='https://github.com/topikachu/python-ev3',
packages=['ev3', 'ev3.rawdevice', 'ev3.motor', 'ev3.sensor']
)
...
|
eecb3468b581b4854f2162c2b62ac06ea744045e
|
malcolm/core/attributemeta.py
|
malcolm/core/attributemeta.py
|
from collections import OrderedDict
from malcolm.core.serializable import Serializable
class AttributeMeta(Serializable):
"""Abstract base class for Meta objects"""
# Type constants
SCALAR = "scalar"
TABLE = "table"
SCALARARRAY = "scalar_array"
def __init__(self, name, description, *args):
super(AttributeMeta, self).__init__(name, *args)
self.description = description
def validate(self, value):
"""
Abstract function to validate a given value
Args:
value(abstract): Value to validate
"""
raise NotImplementedError(
"Abstract validate function must be implemented in child classes")
def to_dict(self):
"""Convert object attributes into a dictionary"""
d = OrderedDict()
d["description"] = self.description
d["typeid"] = self.typeid
return d
|
from collections import OrderedDict
from malcolm.core.serializable import Serializable
class AttributeMeta(Serializable):
"""Abstract base class for Meta objects"""
def __init__(self, name, description, *args):
super(AttributeMeta, self).__init__(name, *args)
self.description = description
def validate(self, value):
"""
Abstract function to validate a given value
Args:
value(abstract): Value to validate
"""
raise NotImplementedError(
"Abstract validate function must be implemented in child classes")
def to_dict(self):
"""Convert object attributes into a dictionary"""
d = OrderedDict()
d["description"] = self.description
d["typeid"] = self.typeid
return d
|
Remove unused AttributeMeta type constants
|
Remove unused AttributeMeta type constants
|
Python
|
apache-2.0
|
dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm
|
python
|
## Code Before:
from collections import OrderedDict
from malcolm.core.serializable import Serializable
class AttributeMeta(Serializable):
"""Abstract base class for Meta objects"""
# Type constants
SCALAR = "scalar"
TABLE = "table"
SCALARARRAY = "scalar_array"
def __init__(self, name, description, *args):
super(AttributeMeta, self).__init__(name, *args)
self.description = description
def validate(self, value):
"""
Abstract function to validate a given value
Args:
value(abstract): Value to validate
"""
raise NotImplementedError(
"Abstract validate function must be implemented in child classes")
def to_dict(self):
"""Convert object attributes into a dictionary"""
d = OrderedDict()
d["description"] = self.description
d["typeid"] = self.typeid
return d
## Instruction:
Remove unused AttributeMeta type constants
## Code After:
from collections import OrderedDict
from malcolm.core.serializable import Serializable
class AttributeMeta(Serializable):
"""Abstract base class for Meta objects"""
def __init__(self, name, description, *args):
super(AttributeMeta, self).__init__(name, *args)
self.description = description
def validate(self, value):
"""
Abstract function to validate a given value
Args:
value(abstract): Value to validate
"""
raise NotImplementedError(
"Abstract validate function must be implemented in child classes")
def to_dict(self):
"""Convert object attributes into a dictionary"""
d = OrderedDict()
d["description"] = self.description
d["typeid"] = self.typeid
return d
|
...
class AttributeMeta(Serializable):
"""Abstract base class for Meta objects"""
def __init__(self, name, description, *args):
super(AttributeMeta, self).__init__(name, *args)
...
|
35a15e06feca24872acb42c5395b58b2a1bed60e
|
byceps/services/snippet/transfer/models.py
|
byceps/services/snippet/transfer/models.py
|
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from typing import NewType
from uuid import UUID
from ...site.transfer.models import SiteID
from ....typing import BrandID
@dataclass(frozen=True)
class Scope:
type_: str
name: str
@classmethod
def for_global(cls) -> Scope:
return cls('global', 'global')
@classmethod
def for_brand(cls, brand_id: BrandID) -> Scope:
return cls('brand', str(brand_id))
@classmethod
def for_site(cls, site_id: SiteID) -> Scope:
return cls('site', str(site_id))
SnippetID = NewType('SnippetID', UUID)
SnippetType = Enum('SnippetType', ['document', 'fragment'])
SnippetVersionID = NewType('SnippetVersionID', UUID)
MountpointID = NewType('MountpointID', UUID)
@dataclass(frozen=True)
class Mountpoint:
id: MountpointID
site_id: SiteID
endpoint_suffix: str
url_path: str
snippet_id: SnippetID
|
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from typing import NewType
from uuid import UUID
from ...site.transfer.models import SiteID
from ....typing import BrandID
@dataclass(frozen=True)
class Scope:
type_: str
name: str
@classmethod
def for_brand(cls, brand_id: BrandID) -> Scope:
return cls('brand', str(brand_id))
@classmethod
def for_site(cls, site_id: SiteID) -> Scope:
return cls('site', str(site_id))
SnippetID = NewType('SnippetID', UUID)
SnippetType = Enum('SnippetType', ['document', 'fragment'])
SnippetVersionID = NewType('SnippetVersionID', UUID)
MountpointID = NewType('MountpointID', UUID)
@dataclass(frozen=True)
class Mountpoint:
id: MountpointID
site_id: SiteID
endpoint_suffix: str
url_path: str
snippet_id: SnippetID
|
Remove unused class method `Scope.for_global`
|
Remove unused class method `Scope.for_global`
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
python
|
## Code Before:
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from typing import NewType
from uuid import UUID
from ...site.transfer.models import SiteID
from ....typing import BrandID
@dataclass(frozen=True)
class Scope:
type_: str
name: str
@classmethod
def for_global(cls) -> Scope:
return cls('global', 'global')
@classmethod
def for_brand(cls, brand_id: BrandID) -> Scope:
return cls('brand', str(brand_id))
@classmethod
def for_site(cls, site_id: SiteID) -> Scope:
return cls('site', str(site_id))
SnippetID = NewType('SnippetID', UUID)
SnippetType = Enum('SnippetType', ['document', 'fragment'])
SnippetVersionID = NewType('SnippetVersionID', UUID)
MountpointID = NewType('MountpointID', UUID)
@dataclass(frozen=True)
class Mountpoint:
id: MountpointID
site_id: SiteID
endpoint_suffix: str
url_path: str
snippet_id: SnippetID
## Instruction:
Remove unused class method `Scope.for_global`
## Code After:
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from typing import NewType
from uuid import UUID
from ...site.transfer.models import SiteID
from ....typing import BrandID
@dataclass(frozen=True)
class Scope:
type_: str
name: str
@classmethod
def for_brand(cls, brand_id: BrandID) -> Scope:
return cls('brand', str(brand_id))
@classmethod
def for_site(cls, site_id: SiteID) -> Scope:
return cls('site', str(site_id))
SnippetID = NewType('SnippetID', UUID)
SnippetType = Enum('SnippetType', ['document', 'fragment'])
SnippetVersionID = NewType('SnippetVersionID', UUID)
MountpointID = NewType('MountpointID', UUID)
@dataclass(frozen=True)
class Mountpoint:
id: MountpointID
site_id: SiteID
endpoint_suffix: str
url_path: str
snippet_id: SnippetID
|
# ... existing code ...
class Scope:
type_: str
name: str
@classmethod
def for_brand(cls, brand_id: BrandID) -> Scope:
# ... rest of the code ...
|
f967ba433284c573cbce47d84ae55c209801ad6e
|
ash/PRESUBMIT.py
|
ash/PRESUBMIT.py
|
def GetPreferredTrySlaves():
return ['linux_chromeos']
|
def GetPreferredTrySlaves():
return ['linux_chromeos', 'linux_chromeos_clang']
|
Add linux_chromeos_clang to the list of automatic trybots.
|
Add linux_chromeos_clang to the list of automatic trybots.
BUG=none
TEST=none
Review URL: https://chromiumcodereview.appspot.com/10833037
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@148600 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
krieger-od/nwjs_chromium.src,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,M4sse/chromium.src,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,anirudhSK/chromium,M4sse/chromium.src,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Chilledheart/chromium,ondra-novak/chromium.src,ondra-novak/chromium.src,zcbenz/cefode-chromium,bright-sparks/chromium-spacewalk,anirudhSK/chromium,pozdnyakov/chromium-crosswalk,nacl-webkit/chrome_deps,nacl-webkit/chrome_deps,chuan9/chromium-crosswalk,patrickm/chromium.src,nacl-webkit/chrome_deps,littlstar/chromium.src,zcbenz/cefode-chromium,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,crosswalk-project/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,anirudhSK/chromium,mogoweb/chromium-crosswalk,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,junmin-zhu/chromium-rivertrail,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,ltilve/chromium,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,dednal/chromium.src,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,markYoungH/chromium.src,ltilve/chromium,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,Jonekee/chromium.src,hujiajie/pa-chromium,mohamed--abdel-maksoud/chromium.src,timopulkkinen/BubbleFish,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,junmin-zhu/chromium-rivertrail,jaruba/chro
mium.src,anirudhSK/chromium,Fireblend/chromium-crosswalk,Chilledheart/chromium,pozdnyakov/chromium-crosswalk,dednal/chromium.src,junmin-zhu/chromium-rivertrail,Jonekee/chromium.src,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,junmin-zhu/chromium-rivertrail,M4sse/chromium.src,ChromiumWebApps/chromium,nacl-webkit/chrome_deps,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,M4sse/chromium.src,bright-sparks/chromium-spacewalk,jaruba/chromium.src,mogoweb/chromium-crosswalk,hujiajie/pa-chromium,hujiajie/pa-chromium,M4sse/chromium.src,mogoweb/chromium-crosswalk,markYoungH/chromium.src,Just-D/chromium-1,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,M4sse/chromium.src,pozdnyakov/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Chilledheart/chromium,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,hujiajie/pa-chromium,dushu1203/chromium.src,anirudhSK/chromium,ltilve/chromium,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ondra-novak/chromium.src,anirudhSK/chromium,jaruba/chromium.src,dushu1203/chromium.src,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,chuan9/chromium-crosswalk,patrickm/chromium.src,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,Jonekee/chromium.src,fujunwei/chromium-crosswalk,zcbenz/cefode-chromium,Chilledheart/chromium,dushu1203/chromium.src,anirudhSK/chromium,axinging/chromium-crosswalk,nacl-webkit/chrome_deps,dednal/chromium.src,hgl888/chromium-crosswalk,patrickm/chromium.src,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk-efl,littlstar/chromium.src,junmin-zhu/chromium-rivertrail,Chilledheart/chromium,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,hujiajie/pa-chromium,fujunwei/chromium-crosswalk,littlstar/chromium.src,patrickm/chromium.src,TheTypoMaster/chromium-crossw
alk,dednal/chromium.src,anirudhSK/chromium,Just-D/chromium-1,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,anirudhSK/chromium,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,junmin-zhu/chromium-rivertrail,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,nacl-webkit/chrome_deps,hgl888/chromium-crosswalk-efl,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,jaruba/chromium.src,markYoungH/chromium.src,markYoungH/chromium.src,timopulkkinen/BubbleFish,markYoungH/chromium.src,junmin-zhu/chromium-rivertrail,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,dushu1203/chromium.src,timopulkkinen/BubbleFish,pozdnyakov/chromium-crosswalk,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,ondra-novak/chromium.src,zcbenz/cefode-chromium,timopulkkinen/BubbleFish,pozdnyakov/chromium-crosswalk,patrickm/chromium.src,chuan9/chromium-crosswalk,dednal/chromium.src,hgl888/chromium-crosswalk,M4sse/chromium.src,jaruba/chromium.src,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,chuan9/chromium-crosswalk,pozdnyakov/chromium-crosswalk,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,timopulkkinen/BubbleFish,jaruba/chromium.src,ChromiumWebApps/chromium,ChromiumWebApps/chromium,ChromiumWebApps/chromium,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,Jonekee/chromium.src,nacl-webkit/chrome_deps,ChromiumWebApps/chromium,Chilledheart/chromium,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,Fireblend/ch
romium-crosswalk,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,Chilledheart/chromium,littlstar/chromium.src,littlstar/chromium.src,Jonekee/chromium.src,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,zcbenz/cefode-chromium,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk,mogoweb/chromium-crosswalk,Just-D/chromium-1,dushu1203/chromium.src,ChromiumWebApps/chromium,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,nacl-webkit/chrome_deps,krieger-od/nwjs_chromium.src,jaruba/chromium.src,nacl-webkit/chrome_deps,PeterWangIntel/chromium-crosswalk,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,fujunwei/chromium-crosswalk,Just-D/chromium-1,dednal/chromium.src,markYoungH/chromium.src,dushu1203/chromium.src,Fireblend/chromium-crosswalk,ChromiumWebApps/chromium,junmin-zhu/chromium-rivertrail,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,nacl-webkit/chrome_deps,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,Just-D/chromium-1,dednal/chromium.src,jaruba/chromium.src,pozdnyakov/chromium-crosswalk,littlstar/chromium.src,markYoungH/chromium.src,junmin-zhu/chromium-rivertrail,pozdnyakov/chromium-crosswalk,ltilve/chromium,krieger-od/nwjs_chromium.src,ltilve/chromium,anirudhSK/chromium,ltilve/chromium,Fireblend/chromium-crosswalk,ltilve/chromium,patrickm/chromium.src,mogoweb/chromium-crosswalk,junmin-zhu/chromium-rivertrail,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,chuan9/chromium-crosswalk,dednal/chromium.src,ltilve/chromium,markYoungH/chromium.src,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,junmin-zhu/chromium-rivertrail,timopulkkinen/BubbleFish,chuan9/chromium-crosswalk,dushu1203/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,timopulkkinen/Bubble
Fish,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,nacl-webkit/chrome_deps,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,littlstar/chromium.src,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,Just-D/chromium-1,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,hujiajie/pa-chromium,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,Jonekee/chromium.src,ondra-novak/chromium.src,Chilledheart/chromium,timopulkkinen/BubbleFish,hujiajie/pa-chromium,Just-D/chromium-1,ltilve/chromium,jaruba/chromium.src,dednal/chromium.src,hujiajie/pa-chromium,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk
|
python
|
## Code Before:
def GetPreferredTrySlaves():
return ['linux_chromeos']
## Instruction:
Add linux_chromeos_clang to the list of automatic trybots.
BUG=none
TEST=none
Review URL: https://chromiumcodereview.appspot.com/10833037
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@148600 0039d316-1c4b-4281-b951-d872f2087c98
## Code After:
def GetPreferredTrySlaves():
return ['linux_chromeos', 'linux_chromeos_clang']
|
# ... existing code ...
def GetPreferredTrySlaves():
return ['linux_chromeos', 'linux_chromeos_clang']
# ... rest of the code ...
|
22125d8b84dc6a6ccf7eb72e34fd30642cfb883c
|
src/main/java/ip/cl/clipapp/Application.java
|
src/main/java/ip/cl/clipapp/Application.java
|
package ip.cl.clipapp;
import java.net.URI;
import java.net.URISyntaxException;
import javax.sql.DataSource;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Profile;
@ComponentScan
@SpringBootApplication
public class Application {
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
@Profile(ClipAppProfile.HEROKU)
@Bean
public DataSource dataSource() throws URISyntaxException {
URI dbUri = new URI(System.getenv("DATABASE_URL"));
String username = dbUri.getUserInfo().split(":")[0];
String password = dbUri.getUserInfo().split(":")[1];
String dbUrl = "jdbc:postgresql://" + dbUri.getHost() + ':' + dbUri.getPort() + dbUri.getPath();
return DataSourceBuilder.create()
.driverClassName("org.postgresql.Driver")
.username(username)
.password(password)
.url(dbUrl)
.build();
}
}
|
package ip.cl.clipapp;
import java.net.URI;
import java.net.URISyntaxException;
import javax.sql.DataSource;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceBuilder;
import org.springframework.boot.autoconfigure.web.ErrorMvcAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Profile;
@ComponentScan
@EnableAutoConfiguration(exclude = { ErrorMvcAutoConfiguration.class })
@SpringBootApplication
public class Application {
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
@Profile(ClipAppProfile.HEROKU)
@Bean
public DataSource dataSource() throws URISyntaxException {
URI dbUri = new URI(System.getenv("DATABASE_URL"));
String username = dbUri.getUserInfo().split(":")[0];
String password = dbUri.getUserInfo().split(":")[1];
String dbUrl = "jdbc:postgresql://" + dbUri.getHost() + ':' + dbUri.getPort() + dbUri.getPath();
return DataSourceBuilder.create()
.driverClassName("org.postgresql.Driver")
.username(username)
.password(password)
.url(dbUrl)
.build();
}
}
|
Remove default white label error page
|
Remove default white label error page
|
Java
|
mit
|
pzn/clipapp,pzn/clipapp
|
java
|
## Code Before:
package ip.cl.clipapp;
import java.net.URI;
import java.net.URISyntaxException;
import javax.sql.DataSource;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Profile;
@ComponentScan
@SpringBootApplication
public class Application {
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
@Profile(ClipAppProfile.HEROKU)
@Bean
public DataSource dataSource() throws URISyntaxException {
URI dbUri = new URI(System.getenv("DATABASE_URL"));
String username = dbUri.getUserInfo().split(":")[0];
String password = dbUri.getUserInfo().split(":")[1];
String dbUrl = "jdbc:postgresql://" + dbUri.getHost() + ':' + dbUri.getPort() + dbUri.getPath();
return DataSourceBuilder.create()
.driverClassName("org.postgresql.Driver")
.username(username)
.password(password)
.url(dbUrl)
.build();
}
}
## Instruction:
Remove default white label error page
## Code After:
package ip.cl.clipapp;
import java.net.URI;
import java.net.URISyntaxException;
import javax.sql.DataSource;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceBuilder;
import org.springframework.boot.autoconfigure.web.ErrorMvcAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Profile;
@ComponentScan
@EnableAutoConfiguration(exclude = { ErrorMvcAutoConfiguration.class })
@SpringBootApplication
public class Application {
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
@Profile(ClipAppProfile.HEROKU)
@Bean
public DataSource dataSource() throws URISyntaxException {
URI dbUri = new URI(System.getenv("DATABASE_URL"));
String username = dbUri.getUserInfo().split(":")[0];
String password = dbUri.getUserInfo().split(":")[1];
String dbUrl = "jdbc:postgresql://" + dbUri.getHost() + ':' + dbUri.getPort() + dbUri.getPath();
return DataSourceBuilder.create()
.driverClassName("org.postgresql.Driver")
.username(username)
.password(password)
.url(dbUrl)
.build();
}
}
|
# ... existing code ...
import javax.sql.DataSource;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceBuilder;
import org.springframework.boot.autoconfigure.web.ErrorMvcAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Profile;
@ComponentScan
@EnableAutoConfiguration(exclude = { ErrorMvcAutoConfiguration.class })
@SpringBootApplication
public class Application {
# ... rest of the code ...
|
a116c3eae892a73b11372225a9bdf0194db75598
|
glanerbeard/web.py
|
glanerbeard/web.py
|
import logging
from flask import (
Flask,
render_template,
abort
)
from glanerbeard.server import Server
app = Flask(__name__)
app.config.from_object('glanerbeard.default_settings')
app.config.from_envvar('GLANERBEARD_SETTINGS')
numeric_level = getattr(logging, app.config['LOGLEVEL'].upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
log = logging.getLogger(__name__)
servers = Server.createFromConfig(app.config['SERVERS'], app.config['API_KEYS'])
@app.route('/')
def index():
shows = [server.getShows() for server in servers]
return str(shows)
if __name__ == '__main__':
app.debug = True
app.run()
|
import logging
from flask import (
Flask,
render_template,
abort
)
from glanerbeard.server import Server
app = Flask(__name__)
app.config.from_object('glanerbeard.default_settings')
app.config.from_envvar('GLANERBEARD_SETTINGS')
numeric_level = getattr(logging, app.config['LOGLEVEL'].upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
log = logging.getLogger(__name__)
servers = Server.createFromConfig(app.config['SERVERS'], app.config['API_KEYS'])
@app.route('/')
def index():
shows = [server.getShows() for server in servers]
return render_template('json.html', json=shows)
|
Use a template to render json.
|
Use a template to render json.
|
Python
|
apache-2.0
|
daenney/glanerbeard
|
python
|
## Code Before:
import logging
from flask import (
Flask,
render_template,
abort
)
from glanerbeard.server import Server
app = Flask(__name__)
app.config.from_object('glanerbeard.default_settings')
app.config.from_envvar('GLANERBEARD_SETTINGS')
numeric_level = getattr(logging, app.config['LOGLEVEL'].upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
log = logging.getLogger(__name__)
servers = Server.createFromConfig(app.config['SERVERS'], app.config['API_KEYS'])
@app.route('/')
def index():
shows = [server.getShows() for server in servers]
return str(shows)
if __name__ == '__main__':
app.debug = True
app.run()
## Instruction:
Use a template to render json.
## Code After:
import logging
from flask import (
Flask,
render_template,
abort
)
from glanerbeard.server import Server
app = Flask(__name__)
app.config.from_object('glanerbeard.default_settings')
app.config.from_envvar('GLANERBEARD_SETTINGS')
numeric_level = getattr(logging, app.config['LOGLEVEL'].upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
log = logging.getLogger(__name__)
servers = Server.createFromConfig(app.config['SERVERS'], app.config['API_KEYS'])
@app.route('/')
def index():
shows = [server.getShows() for server in servers]
return render_template('json.html', json=shows)
|
...
@app.route('/')
def index():
shows = [server.getShows() for server in servers]
return render_template('json.html', json=shows)
...
|
56441d42ed87e2adad8b36c25cf695b0747a8c16
|
tests/djworkflows/models.py
|
tests/djworkflows/models.py
|
from django.db import models as djmodels
import xworkflows
from django_xworkflows import models
class MyWorkflow(xworkflows.Workflow):
states = ('foo', 'bar', 'baz')
transitions = (
('foobar', 'foo', 'bar'),
('gobaz', ('foo', 'bar'), 'baz'),
('bazbar', 'baz', 'bar'),
)
initial_state = 'foo'
class MyAltWorkflow(xworkflows.Workflow):
states = (
('a', 'StateA'),
('b', 'StateB'),
('c', 'StateC'),
)
transitions = (
('tob', ('a', 'c'), 'b'),
('toa', ('b', 'c'), 'a'),
('toc', ('a', 'b'), 'c'),
)
initial_state = 'a'
class MyWorkflowEnabled(models.WorkflowEnabled, djmodels.Model):
state = MyWorkflow
def gobaz(self, foo):
return foo * 2
class WithTwoWorkflows(models.WorkflowEnabled, djmodels.Model):
state1 = MyWorkflow
state2 = MyAltWorkflow
|
from django.db import models as djmodels
from django_xworkflows import models
class MyWorkflow(models.Workflow):
states = ('foo', 'bar', 'baz')
transitions = (
('foobar', 'foo', 'bar'),
('gobaz', ('foo', 'bar'), 'baz'),
('bazbar', 'baz', 'bar'),
)
initial_state = 'foo'
class MyAltWorkflow(models.Workflow):
states = (
('a', 'StateA'),
('b', 'StateB'),
('c', 'StateC'),
)
transitions = (
('tob', ('a', 'c'), 'b'),
('toa', ('b', 'c'), 'a'),
('toc', ('a', 'b'), 'c'),
)
initial_state = 'a'
class MyWorkflowEnabled(models.WorkflowEnabled, djmodels.Model):
state = MyWorkflow
def gobaz(self, foo):
return foo * 2
class WithTwoWorkflows(models.WorkflowEnabled, djmodels.Model):
state1 = MyWorkflow()
state2 = MyAltWorkflow()
|
Use imports from django_xworkflows instead of imports from xworkflows in tests
|
Use imports from django_xworkflows instead of imports from xworkflows in tests
Signed-off-by: Raphaël Barrois <[email protected]>
|
Python
|
bsd-2-clause
|
rbarrois/django_xworkflows
|
python
|
## Code Before:
from django.db import models as djmodels
import xworkflows
from django_xworkflows import models
class MyWorkflow(xworkflows.Workflow):
states = ('foo', 'bar', 'baz')
transitions = (
('foobar', 'foo', 'bar'),
('gobaz', ('foo', 'bar'), 'baz'),
('bazbar', 'baz', 'bar'),
)
initial_state = 'foo'
class MyAltWorkflow(xworkflows.Workflow):
states = (
('a', 'StateA'),
('b', 'StateB'),
('c', 'StateC'),
)
transitions = (
('tob', ('a', 'c'), 'b'),
('toa', ('b', 'c'), 'a'),
('toc', ('a', 'b'), 'c'),
)
initial_state = 'a'
class MyWorkflowEnabled(models.WorkflowEnabled, djmodels.Model):
state = MyWorkflow
def gobaz(self, foo):
return foo * 2
class WithTwoWorkflows(models.WorkflowEnabled, djmodels.Model):
state1 = MyWorkflow
state2 = MyAltWorkflow
## Instruction:
Use imports from django_xworkflows instead of imports from xworkflows in tests
Signed-off-by: Raphaël Barrois <[email protected]>
## Code After:
from django.db import models as djmodels
from django_xworkflows import models
class MyWorkflow(models.Workflow):
states = ('foo', 'bar', 'baz')
transitions = (
('foobar', 'foo', 'bar'),
('gobaz', ('foo', 'bar'), 'baz'),
('bazbar', 'baz', 'bar'),
)
initial_state = 'foo'
class MyAltWorkflow(models.Workflow):
states = (
('a', 'StateA'),
('b', 'StateB'),
('c', 'StateC'),
)
transitions = (
('tob', ('a', 'c'), 'b'),
('toa', ('b', 'c'), 'a'),
('toc', ('a', 'b'), 'c'),
)
initial_state = 'a'
class MyWorkflowEnabled(models.WorkflowEnabled, djmodels.Model):
state = MyWorkflow
def gobaz(self, foo):
return foo * 2
class WithTwoWorkflows(models.WorkflowEnabled, djmodels.Model):
state1 = MyWorkflow()
state2 = MyAltWorkflow()
|
// ... existing code ...
from django.db import models as djmodels
from django_xworkflows import models
class MyWorkflow(models.Workflow):
states = ('foo', 'bar', 'baz')
transitions = (
('foobar', 'foo', 'bar'),
// ... modified code ...
initial_state = 'foo'
class MyAltWorkflow(models.Workflow):
states = (
('a', 'StateA'),
('b', 'StateB'),
...
class WithTwoWorkflows(models.WorkflowEnabled, djmodels.Model):
state1 = MyWorkflow()
state2 = MyAltWorkflow()
// ... rest of the code ...
|
8187591a0f8255487f4b16b653ba5070bfffe739
|
specs/test_diff.py
|
specs/test_diff.py
|
'''
This is an example of a python test
that compares a diff function (in this case
a hardcoded one that doesn't work) to the
reference JSON to check compliance.
'''
from nose.tools import eq_
import json
def diff(before, after):
return []
def test_diffs():
test_cases = json.load(open('test_cases.json'))
for test_case in test_cases:
result = diff(test_case['before'], test_case['after'])
eq_(result, test_case['diff'])
|
'''
This is an example of a python test
that compares a diff function (in this case
a hardcoded one that doesn't work) to the
reference JSON to check compliance.
'''
from nose.tools import eq_
import json
def diff(before, after):
return []
def test_diffs():
test_cases = json.load(open('test_cases_simple.json'))
for test_case in test_cases:
result = diff(test_case['before'], test_case['after'])
eq_(result, test_case['diff'])
def test_diffs_cells():
test_cases = json.load(open('test_cases_cells.json'))
for test_case in test_cases:
result = diff(test_case['before'], test_case['after'])
eq_(result, test_case['diff'])
|
Add code to specs example test
|
Add code to specs example test
|
Python
|
mit
|
tarmstrong/nbdiff,tarmstrong/nbdiff,tarmstrong/nbdiff,tarmstrong/nbdiff
|
python
|
## Code Before:
'''
This is an example of a python test
that compares a diff function (in this case
a hardcoded one that doesn't work) to the
reference JSON to check compliance.
'''
from nose.tools import eq_
import json
def diff(before, after):
return []
def test_diffs():
test_cases = json.load(open('test_cases.json'))
for test_case in test_cases:
result = diff(test_case['before'], test_case['after'])
eq_(result, test_case['diff'])
## Instruction:
Add code to specs example test
## Code After:
'''
This is an example of a python test
that compares a diff function (in this case
a hardcoded one that doesn't work) to the
reference JSON to check compliance.
'''
from nose.tools import eq_
import json
def diff(before, after):
return []
def test_diffs():
test_cases = json.load(open('test_cases_simple.json'))
for test_case in test_cases:
result = diff(test_case['before'], test_case['after'])
eq_(result, test_case['diff'])
def test_diffs_cells():
test_cases = json.load(open('test_cases_cells.json'))
for test_case in test_cases:
result = diff(test_case['before'], test_case['after'])
eq_(result, test_case['diff'])
|
# ... existing code ...
def diff(before, after):
return []
def test_diffs():
test_cases = json.load(open('test_cases_simple.json'))
for test_case in test_cases:
result = diff(test_case['before'], test_case['after'])
eq_(result, test_case['diff'])
def test_diffs_cells():
test_cases = json.load(open('test_cases_cells.json'))
for test_case in test_cases:
result = diff(test_case['before'], test_case['after'])
eq_(result, test_case['diff'])
# ... rest of the code ...
|
d0374f256b58ed3cb8194e4b46a62b97aee990e1
|
tests/test_core_lexer.py
|
tests/test_core_lexer.py
|
import sshrc.core.lexer as lexer
import pytest
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", ""),
(" #", ""),
("# ", ""),
(" # dsfsdfsdf sdfsdfsd", ""),
(" a", " a"),
(" a# sdfsfdf", " a"),
(" a # sdfsfsd x xxxxxxx # sdfsfd", " a")
))
def test_clean_line(input_, output_):
assert lexer.clean_line(input_) == output_
|
import sshrc.core.lexer as lexer
import pytest
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", ""),
(" #", ""),
("# ", ""),
(" # dsfsdfsdf sdfsdfsd", ""),
(" a", " a"),
(" a# sdfsfdf", " a"),
(" a # sdfsfsd x xxxxxxx # sdfsfd", " a")
))
def test_clean_line(input_, output_):
assert lexer.clean_line(input_) == output_
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", " "),
(" ", " "),
(" ", " "),
("\t ", " "),
("\t\t\t", 12 * " "),
("\t \t", " "),
("\t\t\t ", " "),
(" \t\t\t ", " ")
))
def test_reindent_line(input_, output_):
assert lexer.reindent_line(input_) == output_
|
Add tests for reindenting line
|
Add tests for reindenting line
|
Python
|
mit
|
9seconds/concierge,9seconds/sshrc
|
python
|
## Code Before:
import sshrc.core.lexer as lexer
import pytest
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", ""),
(" #", ""),
("# ", ""),
(" # dsfsdfsdf sdfsdfsd", ""),
(" a", " a"),
(" a# sdfsfdf", " a"),
(" a # sdfsfsd x xxxxxxx # sdfsfd", " a")
))
def test_clean_line(input_, output_):
assert lexer.clean_line(input_) == output_
## Instruction:
Add tests for reindenting line
## Code After:
import sshrc.core.lexer as lexer
import pytest
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", ""),
(" #", ""),
("# ", ""),
(" # dsfsdfsdf sdfsdfsd", ""),
(" a", " a"),
(" a# sdfsfdf", " a"),
(" a # sdfsfsd x xxxxxxx # sdfsfd", " a")
))
def test_clean_line(input_, output_):
assert lexer.clean_line(input_) == output_
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", " "),
(" ", " "),
(" ", " "),
("\t ", " "),
("\t\t\t", 12 * " "),
("\t \t", " "),
("\t\t\t ", " "),
(" \t\t\t ", " ")
))
def test_reindent_line(input_, output_):
assert lexer.reindent_line(input_) == output_
|
// ... existing code ...
))
def test_clean_line(input_, output_):
assert lexer.clean_line(input_) == output_
@pytest.mark.parametrize("input_, output_", (
("", ""),
(" ", " "),
(" ", " "),
(" ", " "),
("\t ", " "),
("\t\t\t", 12 * " "),
("\t \t", " "),
("\t\t\t ", " "),
(" \t\t\t ", " ")
))
def test_reindent_line(input_, output_):
assert lexer.reindent_line(input_) == output_
// ... rest of the code ...
|
a20ffb81801a5f96af47ccf4bf7fe0133e74102b
|
source/views.py
|
source/views.py
|
from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
i18n = self.enum_class.i18n[_id]
context.append({
'id': _id,
'i18n': i18n,
})
return Response(context)
|
from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
fields = ('i18n', )
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
enum_context = {'id': _id}
for field in self.fields:
enum_context[field] = getattr(self.enum_class, field)[_id]
context.append(enum_context)
return Response(context)
|
Add possibility to set fields
|
Add possibility to set fields
|
Python
|
mit
|
iktw/django-rest-enum-view
|
python
|
## Code Before:
from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
i18n = self.enum_class.i18n[_id]
context.append({
'id': _id,
'i18n': i18n,
})
return Response(context)
## Instruction:
Add possibility to set fields
## Code After:
from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
fields = ('i18n', )
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
enum_context = {'id': _id}
for field in self.fields:
enum_context[field] = getattr(self.enum_class, field)[_id]
context.append(enum_context)
return Response(context)
|
# ... existing code ...
class EnumView(APIView):
permission_classes = []
fields = ('i18n', )
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
enum_context = {'id': _id}
for field in self.fields:
enum_context[field] = getattr(self.enum_class, field)[_id]
context.append(enum_context)
return Response(context)
# ... rest of the code ...
|
4425aa1170a1acd3ed69c32ba5e3885301593524
|
salt/returners/redis_return.py
|
salt/returners/redis_return.py
|
'''
Return data to a redis server
To enable this returner the minion will need the python client for redis
installed and the following values configured in the minion or master
config, these are the defaults:
redis.db: '0'
redis.host: 'salt'
redis.port: 6379
'''
# Import python libs
import json
try:
import redis
has_redis = True
except ImportError:
has_redis = False
def __virtual__():
if not has_redis:
return False
return 'redis_return'
def _get_serv():
'''
Return a redis server object
'''
return redis.Redis(
host=__salt__['config.option']('redis.host'),
port=__salt__['config.option']('redis.port'),
db=__salt__['config.option']('redis.db'))
def returner(ret):
'''
Return data to a redis data store
'''
serv = _get_serv()
serv.sadd('{0}:jobs'.format(ret['id']))
serv.set('{0}:{1}'.format(ret['jid'], json.dumps(ret['return'])))
serv.sadd('jobs', ret['jid'])
serv.sadd(ret['jid'], ret['id'])
|
'''
Return data to a redis server
To enable this returner the minion will need the python client for redis
installed and the following values configured in the minion or master
config, these are the defaults:
redis.db: '0'
redis.host: 'salt'
redis.port: 6379
'''
# Import python libs
import json
try:
import redis
has_redis = True
except ImportError:
has_redis = False
def __virtual__():
if not has_redis:
return False
return 'redis_return'
def _get_serv():
'''
Return a redis server object
'''
return redis.Redis(
host=__salt__['config.option']('redis.host'),
port=__salt__['config.option']('redis.port'),
db=__salt__['config.option']('redis.db'))
def returner(ret):
'''
Return data to a redis data store
'''
serv = _get_serv()
serv.set('{0}:{1}'.format(ret['id'], ret['jid']), json.dumps(ret))
serv.lpush('{0}:{1}'.format(ret['id'], ret['fun']), ret['jid'])
serv.sadd('minions', ret['id'])
|
Restructure redis returner, since it did notwork before anyway
|
Restructure redis returner, since it did notwork before anyway
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
python
|
## Code Before:
'''
Return data to a redis server
To enable this returner the minion will need the python client for redis
installed and the following values configured in the minion or master
config, these are the defaults:
redis.db: '0'
redis.host: 'salt'
redis.port: 6379
'''
# Import python libs
import json
try:
import redis
has_redis = True
except ImportError:
has_redis = False
def __virtual__():
if not has_redis:
return False
return 'redis_return'
def _get_serv():
'''
Return a redis server object
'''
return redis.Redis(
host=__salt__['config.option']('redis.host'),
port=__salt__['config.option']('redis.port'),
db=__salt__['config.option']('redis.db'))
def returner(ret):
'''
Return data to a redis data store
'''
serv = _get_serv()
serv.sadd('{0}:jobs'.format(ret['id']))
serv.set('{0}:{1}'.format(ret['jid'], json.dumps(ret['return'])))
serv.sadd('jobs', ret['jid'])
serv.sadd(ret['jid'], ret['id'])
## Instruction:
Restructure redis returner, since it did notwork before anyway
## Code After:
'''
Return data to a redis server
To enable this returner the minion will need the python client for redis
installed and the following values configured in the minion or master
config, these are the defaults:
redis.db: '0'
redis.host: 'salt'
redis.port: 6379
'''
# Import python libs
import json
try:
import redis
has_redis = True
except ImportError:
has_redis = False
def __virtual__():
if not has_redis:
return False
return 'redis_return'
def _get_serv():
'''
Return a redis server object
'''
return redis.Redis(
host=__salt__['config.option']('redis.host'),
port=__salt__['config.option']('redis.port'),
db=__salt__['config.option']('redis.db'))
def returner(ret):
'''
Return data to a redis data store
'''
serv = _get_serv()
serv.set('{0}:{1}'.format(ret['id'], ret['jid']), json.dumps(ret))
serv.lpush('{0}:{1}'.format(ret['id'], ret['fun']), ret['jid'])
serv.sadd('minions', ret['id'])
|
// ... existing code ...
Return data to a redis data store
'''
serv = _get_serv()
serv.set('{0}:{1}'.format(ret['id'], ret['jid']), json.dumps(ret))
serv.lpush('{0}:{1}'.format(ret['id'], ret['fun']), ret['jid'])
serv.sadd('minions', ret['id'])
// ... rest of the code ...
|
c33aa32b868a33422f79103474cece38131a93c3
|
src/oscar/apps/customer/migrations/0005_auto_20170413_1857.py
|
src/oscar/apps/customer/migrations/0005_auto_20170413_1857.py
|
from __future__ import unicode_literals
from django.db import migrations
def forwards_func(apps, schema_editor):
User = apps.get_model("auth", "User")
for user in User.objects.all():
user.emails.update(email=user.email)
class Migration(migrations.Migration):
dependencies = [
('customer', '0004_auto_20170413_1853'),
]
operations = [
migrations.RunPython(forwards_func)
]
|
from __future__ import unicode_literals
from django.db import migrations
from oscar.core.compat import get_user_model
User = get_user_model()
def forwards_func(apps, schema_editor):
for user in User.objects.all():
user.emails.update(email=user.email)
class Migration(migrations.Migration):
dependencies = [
('customer', '0004_auto_20170413_1853'),
]
operations = [
migrations.RunPython(forwards_func)
]
|
Load current User model for customer email migration.
|
Load current User model for customer email migration.
|
Python
|
bsd-3-clause
|
solarissmoke/django-oscar,django-oscar/django-oscar,solarissmoke/django-oscar,sasha0/django-oscar,sonofatailor/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar,sonofatailor/django-oscar,sonofatailor/django-oscar,django-oscar/django-oscar,sasha0/django-oscar,sasha0/django-oscar,django-oscar/django-oscar,sasha0/django-oscar,sonofatailor/django-oscar,solarissmoke/django-oscar
|
python
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations
def forwards_func(apps, schema_editor):
User = apps.get_model("auth", "User")
for user in User.objects.all():
user.emails.update(email=user.email)
class Migration(migrations.Migration):
dependencies = [
('customer', '0004_auto_20170413_1853'),
]
operations = [
migrations.RunPython(forwards_func)
]
## Instruction:
Load current User model for customer email migration.
## Code After:
from __future__ import unicode_literals
from django.db import migrations
from oscar.core.compat import get_user_model
User = get_user_model()
def forwards_func(apps, schema_editor):
for user in User.objects.all():
user.emails.update(email=user.email)
class Migration(migrations.Migration):
dependencies = [
('customer', '0004_auto_20170413_1853'),
]
operations = [
migrations.RunPython(forwards_func)
]
|
// ... existing code ...
from django.db import migrations
from oscar.core.compat import get_user_model
User = get_user_model()
def forwards_func(apps, schema_editor):
for user in User.objects.all():
user.emails.update(email=user.email)
// ... rest of the code ...
|
244f9ad92683a1b3a3bc8409724fea9c671f38b6
|
src/mcedit2/widgets/layout.py
|
src/mcedit2/widgets/layout.py
|
from __future__ import absolute_import, division, print_function, unicode_literals
from PySide import QtGui
def _Box(box, *a):
for arg in a:
if isinstance(arg, tuple):
item = arg[0]
else:
item = arg
arg = (item,)
if isinstance(item, QtGui.QLayout):
box.addLayout(*arg)
if isinstance(item, QtGui.QWidget):
box.addWidget(*arg)
if item is None:
box.addStretch()
return box
def Row(*a, **kw):
"""
:rtype: QHBoxLayout
"""
margin = kw.pop('margin', None)
box = QtGui.QHBoxLayout(**kw)
if margin:
box.setContentsMargins((margin,) * 4)
_Box(box, *a)
return box
def Column(*a, **kw):
"""
:rtype: QtGui.QVBoxLayout
"""
margin = kw.pop('margin', None)
box = QtGui.QVBoxLayout(**kw)
if margin:
box.setContentsMargins((margin,) * 4)
_Box(box, *a)
return box
def setWidgetError(widget, exc):
"""
Add a subwidget to `widget` that displays the error message for the exception `exc`
:param widget:
:param exc:
:return:
"""
layout = QtGui.QVBoxLayout()
layout.addWidget(QtGui.QLabel(exc.message))
layout.addStretch()
widget.setLayout(layout)
|
from __future__ import absolute_import, division, print_function, unicode_literals
from PySide import QtGui
def _Box(box, *a):
for arg in a:
if isinstance(arg, tuple):
item = arg[0]
else:
item = arg
arg = (item,)
if isinstance(item, QtGui.QLayout):
box.addLayout(*arg)
if isinstance(item, QtGui.QWidget):
box.addWidget(*arg)
if item is None:
box.addStretch()
return box
def Row(*a, **kw):
"""
:rtype: QtGui.QHBoxLayout
"""
margin = kw.pop('margin', None)
box = QtGui.QHBoxLayout(**kw)
_Box(box, *a)
if margin is not None:
box.setContentsMargins(margin, margin, margin, margin)
return box
def Column(*a, **kw):
"""
:rtype: QtGui.QVBoxLayout
"""
margin = kw.pop('margin', None)
box = QtGui.QVBoxLayout(**kw)
_Box(box, *a)
if margin is not None:
box.setContentsMargins(margin, margin, margin, margin)
return box
def setWidgetError(widget, exc):
"""
Add a subwidget to `widget` that displays the error message for the exception `exc`
:param widget:
:param exc:
:return:
"""
layout = QtGui.QVBoxLayout()
layout.addWidget(QtGui.QLabel(exc.message))
layout.addStretch()
widget.setLayout(layout)
|
Check margin keyword to Row/Column is not None
|
Check margin keyword to Row/Column is not None
|
Python
|
bsd-3-clause
|
Rubisk/mcedit2,Rubisk/mcedit2,vorburger/mcedit2,vorburger/mcedit2
|
python
|
## Code Before:
from __future__ import absolute_import, division, print_function, unicode_literals
from PySide import QtGui
def _Box(box, *a):
for arg in a:
if isinstance(arg, tuple):
item = arg[0]
else:
item = arg
arg = (item,)
if isinstance(item, QtGui.QLayout):
box.addLayout(*arg)
if isinstance(item, QtGui.QWidget):
box.addWidget(*arg)
if item is None:
box.addStretch()
return box
def Row(*a, **kw):
"""
:rtype: QHBoxLayout
"""
margin = kw.pop('margin', None)
box = QtGui.QHBoxLayout(**kw)
if margin:
box.setContentsMargins((margin,) * 4)
_Box(box, *a)
return box
def Column(*a, **kw):
"""
:rtype: QtGui.QVBoxLayout
"""
margin = kw.pop('margin', None)
box = QtGui.QVBoxLayout(**kw)
if margin:
box.setContentsMargins((margin,) * 4)
_Box(box, *a)
return box
def setWidgetError(widget, exc):
"""
Add a subwidget to `widget` that displays the error message for the exception `exc`
:param widget:
:param exc:
:return:
"""
layout = QtGui.QVBoxLayout()
layout.addWidget(QtGui.QLabel(exc.message))
layout.addStretch()
widget.setLayout(layout)
## Instruction:
Check margin keyword to Row/Column is not None
## Code After:
from __future__ import absolute_import, division, print_function, unicode_literals
from PySide import QtGui
def _Box(box, *a):
for arg in a:
if isinstance(arg, tuple):
item = arg[0]
else:
item = arg
arg = (item,)
if isinstance(item, QtGui.QLayout):
box.addLayout(*arg)
if isinstance(item, QtGui.QWidget):
box.addWidget(*arg)
if item is None:
box.addStretch()
return box
def Row(*a, **kw):
"""
:rtype: QtGui.QHBoxLayout
"""
margin = kw.pop('margin', None)
box = QtGui.QHBoxLayout(**kw)
_Box(box, *a)
if margin is not None:
box.setContentsMargins(margin, margin, margin, margin)
return box
def Column(*a, **kw):
"""
:rtype: QtGui.QVBoxLayout
"""
margin = kw.pop('margin', None)
box = QtGui.QVBoxLayout(**kw)
_Box(box, *a)
if margin is not None:
box.setContentsMargins(margin, margin, margin, margin)
return box
def setWidgetError(widget, exc):
"""
Add a subwidget to `widget` that displays the error message for the exception `exc`
:param widget:
:param exc:
:return:
"""
layout = QtGui.QVBoxLayout()
layout.addWidget(QtGui.QLabel(exc.message))
layout.addStretch()
widget.setLayout(layout)
|
...
def Row(*a, **kw):
"""
:rtype: QtGui.QHBoxLayout
"""
margin = kw.pop('margin', None)
box = QtGui.QHBoxLayout(**kw)
_Box(box, *a)
if margin is not None:
box.setContentsMargins(margin, margin, margin, margin)
return box
...
"""
margin = kw.pop('margin', None)
box = QtGui.QVBoxLayout(**kw)
_Box(box, *a)
if margin is not None:
box.setContentsMargins(margin, margin, margin, margin)
return box
def setWidgetError(widget, exc):
...
|
e3b1a323921b8331d7fd84c013e80a89a5b21bde
|
haproxy_status.py
|
haproxy_status.py
|
from BaseHTTPServer import BaseHTTPRequestHandler
from helpers.etcd import Etcd
from helpers.postgresql import Postgresql
import sys, yaml, socket
f = open(sys.argv[1], "r")
config = yaml.load(f.read())
f.close()
etcd = Etcd(config["etcd"])
postgresql = Postgresql(config["postgresql"])
class StatusHandler(BaseHTTPRequestHandler):
def do_GET(self):
return self.do_ANY()
def do_OPTIONS(self):
return self.do_ANY()
def do_ANY(self):
if postgresql.name == etcd.current_leader()["hostname"]:
self.send_response(200)
else:
self.send_response(503)
self.end_headers()
self.wfile.write('\r\n')
return
try:
from BaseHTTPServer import HTTPServer
host, port = config["haproxy_status"]["listen"].split(":")
server = HTTPServer((host, int(port)), StatusHandler)
print 'listening on %s:%s' % (host, port)
server.serve_forever()
except KeyboardInterrupt:
print('^C received, shutting down server')
server.socket.close()
|
from BaseHTTPServer import BaseHTTPRequestHandler
from helpers.etcd import Etcd
from helpers.postgresql import Postgresql
import sys, yaml, socket
f = open(sys.argv[1], "r")
config = yaml.load(f.read())
f.close()
etcd = Etcd(config["etcd"])
postgresql = Postgresql(config["postgresql"])
class StatusHandler(BaseHTTPRequestHandler):
def do_GET(self):
return self.do_ANY()
def do_OPTIONS(self):
return self.do_ANY()
def do_ANY(self):
leader = etcd.current_leader()
is_leader = leader != None and postgresql.name == leader["hostname"]
if ((self.path == "/" or self.path == "/master") and is_leader) or (self.path == "/replica" and not is_leader):
self.send_response(200)
else:
self.send_response(503)
self.end_headers()
self.wfile.write('\r\n')
return
try:
from BaseHTTPServer import HTTPServer
host, port = config["haproxy_status"]["listen"].split(":")
server = HTTPServer((host, int(port)), StatusHandler)
print 'listening on %s:%s' % (host, port)
server.serve_forever()
except KeyboardInterrupt:
print('^C received, shutting down server')
server.socket.close()
|
Add the ability to query for the replica status of a PG instance
|
Add the ability to query for the replica status of a PG instance
|
Python
|
mit
|
Tapjoy/governor
|
python
|
## Code Before:
from BaseHTTPServer import BaseHTTPRequestHandler
from helpers.etcd import Etcd
from helpers.postgresql import Postgresql
import sys, yaml, socket
f = open(sys.argv[1], "r")
config = yaml.load(f.read())
f.close()
etcd = Etcd(config["etcd"])
postgresql = Postgresql(config["postgresql"])
class StatusHandler(BaseHTTPRequestHandler):
def do_GET(self):
return self.do_ANY()
def do_OPTIONS(self):
return self.do_ANY()
def do_ANY(self):
if postgresql.name == etcd.current_leader()["hostname"]:
self.send_response(200)
else:
self.send_response(503)
self.end_headers()
self.wfile.write('\r\n')
return
try:
from BaseHTTPServer import HTTPServer
host, port = config["haproxy_status"]["listen"].split(":")
server = HTTPServer((host, int(port)), StatusHandler)
print 'listening on %s:%s' % (host, port)
server.serve_forever()
except KeyboardInterrupt:
print('^C received, shutting down server')
server.socket.close()
## Instruction:
Add the ability to query for the replica status of a PG instance
## Code After:
from BaseHTTPServer import BaseHTTPRequestHandler
from helpers.etcd import Etcd
from helpers.postgresql import Postgresql
import sys, yaml, socket
f = open(sys.argv[1], "r")
config = yaml.load(f.read())
f.close()
etcd = Etcd(config["etcd"])
postgresql = Postgresql(config["postgresql"])
class StatusHandler(BaseHTTPRequestHandler):
def do_GET(self):
return self.do_ANY()
def do_OPTIONS(self):
return self.do_ANY()
def do_ANY(self):
leader = etcd.current_leader()
is_leader = leader != None and postgresql.name == leader["hostname"]
if ((self.path == "/" or self.path == "/master") and is_leader) or (self.path == "/replica" and not is_leader):
self.send_response(200)
else:
self.send_response(503)
self.end_headers()
self.wfile.write('\r\n')
return
try:
from BaseHTTPServer import HTTPServer
host, port = config["haproxy_status"]["listen"].split(":")
server = HTTPServer((host, int(port)), StatusHandler)
print 'listening on %s:%s' % (host, port)
server.serve_forever()
except KeyboardInterrupt:
print('^C received, shutting down server')
server.socket.close()
|
...
def do_OPTIONS(self):
return self.do_ANY()
def do_ANY(self):
leader = etcd.current_leader()
is_leader = leader != None and postgresql.name == leader["hostname"]
if ((self.path == "/" or self.path == "/master") and is_leader) or (self.path == "/replica" and not is_leader):
self.send_response(200)
else:
self.send_response(503)
...
|
e743bcddbc53d51142f3e1277919a3f65afaad90
|
tests/conftest.py
|
tests/conftest.py
|
import base64
import betamax
import os
credentials = [os.environ.get('GH_USER', 'foo').encode(),
os.environ.get('GH_PASSWORD', 'bar').encode()]
with betamax.Betamax.configure() as config:
config.cassette_library_dir = 'tests/cassettes'
record_mode = 'never' if os.environ.get('TRAVIS_GH3') else 'once'
print('Record mode: {0}'.format(record_mode))
config.default_cassette_options['record_mode'] = record_mode
config.define_cassette_placeholder(
'<AUTH_TOKEN>',
os.environ.get('GH_AUTH', 'x' * 20)
)
config.define_cassette_placeholder(
'<BASIC_AUTH>',
base64.b64encode(b':'.join(credentials)).decode()
)
|
import base64
import betamax
import os
credentials = [os.environ.get('GH_USER', 'foo').encode(),
os.environ.get('GH_PASSWORD', 'bar').encode()]
with betamax.Betamax.configure() as config:
config.cassette_library_dir = 'tests/cassettes'
record_mode = 'never' if os.environ.get('TRAVIS_GH3') else 'once'
config.default_cassette_options['record_mode'] = record_mode
config.define_cassette_placeholder(
'<AUTH_TOKEN>',
os.environ.get('GH_AUTH', 'x' * 20)
)
config.define_cassette_placeholder(
'<BASIC_AUTH>',
base64.b64encode(b':'.join(credentials)).decode()
)
|
Revert "For travis, let us print the mode"
|
Revert "For travis, let us print the mode"
This reverts commit 0c8e9c36219214cf08b33c0ff1812e6cefa53353.
|
Python
|
bsd-3-clause
|
sigmavirus24/github3.py,agamdua/github3.py,christophelec/github3.py,jim-minter/github3.py,ueg1990/github3.py,degustaf/github3.py,balloob/github3.py,h4ck3rm1k3/github3.py,icio/github3.py,wbrefvem/github3.py,krxsky/github3.py,itsmemattchung/github3.py
|
python
|
## Code Before:
import base64
import betamax
import os
credentials = [os.environ.get('GH_USER', 'foo').encode(),
os.environ.get('GH_PASSWORD', 'bar').encode()]
with betamax.Betamax.configure() as config:
config.cassette_library_dir = 'tests/cassettes'
record_mode = 'never' if os.environ.get('TRAVIS_GH3') else 'once'
print('Record mode: {0}'.format(record_mode))
config.default_cassette_options['record_mode'] = record_mode
config.define_cassette_placeholder(
'<AUTH_TOKEN>',
os.environ.get('GH_AUTH', 'x' * 20)
)
config.define_cassette_placeholder(
'<BASIC_AUTH>',
base64.b64encode(b':'.join(credentials)).decode()
)
## Instruction:
Revert "For travis, let us print the mode"
This reverts commit 0c8e9c36219214cf08b33c0ff1812e6cefa53353.
## Code After:
import base64
import betamax
import os
credentials = [os.environ.get('GH_USER', 'foo').encode(),
os.environ.get('GH_PASSWORD', 'bar').encode()]
with betamax.Betamax.configure() as config:
config.cassette_library_dir = 'tests/cassettes'
record_mode = 'never' if os.environ.get('TRAVIS_GH3') else 'once'
config.default_cassette_options['record_mode'] = record_mode
config.define_cassette_placeholder(
'<AUTH_TOKEN>',
os.environ.get('GH_AUTH', 'x' * 20)
)
config.define_cassette_placeholder(
'<BASIC_AUTH>',
base64.b64encode(b':'.join(credentials)).decode()
)
|
# ... existing code ...
config.cassette_library_dir = 'tests/cassettes'
record_mode = 'never' if os.environ.get('TRAVIS_GH3') else 'once'
config.default_cassette_options['record_mode'] = record_mode
# ... rest of the code ...
|
5054e882194adae4b76681e78c45d41ae2c2f0f7
|
pymatgen/util/sequence.py
|
pymatgen/util/sequence.py
|
import math
def get_chunks(sequence, size=1):
"""
Args:
sequence ():
size ():
Returns:
"""
chunks = int(math.ceil(len(sequence) / float(size)))
return [sequence[i * size : (i + 1) * size] for i in range(chunks)]
class PBarSafe:
"""
Progress bar.
"""
def __init__(self, total):
"""
Args:
total (): Total value.
"""
self.total = total
self.done = 0
self.report()
def update(self, amount):
"""
Update progress bar by amount.
Args:
amount (float):
"""
self.done += amount
self.report()
def report(self):
"""
Print progress.
"""
print("{} of {} done {:.1%}".format(self.done, self.total, self.done / self.total))
try:
# noinspection PyUnresolvedReferences
if get_ipython().__class__.__name__ == "ZMQInteractiveShell": # type: ignore
from tqdm import tqdm_notebook as PBar
else: # likely 'TerminalInteractiveShell'
from tqdm import tqdm as PBar
except NameError:
try:
from tqdm import tqdm as PBar
except ImportError:
PBar = PBarSafe
except ImportError:
PBar = PBarSafe
|
import math
def get_chunks(sequence, size=1):
"""
Args:
sequence ():
size ():
Returns:
"""
chunks = int(math.ceil(len(sequence) / float(size)))
return [sequence[i * size : (i + 1) * size] for i in range(chunks)]
class PBarSafe:
"""
Progress bar.
"""
def __init__(self, total, **kwargs):
"""
Args:
total (): Total value.
"""
self.total = total
self.done = 0
self.report()
def update(self, amount):
"""
Update progress bar by amount.
Args:
amount (float):
"""
self.done += amount
self.report()
def report(self):
"""
Print progress.
"""
print("{} of {} done {:.1%}".format(self.done, self.total, self.done / self.total))
try:
# noinspection PyUnresolvedReferences
if get_ipython().__class__.__name__ == "ZMQInteractiveShell": # type: ignore
from tqdm import tqdm_notebook as PBar
else: # likely 'TerminalInteractiveShell'
from tqdm import tqdm as PBar
except NameError:
try:
from tqdm import tqdm as PBar
except ImportError:
PBar = PBarSafe
except ImportError:
PBar = PBarSafe
|
Allow `PBar` to accept any kwargs (e.g. those used by `tqdm`)
|
Allow `PBar` to accept any kwargs (e.g. those used by `tqdm`)
|
Python
|
mit
|
gVallverdu/pymatgen,vorwerkc/pymatgen,vorwerkc/pymatgen,davidwaroquiers/pymatgen,gVallverdu/pymatgen,gVallverdu/pymatgen,fraricci/pymatgen,vorwerkc/pymatgen,gVallverdu/pymatgen,davidwaroquiers/pymatgen,fraricci/pymatgen,davidwaroquiers/pymatgen,davidwaroquiers/pymatgen,fraricci/pymatgen,vorwerkc/pymatgen,fraricci/pymatgen
|
python
|
## Code Before:
import math
def get_chunks(sequence, size=1):
"""
Args:
sequence ():
size ():
Returns:
"""
chunks = int(math.ceil(len(sequence) / float(size)))
return [sequence[i * size : (i + 1) * size] for i in range(chunks)]
class PBarSafe:
"""
Progress bar.
"""
def __init__(self, total):
"""
Args:
total (): Total value.
"""
self.total = total
self.done = 0
self.report()
def update(self, amount):
"""
Update progress bar by amount.
Args:
amount (float):
"""
self.done += amount
self.report()
def report(self):
"""
Print progress.
"""
print("{} of {} done {:.1%}".format(self.done, self.total, self.done / self.total))
try:
# noinspection PyUnresolvedReferences
if get_ipython().__class__.__name__ == "ZMQInteractiveShell": # type: ignore
from tqdm import tqdm_notebook as PBar
else: # likely 'TerminalInteractiveShell'
from tqdm import tqdm as PBar
except NameError:
try:
from tqdm import tqdm as PBar
except ImportError:
PBar = PBarSafe
except ImportError:
PBar = PBarSafe
## Instruction:
Allow `PBar` to accept any kwargs (e.g. those used by `tqdm`)
## Code After:
import math
def get_chunks(sequence, size=1):
"""
Args:
sequence ():
size ():
Returns:
"""
chunks = int(math.ceil(len(sequence) / float(size)))
return [sequence[i * size : (i + 1) * size] for i in range(chunks)]
class PBarSafe:
"""
Progress bar.
"""
def __init__(self, total, **kwargs):
"""
Args:
total (): Total value.
"""
self.total = total
self.done = 0
self.report()
def update(self, amount):
"""
Update progress bar by amount.
Args:
amount (float):
"""
self.done += amount
self.report()
def report(self):
"""
Print progress.
"""
print("{} of {} done {:.1%}".format(self.done, self.total, self.done / self.total))
try:
# noinspection PyUnresolvedReferences
if get_ipython().__class__.__name__ == "ZMQInteractiveShell": # type: ignore
from tqdm import tqdm_notebook as PBar
else: # likely 'TerminalInteractiveShell'
from tqdm import tqdm as PBar
except NameError:
try:
from tqdm import tqdm as PBar
except ImportError:
PBar = PBarSafe
except ImportError:
PBar = PBarSafe
|
...
Progress bar.
"""
def __init__(self, total, **kwargs):
"""
Args:
total (): Total value.
...
|
ae8273f86fc3cc7fdacadf495aa148dda796f11b
|
printcli.py
|
printcli.py
|
import argparse
import os
from labelprinter import Labelprinter
if os.path.isfile('labelprinterServeConf_local.py'):
import labelprinterServeConf_local as conf
else:
import labelprinterServeConf as conf
def text(args, labelprinter):
bold = 'on' if args.bold else 'off'
labelprinter.printText(args.text,
charSize=args.char_size,
font=args.font,
align=args.align,
bold=bold,
charStyle=args.char_style,
cut=args.cut
)
parser = argparse.ArgumentParser(description="A command line interface to Labello.")
subparsers = parser.add_subparsers(help="commands")
parser_text = subparsers.add_parser("text", help="print a text")
parser_text.add_argument("text", type=str, help="the text to print")
parser_text.add_argument("--char_size", type=str, default='42')
parser_text.add_argument("--font", type=str, default='lettergothic')
parser_text.add_argument("--align", type=str, default='left')
parser_text.add_argument("--bold", action='store_true')
parser_text.add_argument("--char_style", type=str, default='normal')
parser_text.add_argument("--cut", type=str, default='full')
parser_text.set_defaults(func=text)
args = parser.parse_args()
labelprinter = Labelprinter(conf=conf)
args.func(args, labelprinter)
|
import argparse
import os
from labelprinter import Labelprinter
import labelprinterServeConf as conf
def text(args, labelprinter):
bold = 'on' if args.bold else 'off'
labelprinter.printText(args.text,
charSize=args.char_size,
font=args.font,
align=args.align,
bold=bold,
charStyle=args.char_style,
cut=args.cut
)
parser = argparse.ArgumentParser(description="A command line interface to Labello.")
subparsers = parser.add_subparsers(help="commands")
parser_text = subparsers.add_parser("text", help="print a text")
parser_text.add_argument("text", type=str, help="the text to print")
parser_text.add_argument("--char_size", type=str, default='42')
parser_text.add_argument("--font", type=str, default='lettergothic')
parser_text.add_argument("--align", type=str, default='left')
parser_text.add_argument("--bold", action='store_true')
parser_text.add_argument("--char_style", type=str, default='normal')
parser_text.add_argument("--cut", type=str, default='full')
parser_text.set_defaults(func=text)
args = parser.parse_args()
labelprinter = Labelprinter(conf=conf)
args.func(args, labelprinter)
|
Make the CLI use the new config (see e4054fb).
|
Make the CLI use the new config (see e4054fb).
|
Python
|
mit
|
chaosdorf/labello,chaosdorf/labello,chaosdorf/labello
|
python
|
## Code Before:
import argparse
import os
from labelprinter import Labelprinter
if os.path.isfile('labelprinterServeConf_local.py'):
import labelprinterServeConf_local as conf
else:
import labelprinterServeConf as conf
def text(args, labelprinter):
bold = 'on' if args.bold else 'off'
labelprinter.printText(args.text,
charSize=args.char_size,
font=args.font,
align=args.align,
bold=bold,
charStyle=args.char_style,
cut=args.cut
)
parser = argparse.ArgumentParser(description="A command line interface to Labello.")
subparsers = parser.add_subparsers(help="commands")
parser_text = subparsers.add_parser("text", help="print a text")
parser_text.add_argument("text", type=str, help="the text to print")
parser_text.add_argument("--char_size", type=str, default='42')
parser_text.add_argument("--font", type=str, default='lettergothic')
parser_text.add_argument("--align", type=str, default='left')
parser_text.add_argument("--bold", action='store_true')
parser_text.add_argument("--char_style", type=str, default='normal')
parser_text.add_argument("--cut", type=str, default='full')
parser_text.set_defaults(func=text)
args = parser.parse_args()
labelprinter = Labelprinter(conf=conf)
args.func(args, labelprinter)
## Instruction:
Make the CLI use the new config (see e4054fb).
## Code After:
import argparse
import os
from labelprinter import Labelprinter
import labelprinterServeConf as conf
def text(args, labelprinter):
bold = 'on' if args.bold else 'off'
labelprinter.printText(args.text,
charSize=args.char_size,
font=args.font,
align=args.align,
bold=bold,
charStyle=args.char_style,
cut=args.cut
)
parser = argparse.ArgumentParser(description="A command line interface to Labello.")
subparsers = parser.add_subparsers(help="commands")
parser_text = subparsers.add_parser("text", help="print a text")
parser_text.add_argument("text", type=str, help="the text to print")
parser_text.add_argument("--char_size", type=str, default='42')
parser_text.add_argument("--font", type=str, default='lettergothic')
parser_text.add_argument("--align", type=str, default='left')
parser_text.add_argument("--bold", action='store_true')
parser_text.add_argument("--char_style", type=str, default='normal')
parser_text.add_argument("--cut", type=str, default='full')
parser_text.set_defaults(func=text)
args = parser.parse_args()
labelprinter = Labelprinter(conf=conf)
args.func(args, labelprinter)
|
# ... existing code ...
import os
from labelprinter import Labelprinter
import labelprinterServeConf as conf
def text(args, labelprinter):
bold = 'on' if args.bold else 'off'
# ... rest of the code ...
|
93bd200641267abe2ddf9047e13a2652932a0e3f
|
src/main/java/com/github/vlsidlyarevich/unity/controller/WorkerProfileSearchController.java
|
src/main/java/com/github/vlsidlyarevich/unity/controller/WorkerProfileSearchController.java
|
package com.github.vlsidlyarevich.unity.controller;
import com.github.vlsidlyarevich.unity.service.WorkerProfileSearchService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.util.MultiValueMap;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
/**
* Created by vlad on 30.09.16.
*/
@RestController
@RequestMapping("/api/workers/search")
public class WorkerProfileSearchController {
@Autowired
private WorkerProfileSearchService service;
@RequestMapping(method = RequestMethod.POST)
public ResponseEntity<?> getWorkersByFilters(MultiValueMap<String, String> filters) {
return new ResponseEntity<>(service.findByFilters(filters), HttpStatus.OK);
}
}
|
package com.github.vlsidlyarevich.unity.controller;
import com.github.vlsidlyarevich.unity.service.WorkerProfileSearchService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.util.MultiValueMap;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.Map;
/**
* Created by vlad on 30.09.16.
*/
@RestController
@RequestMapping("/api/workers/search")
public class WorkerProfileSearchController {
@Autowired
private WorkerProfileSearchService service;
@RequestMapping(method = RequestMethod.POST)
public ResponseEntity<?> getWorkersByFilters(@RequestBody Map<String, String> filters) {
return new ResponseEntity<>(service.findByFilters(filters), HttpStatus.OK);
}
}
|
Switch to map as request body
|
Switch to map as request body
|
Java
|
apache-2.0
|
vlsidlyarevich/unity,vlsidlyarevich/unity,vlsidlyarevich/unity,vlsidlyarevich/unity
|
java
|
## Code Before:
package com.github.vlsidlyarevich.unity.controller;
import com.github.vlsidlyarevich.unity.service.WorkerProfileSearchService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.util.MultiValueMap;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
/**
* Created by vlad on 30.09.16.
*/
@RestController
@RequestMapping("/api/workers/search")
public class WorkerProfileSearchController {
@Autowired
private WorkerProfileSearchService service;
@RequestMapping(method = RequestMethod.POST)
public ResponseEntity<?> getWorkersByFilters(MultiValueMap<String, String> filters) {
return new ResponseEntity<>(service.findByFilters(filters), HttpStatus.OK);
}
}
## Instruction:
Switch to map as request body
## Code After:
package com.github.vlsidlyarevich.unity.controller;
import com.github.vlsidlyarevich.unity.service.WorkerProfileSearchService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.util.MultiValueMap;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.Map;
/**
* Created by vlad on 30.09.16.
*/
@RestController
@RequestMapping("/api/workers/search")
public class WorkerProfileSearchController {
@Autowired
private WorkerProfileSearchService service;
@RequestMapping(method = RequestMethod.POST)
public ResponseEntity<?> getWorkersByFilters(@RequestBody Map<String, String> filters) {
return new ResponseEntity<>(service.findByFilters(filters), HttpStatus.OK);
}
}
|
...
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.util.MultiValueMap;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.Map;
/**
* Created by vlad on 30.09.16.
...
private WorkerProfileSearchService service;
@RequestMapping(method = RequestMethod.POST)
public ResponseEntity<?> getWorkersByFilters(@RequestBody Map<String, String> filters) {
return new ResponseEntity<>(service.findByFilters(filters), HttpStatus.OK);
}
...
|
edd84bd9975b9f42752dd7d3c44822e9b3ac9fd9
|
software/bulkoperator/src/main/java/edu/wustl/bulkoperator/processor/CustomDateConverter.java
|
software/bulkoperator/src/main/java/edu/wustl/bulkoperator/processor/CustomDateConverter.java
|
package edu.wustl.bulkoperator.processor;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.apache.commons.beanutils.Converter;
import edu.wustl.bulkoperator.metadata.DateValue;
import edu.wustl.common.util.global.ApplicationProperties;
import edu.wustl.common.util.logger.Logger;
public class CustomDateConverter implements Converter {
private final static String DEFAULT_FORMAT = ApplicationProperties.getValue("bulk.date.valid.format.withtime");
private static final Logger logger = Logger.getCommonLogger(CustomDateConverter.class);
public Object convert(Class type, Object value)
{
SimpleDateFormat format = null;
String dateValue=null;
Date date=null;
if (value instanceof DateValue) {
format = new SimpleDateFormat(((DateValue) value).getFormat());
dateValue = ((DateValue) value).getValue();
} else {
format = new SimpleDateFormat(DEFAULT_FORMAT);
dateValue=value.toString();
}
try {
date=format.parse(dateValue);
} catch (ParseException e) {
logger.error("Error while parsing date.", e);
}
return date;
}
}
|
package edu.wustl.bulkoperator.processor;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.apache.commons.beanutils.Converter;
import edu.wustl.bulkoperator.metadata.DateValue;
import edu.wustl.common.util.global.ApplicationProperties;
import edu.wustl.common.util.logger.Logger;
public class CustomDateConverter implements Converter {
private final static String DEFAULT_FORMAT = ApplicationProperties.getValue("bulk.date.valid.format.withtime");
private static final Logger logger = Logger.getCommonLogger(CustomDateConverter.class);
public Object convert(Class type, Object value)
{
SimpleDateFormat format = null;
String dateValue=null;
Date date=null;
if (value instanceof DateValue) {
format = new SimpleDateFormat(((DateValue) value).getFormat());
format.setLenient(false);
dateValue = ((DateValue) value).getValue();
} else {
format = new SimpleDateFormat(DEFAULT_FORMAT);
dateValue=value.toString();
}
try {
date=format.parse(dateValue);
} catch (ParseException e) {
logger.error("Error while parsing date.", e);
}
return date;
}
}
|
Fix for default date format issue.
|
Fix for default date format issue.
SVN-Revision: 402
|
Java
|
bsd-3-clause
|
NCIP/catissue-migration-tool,NCIP/catissue-migration-tool
|
java
|
## Code Before:
package edu.wustl.bulkoperator.processor;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.apache.commons.beanutils.Converter;
import edu.wustl.bulkoperator.metadata.DateValue;
import edu.wustl.common.util.global.ApplicationProperties;
import edu.wustl.common.util.logger.Logger;
public class CustomDateConverter implements Converter {
private final static String DEFAULT_FORMAT = ApplicationProperties.getValue("bulk.date.valid.format.withtime");
private static final Logger logger = Logger.getCommonLogger(CustomDateConverter.class);
public Object convert(Class type, Object value)
{
SimpleDateFormat format = null;
String dateValue=null;
Date date=null;
if (value instanceof DateValue) {
format = new SimpleDateFormat(((DateValue) value).getFormat());
dateValue = ((DateValue) value).getValue();
} else {
format = new SimpleDateFormat(DEFAULT_FORMAT);
dateValue=value.toString();
}
try {
date=format.parse(dateValue);
} catch (ParseException e) {
logger.error("Error while parsing date.", e);
}
return date;
}
}
## Instruction:
Fix for default date format issue.
SVN-Revision: 402
## Code After:
package edu.wustl.bulkoperator.processor;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.apache.commons.beanutils.Converter;
import edu.wustl.bulkoperator.metadata.DateValue;
import edu.wustl.common.util.global.ApplicationProperties;
import edu.wustl.common.util.logger.Logger;
public class CustomDateConverter implements Converter {
private final static String DEFAULT_FORMAT = ApplicationProperties.getValue("bulk.date.valid.format.withtime");
private static final Logger logger = Logger.getCommonLogger(CustomDateConverter.class);
public Object convert(Class type, Object value)
{
SimpleDateFormat format = null;
String dateValue=null;
Date date=null;
if (value instanceof DateValue) {
format = new SimpleDateFormat(((DateValue) value).getFormat());
format.setLenient(false);
dateValue = ((DateValue) value).getValue();
} else {
format = new SimpleDateFormat(DEFAULT_FORMAT);
dateValue=value.toString();
}
try {
date=format.parse(dateValue);
} catch (ParseException e) {
logger.error("Error while parsing date.", e);
}
return date;
}
}
|
// ... existing code ...
Date date=null;
if (value instanceof DateValue) {
format = new SimpleDateFormat(((DateValue) value).getFormat());
format.setLenient(false);
dateValue = ((DateValue) value).getValue();
} else {
format = new SimpleDateFormat(DEFAULT_FORMAT);
// ... rest of the code ...
|
1e7780610468211cceb4483beea45aa7c850834e
|
chrome/browser/ui/views/toolbar/wrench_menu_observer.h
|
chrome/browser/ui/views/toolbar/wrench_menu_observer.h
|
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_UI_VIEWS_TOOLBAR_WRENCH_MENU_OBSERVER_H_
#define CHROME_BROWSER_UI_VIEWS_TOOLBAR_WRENCH_MENU_OBSERVER_H_
// TODO(gbillock): Make this an inner class of WrenchMenu. (even needed?)
class WrenchMenuObserver {
public:
// Invoked when the WrenchMenu is about to be destroyed (from its destructor).
virtual void WrenchMenuDestroyed() = 0;
protected:
virtual ~WrenchMenuObserver() {}
};
#endif // CHROME_BROWSER_UI_VIEWS_TOOLBAR_WRENCH_MENU_OBSERVER_H_
|
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_UI_VIEWS_TOOLBAR_WRENCH_MENU_OBSERVER_H_
#define CHROME_BROWSER_UI_VIEWS_TOOLBAR_WRENCH_MENU_OBSERVER_H_
class WrenchMenuObserver {
public:
// Invoked when the WrenchMenu is about to be destroyed (from its destructor).
virtual void WrenchMenuDestroyed() = 0;
protected:
virtual ~WrenchMenuObserver() {}
};
#endif // CHROME_BROWSER_UI_VIEWS_TOOLBAR_WRENCH_MENU_OBSERVER_H_
|
Remove TODO about making WrenchMenuObserver an inner class of WrenchMenu.
|
Remove TODO about making WrenchMenuObserver an inner class of WrenchMenu.
Chromium code style says:
"Prefer putting delegate classes in their own header files. Implementors
of the
delegate interface will often be included elsewhere, which will often
cause more
coupling with the header of the main class."
http://www.chromium.org/developers/coding-style - Code formatting
section
BUG=None
[email protected]
NOTRY=true
Review URL: https://codereview.chromium.org/118623003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@244848 0039d316-1c4b-4281-b951-d872f2087c98
|
C
|
bsd-3-clause
|
PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,jaruba/chromium.src,Chilledheart/chromium,crosswalk-project/chromium-crosswalk-efl,Jonekee/chromium.src,littlstar/chromium.src,PeterWangIntel/chromium-crosswalk,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,hgl888/chromium-crosswalk,ondra-novak/chromium.src,chuan9/chromium-crosswalk,jaruba/chromium.src,Just-D/chromium-1,ondra-novak/chromium.src,markYoungH/chromium.src,Chilledheart/chromium,patrickm/chromium.src,chuan9/chromium-crosswalk,littlstar/chromium.src,Fireblend/chromium-crosswalk,Chilledheart/chromium,jaruba/chromium.src,chuan9/chromium-crosswalk,Chilledheart/chromium,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,chuan9/chromium-crosswalk,dednal/chromium.src,bright-sparks/chromium-spacewalk,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,Just-D/chromium-1,Jonekee/chromium.src,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,jaruba/chromium.src,ltilve/chromium,krieger-od/nwjs_chromium.src,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,anirudhSK/chromium,axinging/chromium-crosswalk,Just-D/chromium-1,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,littlstar/chromium
.src,axinging/chromium-crosswalk,ondra-novak/chromium.src,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ltilve/chromium,axinging/chromium-crosswalk,anirudhSK/chromium,dushu1203/chromium.src,anirudhSK/chromium,Just-D/chromium-1,Jonekee/chromium.src,markYoungH/chromium.src,ltilve/chromium,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,fujunwei/chromium-crosswalk,markYoungH/chromium.src,Jonekee/chromium.src,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,patrickm/chromium.src,Chilledheart/chromium,chuan9/chromium-crosswalk,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,patrickm/chromium.src,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,patrickm/chromium.src,hgl888/chromium-crosswalk,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,ChromiumWebApps/chromium,markYoungH/chromium.src,dednal/chromium.src,ChromiumWebApps/chromium,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,M4sse/chromium.src,Fireblend/chromium-crosswalk,dushu1203/chromium.src,li
ttlstar/chromium.src,ondra-novak/chromium.src,ChromiumWebApps/chromium,dednal/chromium.src,dushu1203/chromium.src,markYoungH/chromium.src,markYoungH/chromium.src,ChromiumWebApps/chromium,dednal/chromium.src,ChromiumWebApps/chromium,anirudhSK/chromium,dushu1203/chromium.src,anirudhSK/chromium,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,anirudhSK/chromium,ondra-novak/chromium.src,axinging/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk-efl,littlstar/chromium.src,dednal/chromium.src,bright-sparks/chromium-spacewalk,patrickm/chromium.src,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,littlstar/chromium.src,ltilve/chromium,PeterWangIntel/chromium-crosswalk,patrickm/chromium.src,hgl888/chromium-crosswalk,jaruba/chromium.src,Just-D/chromium-1,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,markYoungH/chromium.src,patrickm/chromium.src,bright-sparks/chromium-spacewalk,ltilve/chromium,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,ltilve/chromium,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,Jonekee/chromium.src,ChromiumWebApps/chromium,jaruba/chromium.src,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,dednal/chromium.src,Pluto-tv/chromium-crosswalk,littlstar
/chromium.src,Jonekee/chromium.src,axinging/chromium-crosswalk,ltilve/chromium,PeterWangIntel/chromium-crosswalk,ondra-novak/chromium.src,Just-D/chromium-1,jaruba/chromium.src,Fireblend/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,M4sse/chromium.src,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,dednal/chromium.src,fujunwei/chromium-crosswalk,markYoungH/chromium.src,Chilledheart/chromium
|
c
|
## Code Before:
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_UI_VIEWS_TOOLBAR_WRENCH_MENU_OBSERVER_H_
#define CHROME_BROWSER_UI_VIEWS_TOOLBAR_WRENCH_MENU_OBSERVER_H_
// TODO(gbillock): Make this an inner class of WrenchMenu. (even needed?)
class WrenchMenuObserver {
public:
// Invoked when the WrenchMenu is about to be destroyed (from its destructor).
virtual void WrenchMenuDestroyed() = 0;
protected:
virtual ~WrenchMenuObserver() {}
};
#endif // CHROME_BROWSER_UI_VIEWS_TOOLBAR_WRENCH_MENU_OBSERVER_H_
## Instruction:
Remove TODO about making WrenchMenuObserver an inner class of WrenchMenu.
Chromium code style says:
"Prefer putting delegate classes in their own header files. Implementors
of the
delegate interface will often be included elsewhere, which will often
cause more
coupling with the header of the main class."
http://www.chromium.org/developers/coding-style - Code formatting
section
BUG=None
[email protected]
NOTRY=true
Review URL: https://codereview.chromium.org/118623003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@244848 0039d316-1c4b-4281-b951-d872f2087c98
## Code After:
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_UI_VIEWS_TOOLBAR_WRENCH_MENU_OBSERVER_H_
#define CHROME_BROWSER_UI_VIEWS_TOOLBAR_WRENCH_MENU_OBSERVER_H_
class WrenchMenuObserver {
public:
// Invoked when the WrenchMenu is about to be destroyed (from its destructor).
virtual void WrenchMenuDestroyed() = 0;
protected:
virtual ~WrenchMenuObserver() {}
};
#endif // CHROME_BROWSER_UI_VIEWS_TOOLBAR_WRENCH_MENU_OBSERVER_H_
|
# ... existing code ...
#ifndef CHROME_BROWSER_UI_VIEWS_TOOLBAR_WRENCH_MENU_OBSERVER_H_
#define CHROME_BROWSER_UI_VIEWS_TOOLBAR_WRENCH_MENU_OBSERVER_H_
class WrenchMenuObserver {
public:
// Invoked when the WrenchMenu is about to be destroyed (from its destructor).
# ... rest of the code ...
|
c3479ba8d8486ae9a274367b4601e9e4b6699a1a
|
prj/urls.py
|
prj/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^djadmin/', include(admin.site.urls)),
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^djadmin/', include(admin.site.urls)),
# Root
url( r'^$', 'wishlist.views.index' ),
)
|
Add root URL (to serve public wishlist)
|
Add root URL (to serve public wishlist)
|
Python
|
mit
|
cgarvey/django-mywishlist,cgarvey/django-mywishlist
|
python
|
## Code Before:
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^djadmin/', include(admin.site.urls)),
)
## Instruction:
Add root URL (to serve public wishlist)
## Code After:
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^djadmin/', include(admin.site.urls)),
# Root
url( r'^$', 'wishlist.views.index' ),
)
|
# ... existing code ...
urlpatterns = patterns('',
url(r'^djadmin/', include(admin.site.urls)),
# Root
url( r'^$', 'wishlist.views.index' ),
)
# ... rest of the code ...
|
ed9635ab7ca086bb79a48daae8a390887b7bf78f
|
datadict/datadict_utils.py
|
datadict/datadict_utils.py
|
import pandas as pd
def load_datadict(filepath, trim_index=True, trim_all=False):
df = pd.read_csv(filepath, index_col=0)
if trim_index:
df.index = df.index.to_series().str.strip()
if trim_all:
df = df.applymap(lambda x: x.strip() if type(x) is str else x)
return df
def insert_rows_at(main_df, index_name, inserted_df, insert_before=False):
# Not checking if index exists because that will be apparent from error
# NOTE: This will not work with duplicate indices
pre_df = main_df.loc[:index_name]
post_df = main_df.loc[index_name:]
# Both pre_ and post_ contain the value at index_name, so one needs to
# drop it
if not insert_before:
pre_df = pre_df.drop(index_name)
else:
post_df = post_df.drop(index_name)
return pd.concat([pre_df, inserted_df, post_df],
axis=0)
|
import pandas as pd
def load_datadict(filepath, trim_index=True, trim_all=False):
df = pd.read_csv(filepath, index_col=0, dtype=object)
if trim_index:
df.index = df.index.to_series().str.strip()
if trim_all:
df = df.applymap(lambda x: x.strip() if type(x) is str else x)
return df
def insert_rows_at(main_df, index_name, inserted_df, insert_before=False):
# Not checking if index exists because that will be apparent from error
# NOTE: This will not work with duplicate indices
pre_df = main_df.loc[:index_name]
post_df = main_df.loc[index_name:]
# Both pre_ and post_ contain the value at index_name, so one needs to
# drop it
if not insert_before:
pre_df = pre_df.drop(index_name)
else:
post_df = post_df.drop(index_name)
return pd.concat([pre_df, inserted_df, post_df],
axis=0)
|
Read datadict file as-is, without type-guessing
|
Read datadict file as-is, without type-guessing
|
Python
|
bsd-3-clause
|
sibis-platform/ncanda-datacore,sibis-platform/ncanda-datacore,sibis-platform/ncanda-data-integration,sibis-platform/ncanda-datacore,sibis-platform/ncanda-data-integration
|
python
|
## Code Before:
import pandas as pd
def load_datadict(filepath, trim_index=True, trim_all=False):
df = pd.read_csv(filepath, index_col=0)
if trim_index:
df.index = df.index.to_series().str.strip()
if trim_all:
df = df.applymap(lambda x: x.strip() if type(x) is str else x)
return df
def insert_rows_at(main_df, index_name, inserted_df, insert_before=False):
# Not checking if index exists because that will be apparent from error
# NOTE: This will not work with duplicate indices
pre_df = main_df.loc[:index_name]
post_df = main_df.loc[index_name:]
# Both pre_ and post_ contain the value at index_name, so one needs to
# drop it
if not insert_before:
pre_df = pre_df.drop(index_name)
else:
post_df = post_df.drop(index_name)
return pd.concat([pre_df, inserted_df, post_df],
axis=0)
## Instruction:
Read datadict file as-is, without type-guessing
## Code After:
import pandas as pd
def load_datadict(filepath, trim_index=True, trim_all=False):
df = pd.read_csv(filepath, index_col=0, dtype=object)
if trim_index:
df.index = df.index.to_series().str.strip()
if trim_all:
df = df.applymap(lambda x: x.strip() if type(x) is str else x)
return df
def insert_rows_at(main_df, index_name, inserted_df, insert_before=False):
# Not checking if index exists because that will be apparent from error
# NOTE: This will not work with duplicate indices
pre_df = main_df.loc[:index_name]
post_df = main_df.loc[index_name:]
# Both pre_ and post_ contain the value at index_name, so one needs to
# drop it
if not insert_before:
pre_df = pre_df.drop(index_name)
else:
post_df = post_df.drop(index_name)
return pd.concat([pre_df, inserted_df, post_df],
axis=0)
|
...
import pandas as pd
def load_datadict(filepath, trim_index=True, trim_all=False):
df = pd.read_csv(filepath, index_col=0, dtype=object)
if trim_index:
df.index = df.index.to_series().str.strip()
if trim_all:
...
|
ce86f13553e97e3e86f8c07bf09228895aacd3c5
|
scripts/master/factory/syzygy_commands.py
|
scripts/master/factory/syzygy_commands.py
|
from buildbot.steps import shell
from master.factory import commands
class SyzygyCommands(commands.FactoryCommands):
"""Encapsulates methods to add Syzygy commands to a buildbot factory."""
def __init__(self, factory=None, target=None, build_dir=None,
target_platform=None, target_arch=None):
commands.FactoryCommands.__init__(self, factory, target, build_dir,
target_platform)
self._arch = target_arch
self._factory = factory
def AddRandomizeChromeStep(self, timeout=600):
# Randomization script path.
script_path = self.PathJoin('internal', 'build', 'randomize_chrome.py')
command = [self._python, script_path,
'--build-dir=%s' % self._build_dir,
'--target=%s' % self._target]
self.factory.addStep(shell.ShellCommand,
name='randomize',
description=['Randomly', 'Reordering', 'Chrome'],
command=cmd,
timeout=timeout)
|
from buildbot.steps import shell
from master.factory import commands
class SyzygyCommands(commands.FactoryCommands):
"""Encapsulates methods to add Syzygy commands to a buildbot factory."""
def __init__(self, factory=None, target=None, build_dir=None,
target_platform=None, target_arch=None):
commands.FactoryCommands.__init__(self, factory, target, build_dir,
target_platform)
self._arch = target_arch
self._factory = factory
def AddRandomizeChromeStep(self, timeout=600):
# Randomization script path.
script_path = self.PathJoin(self._build_dir, 'internal', 'build',
'randomize_chrome.py')
command = [self._python, script_path,
'--build-dir=%s' % self._build_dir,
'--target=%s' % self._target,
'--verbose']
self._factory.addStep(shell.ShellCommand,
name='randomize',
description=['Randomly', 'Reordering', 'Chrome'],
command=command,
timeout=timeout)
|
Fix typos and paths broken in previous CL.
|
Fix typos and paths broken in previous CL.
Review URL: http://codereview.chromium.org/7085037
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@87249 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
python
|
## Code Before:
from buildbot.steps import shell
from master.factory import commands
class SyzygyCommands(commands.FactoryCommands):
"""Encapsulates methods to add Syzygy commands to a buildbot factory."""
def __init__(self, factory=None, target=None, build_dir=None,
target_platform=None, target_arch=None):
commands.FactoryCommands.__init__(self, factory, target, build_dir,
target_platform)
self._arch = target_arch
self._factory = factory
def AddRandomizeChromeStep(self, timeout=600):
# Randomization script path.
script_path = self.PathJoin('internal', 'build', 'randomize_chrome.py')
command = [self._python, script_path,
'--build-dir=%s' % self._build_dir,
'--target=%s' % self._target]
self.factory.addStep(shell.ShellCommand,
name='randomize',
description=['Randomly', 'Reordering', 'Chrome'],
command=cmd,
timeout=timeout)
## Instruction:
Fix typos and paths broken in previous CL.
Review URL: http://codereview.chromium.org/7085037
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@87249 0039d316-1c4b-4281-b951-d872f2087c98
## Code After:
from buildbot.steps import shell
from master.factory import commands
class SyzygyCommands(commands.FactoryCommands):
"""Encapsulates methods to add Syzygy commands to a buildbot factory."""
def __init__(self, factory=None, target=None, build_dir=None,
target_platform=None, target_arch=None):
commands.FactoryCommands.__init__(self, factory, target, build_dir,
target_platform)
self._arch = target_arch
self._factory = factory
def AddRandomizeChromeStep(self, timeout=600):
# Randomization script path.
script_path = self.PathJoin(self._build_dir, 'internal', 'build',
'randomize_chrome.py')
command = [self._python, script_path,
'--build-dir=%s' % self._build_dir,
'--target=%s' % self._target,
'--verbose']
self._factory.addStep(shell.ShellCommand,
name='randomize',
description=['Randomly', 'Reordering', 'Chrome'],
command=command,
timeout=timeout)
|
...
def AddRandomizeChromeStep(self, timeout=600):
# Randomization script path.
script_path = self.PathJoin(self._build_dir, 'internal', 'build',
'randomize_chrome.py')
command = [self._python, script_path,
'--build-dir=%s' % self._build_dir,
'--target=%s' % self._target,
'--verbose']
self._factory.addStep(shell.ShellCommand,
name='randomize',
description=['Randomly', 'Reordering', 'Chrome'],
command=command,
timeout=timeout)
...
|
c1ae1393f8b3717c4dd174a4cf7134c70805ab9d
|
src/main/java/parse/ArticleParse.java
|
src/main/java/parse/ArticleParse.java
|
/*
Article Parse
*/
import java.io.*;
public class ArticleParse {
public static void main(String[] args) {
System.out.println("Running Article Parse");
}
}
|
/*
Article Parse
*/
import java.io.*;
public class ArticleParse {
public static void main(String[] args) {
if (args.length == 0) {
printUsageInterface();
System.exit(1);
}
System.out.println("Running Article Parse... (prints PDFs encountered)");
}
private static void printUsageInterface() {
String message = "";
message += "+++ ArticleParse +++\n";
message += "\n";
message += " PURPOSE: Given a list of locations (files or directories) reads each PDF document encountered.\n";
message += " The content of the PDF is semantically parsed. For example the authors of the article can be selected and saved.\n";
message += "\n";
message += " USAGE: ArticleParse <location_1> [ .. <location_n> ]\n";
System.err.print(message);
}
}
|
Print purpose and usage if called without arguments
|
feature: Print purpose and usage if called without arguments
|
Java
|
mit
|
EJBM-Einstein/ArticleParse
|
java
|
## Code Before:
/*
Article Parse
*/
import java.io.*;
public class ArticleParse {
public static void main(String[] args) {
System.out.println("Running Article Parse");
}
}
## Instruction:
feature: Print purpose and usage if called without arguments
## Code After:
/*
Article Parse
*/
import java.io.*;
public class ArticleParse {
public static void main(String[] args) {
if (args.length == 0) {
printUsageInterface();
System.exit(1);
}
System.out.println("Running Article Parse... (prints PDFs encountered)");
}
private static void printUsageInterface() {
String message = "";
message += "+++ ArticleParse +++\n";
message += "\n";
message += " PURPOSE: Given a list of locations (files or directories) reads each PDF document encountered.\n";
message += " The content of the PDF is semantically parsed. For example the authors of the article can be selected and saved.\n";
message += "\n";
message += " USAGE: ArticleParse <location_1> [ .. <location_n> ]\n";
System.err.print(message);
}
}
|
...
public static void main(String[] args) {
if (args.length == 0) {
printUsageInterface();
System.exit(1);
}
System.out.println("Running Article Parse... (prints PDFs encountered)");
}
private static void printUsageInterface() {
String message = "";
message += "+++ ArticleParse +++\n";
message += "\n";
message += " PURPOSE: Given a list of locations (files or directories) reads each PDF document encountered.\n";
message += " The content of the PDF is semantically parsed. For example the authors of the article can be selected and saved.\n";
message += "\n";
message += " USAGE: ArticleParse <location_1> [ .. <location_n> ]\n";
System.err.print(message);
}
}
...
|
fa43b19f257ee5de2cb9b0b4075e69c98ed0b2d4
|
src/main/java/in/twizmwaz/cardinal/util/DomUtils.java
|
src/main/java/in/twizmwaz/cardinal/util/DomUtils.java
|
package in.twizmwaz.cardinal.util;
import org.jdom2.Document;
import org.jdom2.JDOMException;
import org.jdom2.input.SAXBuilder;
import java.io.File;
import java.io.IOException;
public class DomUtils {
public static Document parse(File file) throws JDOMException, IOException {
SAXBuilder saxBuilder = new SAXBuilder();
return saxBuilder.build(file);
}
}
|
package in.twizmwaz.cardinal.util;
import org.bukkit.Bukkit;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.input.SAXBuilder;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Path;
public class DomUtils {
public static Document parse(File file) throws JDOMException, IOException {
SAXBuilder saxBuilder = new SAXBuilder();
Document original = saxBuilder.build(file);
Path repo = file.getParentFile().getParentFile().toPath();
for (Element include : original.getRootElement().getChildren("include")) {
try {
Path included;
try {
included = repo.resolve(include.getAttributeValue("src")).normalize();
for (Element element : parse(included.toFile()).getRootElement().getChildren()) {
original.getRootElement().addContent(element.detach());
}
} catch (FileNotFoundException e) {
included = file.getParentFile().toPath().resolve(include.getAttributeValue("src")).normalize();
for (Element element : parse(included.toFile()).getRootElement().getChildren()) {
original.getRootElement().addContent(element.detach());
Bukkit.getLogger().info(element.getName());
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
return original;
}
}
|
Add includes, not working yet
|
Add includes, not working yet
|
Java
|
mit
|
dentmaged/Cardinal-Plus,TheMolkaPL/CardinalPGM,twizmwazin/CardinalPGM,Aaron1011/CardinalPGM,angelitorb99/CardinalPGM,dentmaged/CardinalPGM,dentmaged/Cardinal-Plus,Alan736/NotCardinalPGM,Electroid/ExperimentalPGM,dentmaged/Cardinal-Dev,dentmaged/CardinalPGM,iPGz/CardinalPGM,CaptainElliott/CardinalPGM,TheMolkaPL/CardinalPGM,Pablete1234/CardinalPGM,Electroid/ExperimentalPGM,Alan736/NotCardinalPGM,dentmaged/Cardinal-Dev,SungMatt/CardinalPGM
|
java
|
## Code Before:
package in.twizmwaz.cardinal.util;
import org.jdom2.Document;
import org.jdom2.JDOMException;
import org.jdom2.input.SAXBuilder;
import java.io.File;
import java.io.IOException;
public class DomUtils {
public static Document parse(File file) throws JDOMException, IOException {
SAXBuilder saxBuilder = new SAXBuilder();
return saxBuilder.build(file);
}
}
## Instruction:
Add includes, not working yet
## Code After:
package in.twizmwaz.cardinal.util;
import org.bukkit.Bukkit;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.input.SAXBuilder;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Path;
public class DomUtils {
public static Document parse(File file) throws JDOMException, IOException {
SAXBuilder saxBuilder = new SAXBuilder();
Document original = saxBuilder.build(file);
Path repo = file.getParentFile().getParentFile().toPath();
for (Element include : original.getRootElement().getChildren("include")) {
try {
Path included;
try {
included = repo.resolve(include.getAttributeValue("src")).normalize();
for (Element element : parse(included.toFile()).getRootElement().getChildren()) {
original.getRootElement().addContent(element.detach());
}
} catch (FileNotFoundException e) {
included = file.getParentFile().toPath().resolve(include.getAttributeValue("src")).normalize();
for (Element element : parse(included.toFile()).getRootElement().getChildren()) {
original.getRootElement().addContent(element.detach());
Bukkit.getLogger().info(element.getName());
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
return original;
}
}
|
// ... existing code ...
package in.twizmwaz.cardinal.util;
import org.bukkit.Bukkit;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.input.SAXBuilder;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Path;
public class DomUtils {
public static Document parse(File file) throws JDOMException, IOException {
SAXBuilder saxBuilder = new SAXBuilder();
Document original = saxBuilder.build(file);
Path repo = file.getParentFile().getParentFile().toPath();
for (Element include : original.getRootElement().getChildren("include")) {
try {
Path included;
try {
included = repo.resolve(include.getAttributeValue("src")).normalize();
for (Element element : parse(included.toFile()).getRootElement().getChildren()) {
original.getRootElement().addContent(element.detach());
}
} catch (FileNotFoundException e) {
included = file.getParentFile().toPath().resolve(include.getAttributeValue("src")).normalize();
for (Element element : parse(included.toFile()).getRootElement().getChildren()) {
original.getRootElement().addContent(element.detach());
Bukkit.getLogger().info(element.getName());
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
return original;
}
}
// ... rest of the code ...
|
83443f472fff11a74e983e42c56a245953890248
|
src/ref.h
|
src/ref.h
|
enum class ref_src_t {
FILE,
AIRCRAFT,
PLUGIN,
USER_MSG,
BLACKLIST,
};
/// Superclass defining some common interface items for dataref and commandref.
class RefRecord {
protected:
std::string name;
ref_src_t source;
std::chrono::system_clock::time_point last_updated;
std::chrono::system_clock::time_point last_updated_big;
RefRecord(const std::string & name, ref_src_t source) : name(name), source(source), last_updated(std::chrono::system_clock::now()) {}
public:
virtual ~RefRecord() {}
const std::string & getName() const { return name; }
ref_src_t getSource() const { return source; }
virtual std::string getDisplayString(size_t display_length) const = 0;
bool isBlacklisted() const { return ref_src_t::BLACKLIST == source; }
const std::chrono::system_clock::time_point & getLastUpdateTime() const { return last_updated; }
const std::chrono::system_clock::time_point & getLastBigUpdateTime() const { return last_updated_big; }
};
|
enum class ref_src_t {
FILE,
AIRCRAFT,
PLUGIN,
USER_MSG,
BLACKLIST,
};
/// Superclass defining some common interface items for dataref and commandref.
class RefRecord {
protected:
std::string name;
ref_src_t source;
std::chrono::system_clock::time_point last_updated;
std::chrono::system_clock::time_point last_updated_big;
RefRecord(const std::string & name, ref_src_t source) : name(name), source(source), last_updated(std::chrono::system_clock::from_time_t(0)), last_updated_big(std::chrono::system_clock::from_time_t(0)) {}
public:
virtual ~RefRecord() {}
const std::string & getName() const { return name; }
ref_src_t getSource() const { return source; }
virtual std::string getDisplayString(size_t display_length) const = 0;
bool isBlacklisted() const { return ref_src_t::BLACKLIST == source; }
const std::chrono::system_clock::time_point & getLastUpdateTime() const { return last_updated; }
const std::chrono::system_clock::time_point & getLastBigUpdateTime() const { return last_updated_big; }
};
|
Handle initialization of change detection variables in a more sane way. No more false positives of changes on startup.
|
Handle initialization of change detection variables in a more sane way. No more false positives of changes on startup.
|
C
|
mit
|
leecbaker/datareftool,leecbaker/datareftool,leecbaker/datareftool
|
c
|
## Code Before:
enum class ref_src_t {
FILE,
AIRCRAFT,
PLUGIN,
USER_MSG,
BLACKLIST,
};
/// Superclass defining some common interface items for dataref and commandref.
class RefRecord {
protected:
std::string name;
ref_src_t source;
std::chrono::system_clock::time_point last_updated;
std::chrono::system_clock::time_point last_updated_big;
RefRecord(const std::string & name, ref_src_t source) : name(name), source(source), last_updated(std::chrono::system_clock::now()) {}
public:
virtual ~RefRecord() {}
const std::string & getName() const { return name; }
ref_src_t getSource() const { return source; }
virtual std::string getDisplayString(size_t display_length) const = 0;
bool isBlacklisted() const { return ref_src_t::BLACKLIST == source; }
const std::chrono::system_clock::time_point & getLastUpdateTime() const { return last_updated; }
const std::chrono::system_clock::time_point & getLastBigUpdateTime() const { return last_updated_big; }
};
## Instruction:
Handle initialization of change detection variables in a more sane way. No more false positives of changes on startup.
## Code After:
enum class ref_src_t {
FILE,
AIRCRAFT,
PLUGIN,
USER_MSG,
BLACKLIST,
};
/// Superclass defining some common interface items for dataref and commandref.
class RefRecord {
protected:
std::string name;
ref_src_t source;
std::chrono::system_clock::time_point last_updated;
std::chrono::system_clock::time_point last_updated_big;
RefRecord(const std::string & name, ref_src_t source) : name(name), source(source), last_updated(std::chrono::system_clock::from_time_t(0)), last_updated_big(std::chrono::system_clock::from_time_t(0)) {}
public:
virtual ~RefRecord() {}
const std::string & getName() const { return name; }
ref_src_t getSource() const { return source; }
virtual std::string getDisplayString(size_t display_length) const = 0;
bool isBlacklisted() const { return ref_src_t::BLACKLIST == source; }
const std::chrono::system_clock::time_point & getLastUpdateTime() const { return last_updated; }
const std::chrono::system_clock::time_point & getLastBigUpdateTime() const { return last_updated_big; }
};
|
# ... existing code ...
ref_src_t source;
std::chrono::system_clock::time_point last_updated;
std::chrono::system_clock::time_point last_updated_big;
RefRecord(const std::string & name, ref_src_t source) : name(name), source(source), last_updated(std::chrono::system_clock::from_time_t(0)), last_updated_big(std::chrono::system_clock::from_time_t(0)) {}
public:
virtual ~RefRecord() {}
const std::string & getName() const { return name; }
# ... rest of the code ...
|
3021418af6651ed1ffa7665353fbe5f2007304fe
|
src/com/temporaryteam/noticeditor/model/NoticeTree.java
|
src/com/temporaryteam/noticeditor/model/NoticeTree.java
|
package com.temporaryteam.noticeditor.model;
import org.json.JSONException;
import org.json.JSONObject;
public class NoticeTree {
private final NoticeTreeItem root;
public NoticeTree(NoticeTreeItem root) {
this.root = root;
}
public NoticeTree(JSONObject jsobj) throws JSONException {
root = new NoticeTreeItem(jsobj);
}
public NoticeTreeItem getRoot() {
return root;
}
/**
* @param item
* @param parent if null, item will be added to root item.
*/
public void addItem(NoticeTreeItem item, NoticeTreeItem parent) {
if (parent == null) {
parent = root;
} else if (parent.isLeaf()) {
parent = (NoticeTreeItem) parent.getParent();
}
parent.getChildren().add(item);
}
public void removeItem(NoticeTreeItem item) {
if (item == null) return;
item.getParent().getChildren().remove(item);
}
public JSONObject toJson() throws JSONException {
return root.toJson();
}
}
|
package com.temporaryteam.noticeditor.model;
import org.json.JSONException;
import org.json.JSONObject;
public class NoticeTree {
private final NoticeTreeItem root;
public NoticeTree(NoticeTreeItem root) {
this.root = root;
}
public NoticeTree(JSONObject jsobj) throws JSONException {
root = new NoticeTreeItem(jsobj);
}
public NoticeTreeItem getRoot() {
return root;
}
/**
* @param item
* @param parent if null, item will be added to root item.
*/
public void addItem(NoticeTreeItem item, NoticeTreeItem parent) {
if (parent == null) {
parent = root;
} else if (parent.isLeaf()) {
parent = (NoticeTreeItem) parent.getParent();
}
parent.getChildren().add(item);
parent.setExpanded(true);
}
public void removeItem(NoticeTreeItem item) {
if (item == null) return;
item.getParent().getChildren().remove(item);
}
public JSONObject toJson() throws JSONException {
return root.toJson();
}
}
|
Expand branch when add item
|
Expand branch when add item
|
Java
|
apache-2.0
|
NoticEditorTeam/NoticEditor,NaikSoftware/TreeNote,NaikSoftware/TreeNote,White-Oak/NoticEditor,White-Oak/NoticEditor,NoticEditorTeam/NoticEditor
|
java
|
## Code Before:
package com.temporaryteam.noticeditor.model;
import org.json.JSONException;
import org.json.JSONObject;
public class NoticeTree {
private final NoticeTreeItem root;
public NoticeTree(NoticeTreeItem root) {
this.root = root;
}
public NoticeTree(JSONObject jsobj) throws JSONException {
root = new NoticeTreeItem(jsobj);
}
public NoticeTreeItem getRoot() {
return root;
}
/**
* @param item
* @param parent if null, item will be added to root item.
*/
public void addItem(NoticeTreeItem item, NoticeTreeItem parent) {
if (parent == null) {
parent = root;
} else if (parent.isLeaf()) {
parent = (NoticeTreeItem) parent.getParent();
}
parent.getChildren().add(item);
}
public void removeItem(NoticeTreeItem item) {
if (item == null) return;
item.getParent().getChildren().remove(item);
}
public JSONObject toJson() throws JSONException {
return root.toJson();
}
}
## Instruction:
Expand branch when add item
## Code After:
package com.temporaryteam.noticeditor.model;
import org.json.JSONException;
import org.json.JSONObject;
public class NoticeTree {
private final NoticeTreeItem root;
public NoticeTree(NoticeTreeItem root) {
this.root = root;
}
public NoticeTree(JSONObject jsobj) throws JSONException {
root = new NoticeTreeItem(jsobj);
}
public NoticeTreeItem getRoot() {
return root;
}
/**
* @param item
* @param parent if null, item will be added to root item.
*/
public void addItem(NoticeTreeItem item, NoticeTreeItem parent) {
if (parent == null) {
parent = root;
} else if (parent.isLeaf()) {
parent = (NoticeTreeItem) parent.getParent();
}
parent.getChildren().add(item);
parent.setExpanded(true);
}
public void removeItem(NoticeTreeItem item) {
if (item == null) return;
item.getParent().getChildren().remove(item);
}
public JSONObject toJson() throws JSONException {
return root.toJson();
}
}
|
// ... existing code ...
parent = (NoticeTreeItem) parent.getParent();
}
parent.getChildren().add(item);
parent.setExpanded(true);
}
public void removeItem(NoticeTreeItem item) {
// ... rest of the code ...
|
d9b06edb63d20550c4b3fa0fa6924d99724dc11a
|
examples/image_resize.py
|
examples/image_resize.py
|
from __future__ import print_function
from transloadit.client import Transloadit
tl = Transloadit('TRANSLOADIT_KEY', 'TRANSLOADIT_SECRET')
ass = tl.new_assembly()
ass.add_file(open('fixtures/lol_cat.jpg', 'rb'))
ass.add_step('resize', '/image/resize', {'width': 70, 'height': 70})
response = ass.create(wait=True)
result_url = response.data.get('results').get('resize')[0].get('ssl_url')
print('Your result:', result_url)
|
from transloadit.client import Transloadit
tl = Transloadit("TRANSLOADIT_KEY", "TRANSLOADIT_SECRET")
ass = tl.new_assembly()
ass.add_file(open("fixtures/lol_cat.jpg", "rb"))
ass.add_step("resize", "/image/resize", {"width": 70, "height": 70})
response = ass.create(wait=True)
result_url = response.data.get("results").get("resize")[0].get("ssl_url")
print("Your result:", result_url)
|
Update example syntax to python3
|
Update example syntax to python3
|
Python
|
mit
|
ifedapoolarewaju/transloadit-python-sdk
|
python
|
## Code Before:
from __future__ import print_function
from transloadit.client import Transloadit
tl = Transloadit('TRANSLOADIT_KEY', 'TRANSLOADIT_SECRET')
ass = tl.new_assembly()
ass.add_file(open('fixtures/lol_cat.jpg', 'rb'))
ass.add_step('resize', '/image/resize', {'width': 70, 'height': 70})
response = ass.create(wait=True)
result_url = response.data.get('results').get('resize')[0].get('ssl_url')
print('Your result:', result_url)
## Instruction:
Update example syntax to python3
## Code After:
from transloadit.client import Transloadit
tl = Transloadit("TRANSLOADIT_KEY", "TRANSLOADIT_SECRET")
ass = tl.new_assembly()
ass.add_file(open("fixtures/lol_cat.jpg", "rb"))
ass.add_step("resize", "/image/resize", {"width": 70, "height": 70})
response = ass.create(wait=True)
result_url = response.data.get("results").get("resize")[0].get("ssl_url")
print("Your result:", result_url)
|
...
from transloadit.client import Transloadit
tl = Transloadit("TRANSLOADIT_KEY", "TRANSLOADIT_SECRET")
ass = tl.new_assembly()
ass.add_file(open("fixtures/lol_cat.jpg", "rb"))
ass.add_step("resize", "/image/resize", {"width": 70, "height": 70})
response = ass.create(wait=True)
result_url = response.data.get("results").get("resize")[0].get("ssl_url")
print("Your result:", result_url)
...
|
27ab5b022dec68f18d07988b97d65ec8fd8db83e
|
zenaida/contrib/hints/views.py
|
zenaida/contrib/hints/views.py
|
from zenaida.contrib.hints.models import Dismissed
from zenaida.contrib.hints.forms import DismissHintForm
from django.http import (HttpResponse, HttpResponseNotAllowed,
HttpResponseBadRequest, HttpResponseRedirect)
def dismiss(request):
if not request.POST:
return HttpResponseNotAllowed(['POST'])
else:
form = DismissHintForm(request.POST)
dismissed = form.save(commit=False)
dismissed.user = request.user
dismissed.save()
if 'next' in request.GET:
next_url = request.GET['next']
else:
next_url = request.META['HTTP_REFERER']
return HttpResponseRedirect(next_url)
|
from zenaida.contrib.hints.models import Dismissed
from zenaida.contrib.hints.forms import DismissHintForm
from django.core.exceptions import SuspiciousOperation
from django.http import (HttpResponse, HttpResponseNotAllowed,
HttpResponseBadRequest, HttpResponseRedirect)
from django.utils.http import is_safe_url
def dismiss(request):
if not request.POST:
return HttpResponseNotAllowed(['POST'])
else:
form = DismissHintForm(request.POST)
dismissed = form.save(commit=False)
dismissed.user = request.user
dismissed.save()
if 'next' in request.GET:
next_url = request.GET['next']
else:
next_url = request.META['HTTP_REFERER']
if not is_safe_url(next_url, host=request.get_host()):
raise SuspiciousOperation("Url {} is not safe to redirect to.".format(next_url))
return HttpResponseRedirect(next_url)
|
Check url safety before redirecting. Safety first!
|
[hints] Check url safety before redirecting. Safety first!
|
Python
|
bsd-3-clause
|
littleweaver/django-zenaida,littleweaver/django-zenaida,littleweaver/django-zenaida,littleweaver/django-zenaida
|
python
|
## Code Before:
from zenaida.contrib.hints.models import Dismissed
from zenaida.contrib.hints.forms import DismissHintForm
from django.http import (HttpResponse, HttpResponseNotAllowed,
HttpResponseBadRequest, HttpResponseRedirect)
def dismiss(request):
if not request.POST:
return HttpResponseNotAllowed(['POST'])
else:
form = DismissHintForm(request.POST)
dismissed = form.save(commit=False)
dismissed.user = request.user
dismissed.save()
if 'next' in request.GET:
next_url = request.GET['next']
else:
next_url = request.META['HTTP_REFERER']
return HttpResponseRedirect(next_url)
## Instruction:
[hints] Check url safety before redirecting. Safety first!
## Code After:
from zenaida.contrib.hints.models import Dismissed
from zenaida.contrib.hints.forms import DismissHintForm
from django.core.exceptions import SuspiciousOperation
from django.http import (HttpResponse, HttpResponseNotAllowed,
HttpResponseBadRequest, HttpResponseRedirect)
from django.utils.http import is_safe_url
def dismiss(request):
if not request.POST:
return HttpResponseNotAllowed(['POST'])
else:
form = DismissHintForm(request.POST)
dismissed = form.save(commit=False)
dismissed.user = request.user
dismissed.save()
if 'next' in request.GET:
next_url = request.GET['next']
else:
next_url = request.META['HTTP_REFERER']
if not is_safe_url(next_url, host=request.get_host()):
raise SuspiciousOperation("Url {} is not safe to redirect to.".format(next_url))
return HttpResponseRedirect(next_url)
|
...
from zenaida.contrib.hints.models import Dismissed
from zenaida.contrib.hints.forms import DismissHintForm
from django.core.exceptions import SuspiciousOperation
from django.http import (HttpResponse, HttpResponseNotAllowed,
HttpResponseBadRequest, HttpResponseRedirect)
from django.utils.http import is_safe_url
def dismiss(request):
if not request.POST:
...
next_url = request.GET['next']
else:
next_url = request.META['HTTP_REFERER']
if not is_safe_url(next_url, host=request.get_host()):
raise SuspiciousOperation("Url {} is not safe to redirect to.".format(next_url))
return HttpResponseRedirect(next_url)
...
|
bd823c76c5ada266060c93e45e470e35b0069806
|
setup.py
|
setup.py
|
from codecs import open as codecs_open
from setuptools import setup, find_packages
with codecs_open('README.md', encoding='utf-8') as f:
long_description = f.read()
setup(name='gypsy',
version='0.0.1',
description=u"Controlling Gypsy modules, and outputs",
long_description=long_description,
classifiers=[],
keywords='',
author=u"Julianno Sambatti",
author_email='[email protected]',
url='',
license='',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
zip_safe=False,
include_package_data=True,
package_data={
'gypsy': ['data/*'],
},
install_requires=[
'click==6.6',
'pandas==0.18.1',
'scipy==0.17.1',
],
extras_require={
'test': ['pytest==2.9.1'],
'dev': ['pytest==2.9.1', 'sphinx==1.4.1',
'pylint==1.5.4', 'git-pylint-commit-hook==2.1.1']
},
entry_points="""
[console_scripts]
gypsy=gypsy.scripts.cli:cli
"""
)
|
from codecs import open as codecs_open
from setuptools import setup, find_packages
with codecs_open('README.md', encoding='utf-8') as f:
long_description = f.read()
setup(name='gypsy',
version='0.0.1',
description=u"Controlling Gypsy modules, and outputs",
long_description=long_description,
classifiers=[],
keywords='',
author=u"Julianno Sambatti",
author_email='[email protected]',
url='',
license='',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
zip_safe=False,
include_package_data=True,
package_data={
'gypsy': ['data/*'],
},
install_requires=[
'click>=6.6',
'pandas>=0.18.1',
'scipy>=0.17.1',
],
extras_require={
'test': ['pytest>=2.9.1'],
'dev': ['pytest>=2.9.1', 'sphinx>=1.4.1',
'pylint>=1.5.4', 'git-pylint-commit-hook>=2.1.1',
'pytest-cov>=2.3.1']
},
entry_points="""
[console_scripts]
gypsy=gypsy.scripts.cli:cli
"""
)
|
Add pytest-cov and fix change requirements to >=
|
Add pytest-cov and fix change requirements to >=
|
Python
|
mit
|
tesera/pygypsy,tesera/pygypsy
|
python
|
## Code Before:
from codecs import open as codecs_open
from setuptools import setup, find_packages
with codecs_open('README.md', encoding='utf-8') as f:
long_description = f.read()
setup(name='gypsy',
version='0.0.1',
description=u"Controlling Gypsy modules, and outputs",
long_description=long_description,
classifiers=[],
keywords='',
author=u"Julianno Sambatti",
author_email='[email protected]',
url='',
license='',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
zip_safe=False,
include_package_data=True,
package_data={
'gypsy': ['data/*'],
},
install_requires=[
'click==6.6',
'pandas==0.18.1',
'scipy==0.17.1',
],
extras_require={
'test': ['pytest==2.9.1'],
'dev': ['pytest==2.9.1', 'sphinx==1.4.1',
'pylint==1.5.4', 'git-pylint-commit-hook==2.1.1']
},
entry_points="""
[console_scripts]
gypsy=gypsy.scripts.cli:cli
"""
)
## Instruction:
Add pytest-cov and fix change requirements to >=
## Code After:
from codecs import open as codecs_open
from setuptools import setup, find_packages
with codecs_open('README.md', encoding='utf-8') as f:
long_description = f.read()
setup(name='gypsy',
version='0.0.1',
description=u"Controlling Gypsy modules, and outputs",
long_description=long_description,
classifiers=[],
keywords='',
author=u"Julianno Sambatti",
author_email='[email protected]',
url='',
license='',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
zip_safe=False,
include_package_data=True,
package_data={
'gypsy': ['data/*'],
},
install_requires=[
'click>=6.6',
'pandas>=0.18.1',
'scipy>=0.17.1',
],
extras_require={
'test': ['pytest>=2.9.1'],
'dev': ['pytest>=2.9.1', 'sphinx>=1.4.1',
'pylint>=1.5.4', 'git-pylint-commit-hook>=2.1.1',
'pytest-cov>=2.3.1']
},
entry_points="""
[console_scripts]
gypsy=gypsy.scripts.cli:cli
"""
)
|
// ... existing code ...
'gypsy': ['data/*'],
},
install_requires=[
'click>=6.6',
'pandas>=0.18.1',
'scipy>=0.17.1',
],
extras_require={
'test': ['pytest>=2.9.1'],
'dev': ['pytest>=2.9.1', 'sphinx>=1.4.1',
'pylint>=1.5.4', 'git-pylint-commit-hook>=2.1.1',
'pytest-cov>=2.3.1']
},
entry_points="""
[console_scripts]
// ... rest of the code ...
|
4eab1fb42f58d6203a0862aa9caf304193d3442b
|
libcloud/common/maxihost.py
|
libcloud/common/maxihost.py
|
from libcloud.utils.py3 import httplib
from libcloud.common.types import InvalidCredsError
from libcloud.common.base import JsonResponse
from libcloud.common.base import ConnectionKey
class MaxihostResponse(JsonResponse):
valid_response_codes = [httplib.OK, httplib.ACCEPTED, httplib.CREATED,
httplib.NO_CONTENT]
def parse_error(self):
if self.status == httplib.UNAUTHORIZED:
body = self.parse_body()
raise InvalidCredsError(body['message'])
else:
body = self.parse_body()
if 'message' in body:
error = '%s (code: %s)' % (body['message'], self.status)
else:
error = body
return error
def success(self):
return self.status in self.valid_response_codes
class MaxihostConnection(ConnectionKey):
"""
Connection class for the Maxihost driver.
"""
host = 'api.maxihost.com'
responseCls = MaxihostResponse
def add_default_headers(self, headers):
"""
Add headers that are necessary for every request
This method adds apikey to the request.
"""
headers['Authorization'] = 'Bearer %s' % (self.key)
headers['Content-Type'] = 'application/json'
return headers
|
from libcloud.utils.py3 import httplib
from libcloud.common.types import InvalidCredsError
from libcloud.common.base import JsonResponse
from libcloud.common.base import ConnectionKey
class MaxihostResponse(JsonResponse):
valid_response_codes = [httplib.OK, httplib.ACCEPTED, httplib.CREATED,
httplib.NO_CONTENT]
def parse_error(self):
if self.status == httplib.UNAUTHORIZED:
body = self.parse_body()
raise InvalidCredsError(body['message'])
else:
body = self.parse_body()
if 'message' in body:
error = '%s (code: %s)' % (body['message'], self.status)
else:
error = body
return error
def success(self):
return self.status in self.valid_response_codes
class MaxihostConnection(ConnectionKey):
"""
Connection class for the Maxihost driver.
"""
host = 'api.maxihost.com'
responseCls = MaxihostResponse
def add_default_headers(self, headers):
"""
Add headers that are necessary for every request
This method adds apikey to the request.
"""
headers['Authorization'] = 'Bearer %s' % (self.key)
headers['Content-Type'] = 'application/json'
headers['Accept']: 'application/vnd.maxihost.v1.1+json'
return headers
|
Add Accept header to use version 1.1
|
Add Accept header to use version 1.1
|
Python
|
apache-2.0
|
ByteInternet/libcloud,andrewsomething/libcloud,ByteInternet/libcloud,Kami/libcloud,apache/libcloud,andrewsomething/libcloud,mistio/libcloud,Kami/libcloud,mistio/libcloud,andrewsomething/libcloud,apache/libcloud,apache/libcloud,ByteInternet/libcloud,Kami/libcloud,mistio/libcloud
|
python
|
## Code Before:
from libcloud.utils.py3 import httplib
from libcloud.common.types import InvalidCredsError
from libcloud.common.base import JsonResponse
from libcloud.common.base import ConnectionKey
class MaxihostResponse(JsonResponse):
valid_response_codes = [httplib.OK, httplib.ACCEPTED, httplib.CREATED,
httplib.NO_CONTENT]
def parse_error(self):
if self.status == httplib.UNAUTHORIZED:
body = self.parse_body()
raise InvalidCredsError(body['message'])
else:
body = self.parse_body()
if 'message' in body:
error = '%s (code: %s)' % (body['message'], self.status)
else:
error = body
return error
def success(self):
return self.status in self.valid_response_codes
class MaxihostConnection(ConnectionKey):
"""
Connection class for the Maxihost driver.
"""
host = 'api.maxihost.com'
responseCls = MaxihostResponse
def add_default_headers(self, headers):
"""
Add headers that are necessary for every request
This method adds apikey to the request.
"""
headers['Authorization'] = 'Bearer %s' % (self.key)
headers['Content-Type'] = 'application/json'
return headers
## Instruction:
Add Accept header to use version 1.1
## Code After:
from libcloud.utils.py3 import httplib
from libcloud.common.types import InvalidCredsError
from libcloud.common.base import JsonResponse
from libcloud.common.base import ConnectionKey
class MaxihostResponse(JsonResponse):
valid_response_codes = [httplib.OK, httplib.ACCEPTED, httplib.CREATED,
httplib.NO_CONTENT]
def parse_error(self):
if self.status == httplib.UNAUTHORIZED:
body = self.parse_body()
raise InvalidCredsError(body['message'])
else:
body = self.parse_body()
if 'message' in body:
error = '%s (code: %s)' % (body['message'], self.status)
else:
error = body
return error
def success(self):
return self.status in self.valid_response_codes
class MaxihostConnection(ConnectionKey):
"""
Connection class for the Maxihost driver.
"""
host = 'api.maxihost.com'
responseCls = MaxihostResponse
def add_default_headers(self, headers):
"""
Add headers that are necessary for every request
This method adds apikey to the request.
"""
headers['Authorization'] = 'Bearer %s' % (self.key)
headers['Content-Type'] = 'application/json'
headers['Accept']: 'application/vnd.maxihost.v1.1+json'
return headers
|
# ... existing code ...
"""
headers['Authorization'] = 'Bearer %s' % (self.key)
headers['Content-Type'] = 'application/json'
headers['Accept']: 'application/vnd.maxihost.v1.1+json'
return headers
# ... rest of the code ...
|
7b6d300ee0eaa2a9cf0e97ed5b3a47c1e73908ab
|
musicianlibrary-sponge/src/main/java/io/musician101/musicianlibrary/java/minecraft/sponge/gui/anvil/SpongeJumpToPage.java
|
musicianlibrary-sponge/src/main/java/io/musician101/musicianlibrary/java/minecraft/sponge/gui/anvil/SpongeJumpToPage.java
|
package io.musician101.musicianlibrary.java.minecraft.sponge.gui.anvil;
import java.util.function.BiConsumer;
import javax.annotation.Nonnull;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.plugin.PluginContainer;
import org.spongepowered.api.scheduler.Task;
public class SpongeJumpToPage<P extends PluginContainer> extends SpongeAnvilGUI {
public SpongeJumpToPage(@Nonnull P plugin, @Nonnull Player player, int maxPage, @Nonnull BiConsumer<Player, Integer> biConsumer) {
super(player, (p, name) -> {
int page;
try {
page = Integer.parseInt(name);
}
catch (NumberFormatException e) {
return "That is not a number!";
}
if (page > maxPage) {
return "Page cannot exceed " + maxPage;
}
Task.builder().delayTicks(1L).execute(() -> biConsumer.accept(player, page)).submit(plugin.getInstance().get());
return null;
});
}
}
|
package io.musician101.musicianlibrary.java.minecraft.sponge.gui.anvil;
import java.util.Optional;
import java.util.function.BiConsumer;
import javax.annotation.Nonnull;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.plugin.PluginContainer;
import org.spongepowered.api.scheduler.Task;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.format.TextColors;
public class SpongeJumpToPage<P extends PluginContainer> extends SpongeAnvilGUI {
public SpongeJumpToPage(@Nonnull P plugin, @Nonnull Player player, int maxPage, @Nonnull BiConsumer<Player, Integer> biConsumer) {
super(player, (p, name) -> {
int page;
try {
page = Integer.parseInt(name);
}
catch (NumberFormatException e) {
return "That is not a number!";
}
if (page > maxPage) {
return "Page cannot exceed " + maxPage;
}
Optional instance = plugin.getInstance();
if (instance.isPresent()) {
Task.builder().delayTicks(1L).execute(() -> biConsumer.accept(player, page)).submit(instance.get());
}
else {
player.closeInventory();
player.sendMessage(Text.builder(plugin.getId() + " does not have a valid instance. This GUI will not work until it does.").color(TextColors.RED).build());
}
return null;
});
}
}
|
Check if the PluginContainer contains an instance of the plugin.
|
Check if the PluginContainer contains an instance of the plugin.
|
Java
|
mit
|
Musician101/Common
|
java
|
## Code Before:
package io.musician101.musicianlibrary.java.minecraft.sponge.gui.anvil;
import java.util.function.BiConsumer;
import javax.annotation.Nonnull;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.plugin.PluginContainer;
import org.spongepowered.api.scheduler.Task;
public class SpongeJumpToPage<P extends PluginContainer> extends SpongeAnvilGUI {
public SpongeJumpToPage(@Nonnull P plugin, @Nonnull Player player, int maxPage, @Nonnull BiConsumer<Player, Integer> biConsumer) {
super(player, (p, name) -> {
int page;
try {
page = Integer.parseInt(name);
}
catch (NumberFormatException e) {
return "That is not a number!";
}
if (page > maxPage) {
return "Page cannot exceed " + maxPage;
}
Task.builder().delayTicks(1L).execute(() -> biConsumer.accept(player, page)).submit(plugin.getInstance().get());
return null;
});
}
}
## Instruction:
Check if the PluginContainer contains an instance of the plugin.
## Code After:
package io.musician101.musicianlibrary.java.minecraft.sponge.gui.anvil;
import java.util.Optional;
import java.util.function.BiConsumer;
import javax.annotation.Nonnull;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.plugin.PluginContainer;
import org.spongepowered.api.scheduler.Task;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.format.TextColors;
public class SpongeJumpToPage<P extends PluginContainer> extends SpongeAnvilGUI {
public SpongeJumpToPage(@Nonnull P plugin, @Nonnull Player player, int maxPage, @Nonnull BiConsumer<Player, Integer> biConsumer) {
super(player, (p, name) -> {
int page;
try {
page = Integer.parseInt(name);
}
catch (NumberFormatException e) {
return "That is not a number!";
}
if (page > maxPage) {
return "Page cannot exceed " + maxPage;
}
Optional instance = plugin.getInstance();
if (instance.isPresent()) {
Task.builder().delayTicks(1L).execute(() -> biConsumer.accept(player, page)).submit(instance.get());
}
else {
player.closeInventory();
player.sendMessage(Text.builder(plugin.getId() + " does not have a valid instance. This GUI will not work until it does.").color(TextColors.RED).build());
}
return null;
});
}
}
|
...
package io.musician101.musicianlibrary.java.minecraft.sponge.gui.anvil;
import java.util.Optional;
import java.util.function.BiConsumer;
import javax.annotation.Nonnull;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.plugin.PluginContainer;
import org.spongepowered.api.scheduler.Task;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.format.TextColors;
public class SpongeJumpToPage<P extends PluginContainer> extends SpongeAnvilGUI {
...
return "Page cannot exceed " + maxPage;
}
Optional instance = plugin.getInstance();
if (instance.isPresent()) {
Task.builder().delayTicks(1L).execute(() -> biConsumer.accept(player, page)).submit(instance.get());
}
else {
player.closeInventory();
player.sendMessage(Text.builder(plugin.getId() + " does not have a valid instance. This GUI will not work until it does.").color(TextColors.RED).build());
}
return null;
});
}
...
|
301f62a80140c319735d37fdab80b66712722de0
|
h2o-bindings/bin/custom/R/gen_isolationforest.py
|
h2o-bindings/bin/custom/R/gen_isolationforest.py
|
def update_param(name, param):
if name == 'stopping_metric':
param['values'] = ['AUTO', 'anomaly_score']
return param
return None # param untouched
extensions = dict(
required_params=['training_frame', 'x'],
validate_required_params="",
set_required_params="""
parms$training_frame <- training_frame
if(!missing(x))
parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore
""",
)
doc = dict(
preamble="""
Trains an Isolation Forest model
""",
params=dict(
x="""A vector containing the \code{character} names of the predictors in the model."""
),
examples="""
library(h2o)
h2o.init()
# Import the cars dataset
f <- "https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv"
cars <- h2o.importFile(f)
# Set the predictors
predictors <- c("displacement", "power", "weight", "acceleration", "year")
# Train the IF model
cars_if <- h2o.isolationForest(x = predictors, training_frame = cars,
seed = 1234, stopping_metric = "anomaly_score",
stopping_rounds = 3, stopping_tolerance = 0.1)
"""
)
|
def update_param(name, param):
if name == 'validation_response_column':
param['name'] = None
return param
if name == 'stopping_metric':
param['values'] = ['AUTO', 'anomaly_score']
return param
return None # param untouched
extensions = dict(
required_params=['training_frame', 'x'],
validate_required_params="",
set_required_params="""
parms$training_frame <- training_frame
if(!missing(x))
parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore
""",
skip_default_set_params_for=['validation_response_column', 'training_frame', 'ignored_columns'],
)
doc = dict(
preamble="""
Trains an Isolation Forest model
""",
params=dict(
x="""A vector containing the \code{character} names of the predictors in the model."""
),
examples="""
library(h2o)
h2o.init()
# Import the cars dataset
f <- "https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv"
cars <- h2o.importFile(f)
# Set the predictors
predictors <- c("displacement", "power", "weight", "acceleration", "year")
# Train the IF model
cars_if <- h2o.isolationForest(x = predictors, training_frame = cars,
seed = 1234, stopping_metric = "anomaly_score",
stopping_rounds = 3, stopping_tolerance = 0.1)
"""
)
|
Disable validation_response_column in R (only Python supported at first)
|
Disable validation_response_column in R (only Python supported at first)
|
Python
|
apache-2.0
|
michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3
|
python
|
## Code Before:
def update_param(name, param):
if name == 'stopping_metric':
param['values'] = ['AUTO', 'anomaly_score']
return param
return None # param untouched
extensions = dict(
required_params=['training_frame', 'x'],
validate_required_params="",
set_required_params="""
parms$training_frame <- training_frame
if(!missing(x))
parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore
""",
)
doc = dict(
preamble="""
Trains an Isolation Forest model
""",
params=dict(
x="""A vector containing the \code{character} names of the predictors in the model."""
),
examples="""
library(h2o)
h2o.init()
# Import the cars dataset
f <- "https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv"
cars <- h2o.importFile(f)
# Set the predictors
predictors <- c("displacement", "power", "weight", "acceleration", "year")
# Train the IF model
cars_if <- h2o.isolationForest(x = predictors, training_frame = cars,
seed = 1234, stopping_metric = "anomaly_score",
stopping_rounds = 3, stopping_tolerance = 0.1)
"""
)
## Instruction:
Disable validation_response_column in R (only Python supported at first)
## Code After:
def update_param(name, param):
if name == 'validation_response_column':
param['name'] = None
return param
if name == 'stopping_metric':
param['values'] = ['AUTO', 'anomaly_score']
return param
return None # param untouched
extensions = dict(
required_params=['training_frame', 'x'],
validate_required_params="",
set_required_params="""
parms$training_frame <- training_frame
if(!missing(x))
parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore
""",
skip_default_set_params_for=['validation_response_column', 'training_frame', 'ignored_columns'],
)
doc = dict(
preamble="""
Trains an Isolation Forest model
""",
params=dict(
x="""A vector containing the \code{character} names of the predictors in the model."""
),
examples="""
library(h2o)
h2o.init()
# Import the cars dataset
f <- "https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv"
cars <- h2o.importFile(f)
# Set the predictors
predictors <- c("displacement", "power", "weight", "acceleration", "year")
# Train the IF model
cars_if <- h2o.isolationForest(x = predictors, training_frame = cars,
seed = 1234, stopping_metric = "anomaly_score",
stopping_rounds = 3, stopping_tolerance = 0.1)
"""
)
|
// ... existing code ...
def update_param(name, param):
if name == 'validation_response_column':
param['name'] = None
return param
if name == 'stopping_metric':
param['values'] = ['AUTO', 'anomaly_score']
return param
return None # param untouched
extensions = dict(
required_params=['training_frame', 'x'],
// ... modified code ...
if(!missing(x))
parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore
""",
skip_default_set_params_for=['validation_response_column', 'training_frame', 'ignored_columns'],
)
doc = dict(
// ... rest of the code ...
|
9742e372a6ccca843120cb5b4e8135033d30cdd6
|
cauth/controllers/root.py
|
cauth/controllers/root.py
|
import logging
from pecan import expose, response, conf
from pecan.rest import RestController
from cauth.controllers import base, github, introspection
from cauth.utils.common import LOGOUT_MSG
logger = logging.getLogger(__name__)
class LogoutController(RestController):
@expose(template='login.html')
def get(self, **kwargs):
response.delete_cookie('auth_pubtkt', domain=conf.app.cookie_domain)
return dict(back='/', message=LOGOUT_MSG)
class RootController(object):
login = base.BaseLoginController()
login.github = github.GithubController()
login.githubAPIkey = github.PersonalAccessTokenGithubController()
about = introspection.IntrospectionController()
logout = LogoutController()
|
import logging
from pecan import expose, response, conf
from pecan.rest import RestController
from cauth.auth import base as exceptions
from cauth.controllers import base, github, introspection
from cauth.utils.common import LOGOUT_MSG
logger = logging.getLogger(__name__)
class LogoutController(RestController):
@expose(template='login.html')
def get(self, **kwargs):
response.delete_cookie('auth_pubtkt', domain=conf.app.cookie_domain)
return dict(back='/', message=LOGOUT_MSG)
class RootController(object):
login = base.BaseLoginController()
try:
login.github = github.GithubController()
login.githubAPIkey = github.PersonalAccessTokenGithubController()
except exceptions.AuthProtocolNotAvailableError as e:
logger.error("%s - skipping callback endpoint" % e.message)
about = introspection.IntrospectionController()
logout = LogoutController()
|
Fix app crashing at startup if some auth methods are not configured
|
Fix app crashing at startup if some auth methods are not configured
Change-Id: I201dbc646c6da39c5923a086a0498b7ccb854982
|
Python
|
apache-2.0
|
redhat-cip/cauth,enovance/cauth,redhat-cip/cauth,redhat-cip/cauth,enovance/cauth,enovance/cauth
|
python
|
## Code Before:
import logging
from pecan import expose, response, conf
from pecan.rest import RestController
from cauth.controllers import base, github, introspection
from cauth.utils.common import LOGOUT_MSG
logger = logging.getLogger(__name__)
class LogoutController(RestController):
@expose(template='login.html')
def get(self, **kwargs):
response.delete_cookie('auth_pubtkt', domain=conf.app.cookie_domain)
return dict(back='/', message=LOGOUT_MSG)
class RootController(object):
login = base.BaseLoginController()
login.github = github.GithubController()
login.githubAPIkey = github.PersonalAccessTokenGithubController()
about = introspection.IntrospectionController()
logout = LogoutController()
## Instruction:
Fix app crashing at startup if some auth methods are not configured
Change-Id: I201dbc646c6da39c5923a086a0498b7ccb854982
## Code After:
import logging
from pecan import expose, response, conf
from pecan.rest import RestController
from cauth.auth import base as exceptions
from cauth.controllers import base, github, introspection
from cauth.utils.common import LOGOUT_MSG
logger = logging.getLogger(__name__)
class LogoutController(RestController):
@expose(template='login.html')
def get(self, **kwargs):
response.delete_cookie('auth_pubtkt', domain=conf.app.cookie_domain)
return dict(back='/', message=LOGOUT_MSG)
class RootController(object):
login = base.BaseLoginController()
try:
login.github = github.GithubController()
login.githubAPIkey = github.PersonalAccessTokenGithubController()
except exceptions.AuthProtocolNotAvailableError as e:
logger.error("%s - skipping callback endpoint" % e.message)
about = introspection.IntrospectionController()
logout = LogoutController()
|
# ... existing code ...
from pecan import expose, response, conf
from pecan.rest import RestController
from cauth.auth import base as exceptions
from cauth.controllers import base, github, introspection
from cauth.utils.common import LOGOUT_MSG
# ... modified code ...
class RootController(object):
login = base.BaseLoginController()
try:
login.github = github.GithubController()
login.githubAPIkey = github.PersonalAccessTokenGithubController()
except exceptions.AuthProtocolNotAvailableError as e:
logger.error("%s - skipping callback endpoint" % e.message)
about = introspection.IntrospectionController()
logout = LogoutController()
# ... rest of the code ...
|
a783c1739a4e6629f428904901d674dedca971f9
|
l10n_ch_payment_slip/__manifest__.py
|
l10n_ch_payment_slip/__manifest__.py
|
{'name': 'Switzerland - ISR inpayment slip (PVR/BVR/ESR)',
'summary': 'Print inpayment slip from your invoices',
'version': '10.0.1.1.1',
'author': "Camptocamp,Odoo Community Association (OCA)",
'category': 'Localization',
'website': 'http://www.camptocamp.com',
'license': 'AGPL-3',
'depends': [
'account',
'account_invoicing',
'l10n_ch_base_bank',
'base_transaction_id', # OCA/bank-statement-reconcile
'web',
],
'data': [
"views/report_xml_templates.xml",
"views/bank.xml",
"views/account_invoice.xml",
"views/res_config_settings_views.xml",
"wizard/isr_batch_print.xml",
"report/report_declaration.xml",
"security/ir.model.access.csv"
],
'demo': [],
'auto_install': False,
'installable': True,
'images': [],
'external_dependencies': {
'python': [
'PyPDF2',
]
}
}
|
{'name': 'Switzerland - ISR inpayment slip (PVR/BVR/ESR)',
'summary': 'Print inpayment slip from your invoices',
'version': '10.0.1.1.1',
'author': "Camptocamp,Odoo Community Association (OCA)",
'category': 'Localization',
'website': 'http://www.camptocamp.com',
'license': 'AGPL-3',
'depends': [
'account',
'account_invoicing',
'l10n_ch_base_bank',
'base_transaction_id', # OCA/account-reconcile
'web',
'l10n_ch',
],
'data': [
"views/report_xml_templates.xml",
"views/bank.xml",
"views/account_invoice.xml",
"views/res_config_settings_views.xml",
"wizard/isr_batch_print.xml",
"report/report_declaration.xml",
"security/ir.model.access.csv"
],
'demo': [],
'auto_install': False,
'installable': True,
'images': [],
'external_dependencies': {
'python': [
'PyPDF2',
]
}
}
|
Fix dependency to remove std print isr button
|
Fix dependency to remove std print isr button
|
Python
|
agpl-3.0
|
brain-tec/l10n-switzerland,brain-tec/l10n-switzerland,brain-tec/l10n-switzerland
|
python
|
## Code Before:
{'name': 'Switzerland - ISR inpayment slip (PVR/BVR/ESR)',
'summary': 'Print inpayment slip from your invoices',
'version': '10.0.1.1.1',
'author': "Camptocamp,Odoo Community Association (OCA)",
'category': 'Localization',
'website': 'http://www.camptocamp.com',
'license': 'AGPL-3',
'depends': [
'account',
'account_invoicing',
'l10n_ch_base_bank',
'base_transaction_id', # OCA/bank-statement-reconcile
'web',
],
'data': [
"views/report_xml_templates.xml",
"views/bank.xml",
"views/account_invoice.xml",
"views/res_config_settings_views.xml",
"wizard/isr_batch_print.xml",
"report/report_declaration.xml",
"security/ir.model.access.csv"
],
'demo': [],
'auto_install': False,
'installable': True,
'images': [],
'external_dependencies': {
'python': [
'PyPDF2',
]
}
}
## Instruction:
Fix dependency to remove std print isr button
## Code After:
{'name': 'Switzerland - ISR inpayment slip (PVR/BVR/ESR)',
'summary': 'Print inpayment slip from your invoices',
'version': '10.0.1.1.1',
'author': "Camptocamp,Odoo Community Association (OCA)",
'category': 'Localization',
'website': 'http://www.camptocamp.com',
'license': 'AGPL-3',
'depends': [
'account',
'account_invoicing',
'l10n_ch_base_bank',
'base_transaction_id', # OCA/account-reconcile
'web',
'l10n_ch',
],
'data': [
"views/report_xml_templates.xml",
"views/bank.xml",
"views/account_invoice.xml",
"views/res_config_settings_views.xml",
"wizard/isr_batch_print.xml",
"report/report_declaration.xml",
"security/ir.model.access.csv"
],
'demo': [],
'auto_install': False,
'installable': True,
'images': [],
'external_dependencies': {
'python': [
'PyPDF2',
]
}
}
|
...
'account',
'account_invoicing',
'l10n_ch_base_bank',
'base_transaction_id', # OCA/account-reconcile
'web',
'l10n_ch',
],
'data': [
"views/report_xml_templates.xml",
...
|
bb6f4302937e477f23c4de0d6a265d1d6f8985a0
|
geometry_export.py
|
geometry_export.py
|
print "Loading ", __name__
import geometry, from_poser, to_lux
reload(geometry)
reload(from_poser)
reload(to_lux)
import from_poser, to_lux
class GeometryExporter(object):
def __init__(self, subject, convert_material = None,
write_mesh_parameters = None, options = {}):
geom = from_poser.get(subject)
if geom is None or geom.is_empty:
print "Mesh is empty."
self.write = lambda file: None
else:
print "Mesh has", geom.number_of_polygons, "polygons and",
print geom.number_of_points, "vertices"
mats = geom.materials
key = geom.material_key
if convert_material:
materials = [convert_material(mat, key) for mat in mats]
else:
materials = [' NamedMaterial "%s/%s"' % (key, mat.Name())
for mat in mats]
if options.get('compute_normals', True) in [True, 1, '1', 'true']:
geom.compute_normals()
for i in xrange(int(options.get('subdivisionlevel', 0))):
print " subdividing: pass", (i+1)
geom.subdivide()
to_lux.preprocess(geom)
self.write = lambda file: to_lux.write(file, geom, materials,
write_mesh_parameters)
|
print "Loading ", __name__
import geometry, from_poser, to_lux
reload(geometry)
reload(from_poser)
reload(to_lux)
import from_poser, to_lux
def get_materials(geometry, convert = None):
f = convert or (lambda mat, k: ' NamedMaterial "%s/%s"' % (k, mat.Name()))
return [f(mat, geometry.material_key) for mat in geometry.materials]
def preprocess(geometry, options = {}):
if options.get('compute_normals', True) in [True, 1, '1', 'true']:
geometry.compute_normals()
for i in xrange(int(options.get('subdivisionlevel', 0))):
print " subdividing: pass", (i+1)
geometry.subdivide()
class GeometryExporter(object):
def __init__(self, subject, convert_material = None,
write_mesh_parameters = None, options = {}):
geom = from_poser.get(subject)
if geom is None or geom.is_empty:
print "Mesh is empty."
self.write = lambda file: None
else:
print "Mesh has", geom.number_of_polygons, "polygons and",
print geom.number_of_points, "vertices"
materials = get_materials(geom, convert_material)
preprocess(geom, options)
to_lux.preprocess(geom)
self.write = lambda file: to_lux.write(file, geom, materials,
write_mesh_parameters)
|
Split off two functions from GeometryExporter.__init__
|
Split off two functions from GeometryExporter.__init__
|
Python
|
mit
|
odf/pydough
|
python
|
## Code Before:
print "Loading ", __name__
import geometry, from_poser, to_lux
reload(geometry)
reload(from_poser)
reload(to_lux)
import from_poser, to_lux
class GeometryExporter(object):
def __init__(self, subject, convert_material = None,
write_mesh_parameters = None, options = {}):
geom = from_poser.get(subject)
if geom is None or geom.is_empty:
print "Mesh is empty."
self.write = lambda file: None
else:
print "Mesh has", geom.number_of_polygons, "polygons and",
print geom.number_of_points, "vertices"
mats = geom.materials
key = geom.material_key
if convert_material:
materials = [convert_material(mat, key) for mat in mats]
else:
materials = [' NamedMaterial "%s/%s"' % (key, mat.Name())
for mat in mats]
if options.get('compute_normals', True) in [True, 1, '1', 'true']:
geom.compute_normals()
for i in xrange(int(options.get('subdivisionlevel', 0))):
print " subdividing: pass", (i+1)
geom.subdivide()
to_lux.preprocess(geom)
self.write = lambda file: to_lux.write(file, geom, materials,
write_mesh_parameters)
## Instruction:
Split off two functions from GeometryExporter.__init__
## Code After:
print "Loading ", __name__
import geometry, from_poser, to_lux
reload(geometry)
reload(from_poser)
reload(to_lux)
import from_poser, to_lux
def get_materials(geometry, convert = None):
f = convert or (lambda mat, k: ' NamedMaterial "%s/%s"' % (k, mat.Name()))
return [f(mat, geometry.material_key) for mat in geometry.materials]
def preprocess(geometry, options = {}):
if options.get('compute_normals', True) in [True, 1, '1', 'true']:
geometry.compute_normals()
for i in xrange(int(options.get('subdivisionlevel', 0))):
print " subdividing: pass", (i+1)
geometry.subdivide()
class GeometryExporter(object):
def __init__(self, subject, convert_material = None,
write_mesh_parameters = None, options = {}):
geom = from_poser.get(subject)
if geom is None or geom.is_empty:
print "Mesh is empty."
self.write = lambda file: None
else:
print "Mesh has", geom.number_of_polygons, "polygons and",
print geom.number_of_points, "vertices"
materials = get_materials(geom, convert_material)
preprocess(geom, options)
to_lux.preprocess(geom)
self.write = lambda file: to_lux.write(file, geom, materials,
write_mesh_parameters)
|
// ... existing code ...
reload(from_poser)
reload(to_lux)
import from_poser, to_lux
def get_materials(geometry, convert = None):
f = convert or (lambda mat, k: ' NamedMaterial "%s/%s"' % (k, mat.Name()))
return [f(mat, geometry.material_key) for mat in geometry.materials]
def preprocess(geometry, options = {}):
if options.get('compute_normals', True) in [True, 1, '1', 'true']:
geometry.compute_normals()
for i in xrange(int(options.get('subdivisionlevel', 0))):
print " subdividing: pass", (i+1)
geometry.subdivide()
class GeometryExporter(object):
// ... modified code ...
print "Mesh has", geom.number_of_polygons, "polygons and",
print geom.number_of_points, "vertices"
materials = get_materials(geom, convert_material)
preprocess(geom, options)
to_lux.preprocess(geom)
self.write = lambda file: to_lux.write(file, geom, materials,
write_mesh_parameters)
// ... rest of the code ...
|
432a7f72c790ca7ba18f4d575706461e337da593
|
src/hunter/const.py
|
src/hunter/const.py
|
import os
import site
import stat
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set()
if hasattr(site, 'getsitepackages'):
SITE_PACKAGES_PATHS.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS.add(os.path.dirname(os.path.dirname(__file__)))
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = {
'<frozen zipimport>',
'<frozen importlib._bootstrap>',
'<frozen importlib._bootstrap_external>',
sys.prefix,
sys.exec_prefix,
os.path.dirname(os.__file__),
os.path.dirname(stat.__file__),
}
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(sorted(SYS_PREFIX_PATHS, key=len, reverse=True))
|
import os
import site
import stat
import sys
import sysconfig
SITE_PACKAGES_PATHS = set()
for scheme in sysconfig.get_scheme_names():
for name in ['platlib', 'purelib']:
try:
SITE_PACKAGES_PATHS.add(sysconfig.get_path(name, scheme))
except KeyError:
pass
if hasattr(site, 'getsitepackages'):
SITE_PACKAGES_PATHS.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
if sys.version_info >= (3, 10):
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS.add(os.path.dirname(os.path.dirname(__file__)))
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = {
'<frozen zipimport>',
'<frozen importlib._bootstrap>',
'<frozen importlib._bootstrap_external>',
sys.prefix,
sys.exec_prefix,
os.path.dirname(os.__file__),
os.path.dirname(stat.__file__),
}
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(sorted(SYS_PREFIX_PATHS, key=len, reverse=True))
|
Use new method to get package paths that works without deprecations on Python 3.10
|
Use new method to get package paths that works without deprecations on Python 3.10
|
Python
|
bsd-2-clause
|
ionelmc/python-hunter
|
python
|
## Code Before:
import os
import site
import stat
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set()
if hasattr(site, 'getsitepackages'):
SITE_PACKAGES_PATHS.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS.add(os.path.dirname(os.path.dirname(__file__)))
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = {
'<frozen zipimport>',
'<frozen importlib._bootstrap>',
'<frozen importlib._bootstrap_external>',
sys.prefix,
sys.exec_prefix,
os.path.dirname(os.__file__),
os.path.dirname(stat.__file__),
}
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(sorted(SYS_PREFIX_PATHS, key=len, reverse=True))
## Instruction:
Use new method to get package paths that works without deprecations on Python 3.10
## Code After:
import os
import site
import stat
import sys
import sysconfig
SITE_PACKAGES_PATHS = set()
for scheme in sysconfig.get_scheme_names():
for name in ['platlib', 'purelib']:
try:
SITE_PACKAGES_PATHS.add(sysconfig.get_path(name, scheme))
except KeyError:
pass
if hasattr(site, 'getsitepackages'):
SITE_PACKAGES_PATHS.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
if sys.version_info >= (3, 10):
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS.add(os.path.dirname(os.path.dirname(__file__)))
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = {
'<frozen zipimport>',
'<frozen importlib._bootstrap>',
'<frozen importlib._bootstrap_external>',
sys.prefix,
sys.exec_prefix,
os.path.dirname(os.__file__),
os.path.dirname(stat.__file__),
}
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(sorted(SYS_PREFIX_PATHS, key=len, reverse=True))
|
// ... existing code ...
import site
import stat
import sys
import sysconfig
SITE_PACKAGES_PATHS = set()
for scheme in sysconfig.get_scheme_names():
for name in ['platlib', 'purelib']:
try:
SITE_PACKAGES_PATHS.add(sysconfig.get_path(name, scheme))
except KeyError:
pass
if hasattr(site, 'getsitepackages'):
SITE_PACKAGES_PATHS.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
if sys.version_info >= (3, 10):
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS.add(os.path.dirname(os.path.dirname(__file__)))
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
// ... rest of the code ...
|
99d0dc6a77144f39fce80b81247575d7c92ee4ac
|
footynews/db/models.py
|
footynews/db/models.py
|
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
class Article(Base):
"""Model for Articles"""
__tablename__ = 'articles'
_id = Column(Integer, primary_key=True)
source = Column(String)
title = Column(String)
url = Column(String)
author = Column(String)
date_published = Column(DateTime)
def __init__(self, article):
self.source = article.source
self.title = article.title
self.url = article.url
self.author = article.author
self.date_published = article.date_published
def __repr__(self):
return "<Article(source={0})>"
engine = create_engine('')
Base.metadata.create_all(engine)
DBSession = sessionmaker(bind=engine)
db_session = DBSession()
|
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
class Article(Base):
"""Model for Articles"""
__tablename__ = 'articles'
_id = Column(Integer, primary_key=True)
source = Column(String)
title = Column(String)
url = Column(String)
author = Column(String)
date_published = Column(DateTime)
def __init__(self, article):
self.source = article.source
self.title = article.title
self.url = article.url
self.author = article.author
self.date_published = article.date_published
def __repr__(self):
return ("<Article(source={0}, title={1}, url={2}, author={3}, "
"date_published={4})>".format(self.source, self.title,
self.url, self.author, self.date_published))
engine = create_engine('')
Base.metadata.create_all(engine)
DBSession = sessionmaker(bind=engine)
db_session = DBSession()
|
Define repr for Article model
|
Define repr for Article model
|
Python
|
apache-2.0
|
footynews/fn_backend
|
python
|
## Code Before:
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
class Article(Base):
"""Model for Articles"""
__tablename__ = 'articles'
_id = Column(Integer, primary_key=True)
source = Column(String)
title = Column(String)
url = Column(String)
author = Column(String)
date_published = Column(DateTime)
def __init__(self, article):
self.source = article.source
self.title = article.title
self.url = article.url
self.author = article.author
self.date_published = article.date_published
def __repr__(self):
return "<Article(source={0})>"
engine = create_engine('')
Base.metadata.create_all(engine)
DBSession = sessionmaker(bind=engine)
db_session = DBSession()
## Instruction:
Define repr for Article model
## Code After:
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
class Article(Base):
"""Model for Articles"""
__tablename__ = 'articles'
_id = Column(Integer, primary_key=True)
source = Column(String)
title = Column(String)
url = Column(String)
author = Column(String)
date_published = Column(DateTime)
def __init__(self, article):
self.source = article.source
self.title = article.title
self.url = article.url
self.author = article.author
self.date_published = article.date_published
def __repr__(self):
return ("<Article(source={0}, title={1}, url={2}, author={3}, "
"date_published={4})>".format(self.source, self.title,
self.url, self.author, self.date_published))
engine = create_engine('')
Base.metadata.create_all(engine)
DBSession = sessionmaker(bind=engine)
db_session = DBSession()
|
...
self.date_published = article.date_published
def __repr__(self):
return ("<Article(source={0}, title={1}, url={2}, author={3}, "
"date_published={4})>".format(self.source, self.title,
self.url, self.author, self.date_published))
engine = create_engine('')
Base.metadata.create_all(engine)
...
|
a99d07e02f69961be5096ce8575007cec7ec213d
|
photoshell/__main__.py
|
photoshell/__main__.py
|
import os
from gi.repository import GObject
from photoshell.config import Config
from photoshell.library import Library
from photoshell.views.slideshow import Slideshow
from photoshell.views.window import Window
c = Config({
'library': os.path.join(os.environ['HOME'], 'Pictures/Photoshell'),
'dark_theme': True,
'import_path': '%Y-%m-%d/{original_filename}'
})
# Open photo viewer
library = Library(c)
Window(c, library, Slideshow())
if c.exists():
c.flush()
|
import os
import signal
from photoshell.config import Config
from photoshell.library import Library
from photoshell.views.slideshow import Slideshow
from photoshell.views.window import Window
c = Config({
'library': os.path.join(os.environ['HOME'], 'Pictures/Photoshell'),
'dark_theme': True,
'import_path': '%Y-%m-%d/{original_filename}'
})
# Open photo viewer
library = Library(c)
signal.signal(signal.SIGINT, signal.SIG_DFL)
Window(c, library, Slideshow())
if c.exists():
c.flush()
|
Add a signal handler to handle SIGINTs
|
Add a signal handler to handle SIGINTs
Fixes #135
|
Python
|
mit
|
photoshell/photoshell,SamWhited/photoshell,campaul/photoshell
|
python
|
## Code Before:
import os
from gi.repository import GObject
from photoshell.config import Config
from photoshell.library import Library
from photoshell.views.slideshow import Slideshow
from photoshell.views.window import Window
c = Config({
'library': os.path.join(os.environ['HOME'], 'Pictures/Photoshell'),
'dark_theme': True,
'import_path': '%Y-%m-%d/{original_filename}'
})
# Open photo viewer
library = Library(c)
Window(c, library, Slideshow())
if c.exists():
c.flush()
## Instruction:
Add a signal handler to handle SIGINTs
Fixes #135
## Code After:
import os
import signal
from photoshell.config import Config
from photoshell.library import Library
from photoshell.views.slideshow import Slideshow
from photoshell.views.window import Window
c = Config({
'library': os.path.join(os.environ['HOME'], 'Pictures/Photoshell'),
'dark_theme': True,
'import_path': '%Y-%m-%d/{original_filename}'
})
# Open photo viewer
library = Library(c)
signal.signal(signal.SIGINT, signal.SIG_DFL)
Window(c, library, Slideshow())
if c.exists():
c.flush()
|
# ... existing code ...
import os
import signal
from photoshell.config import Config
from photoshell.library import Library
# ... modified code ...
# Open photo viewer
library = Library(c)
signal.signal(signal.SIGINT, signal.SIG_DFL)
Window(c, library, Slideshow())
if c.exists():
c.flush()
# ... rest of the code ...
|
ee17ff42931e718d77ac2180b23e750bedcd31d4
|
test/test_searchentities.py
|
test/test_searchentities.py
|
import unittest
from . import models
from sir.schema.searchentities import SearchEntity as E, SearchField as F
class QueryResultToDictTest(unittest.TestCase):
def setUp(self):
self.entity = E(models.B, [
F("id", "id"),
F("c_bar", "c.bar"),
F("c_bar_trans", "c.bar", transformfunc=lambda v:
v.union(set(["yay"])))
],
1.1
)
self.expected = {
"id": 1,
"c_bar": "foo",
"c_bar_trans": set(["foo", "yay"]),
}
c = models.C(id=2, bar="foo")
self.val = models.B(id=1, c=c)
def test_fields(self):
res = self.entity.query_result_to_dict(self.val)
self.assertDictEqual(self.expected, res)
|
import mock
import unittest
from . import models
from xml.etree.ElementTree import Element, tostring
from sir.schema.searchentities import SearchEntity as E, SearchField as F
class QueryResultToDictTest(unittest.TestCase):
def setUp(self):
self.entity = E(models.B, [
F("id", "id"),
F("c_bar", "c.bar"),
F("c_bar_trans", "c.bar", transformfunc=lambda v:
v.union(set(["yay"])))
],
1.1
)
self.expected = {
"id": 1,
"c_bar": "foo",
"c_bar_trans": set(["foo", "yay"]),
}
c = models.C(id=2, bar="foo")
self.val = models.B(id=1, c=c)
def test_fields(self):
res = self.entity.query_result_to_dict(self.val)
self.assertDictEqual(self.expected, res)
def test_conversion(self):
elem = Element("testelem", text="text")
convmock = mock.Mock()
convmock.to_etree.return_value = elem
self.entity.compatconverter = lambda x: convmock
res = self.entity.query_result_to_dict(self.val)
self.expected["_store"] = tostring(elem)
self.assertDictEqual(self.expected, res)
self.assertEqual(convmock.to_etree.call_count, 1)
|
Add a test for wscompat conversion
|
Add a test for wscompat conversion
|
Python
|
mit
|
jeffweeksio/sir
|
python
|
## Code Before:
import unittest
from . import models
from sir.schema.searchentities import SearchEntity as E, SearchField as F
class QueryResultToDictTest(unittest.TestCase):
def setUp(self):
self.entity = E(models.B, [
F("id", "id"),
F("c_bar", "c.bar"),
F("c_bar_trans", "c.bar", transformfunc=lambda v:
v.union(set(["yay"])))
],
1.1
)
self.expected = {
"id": 1,
"c_bar": "foo",
"c_bar_trans": set(["foo", "yay"]),
}
c = models.C(id=2, bar="foo")
self.val = models.B(id=1, c=c)
def test_fields(self):
res = self.entity.query_result_to_dict(self.val)
self.assertDictEqual(self.expected, res)
## Instruction:
Add a test for wscompat conversion
## Code After:
import mock
import unittest
from . import models
from xml.etree.ElementTree import Element, tostring
from sir.schema.searchentities import SearchEntity as E, SearchField as F
class QueryResultToDictTest(unittest.TestCase):
def setUp(self):
self.entity = E(models.B, [
F("id", "id"),
F("c_bar", "c.bar"),
F("c_bar_trans", "c.bar", transformfunc=lambda v:
v.union(set(["yay"])))
],
1.1
)
self.expected = {
"id": 1,
"c_bar": "foo",
"c_bar_trans": set(["foo", "yay"]),
}
c = models.C(id=2, bar="foo")
self.val = models.B(id=1, c=c)
def test_fields(self):
res = self.entity.query_result_to_dict(self.val)
self.assertDictEqual(self.expected, res)
def test_conversion(self):
elem = Element("testelem", text="text")
convmock = mock.Mock()
convmock.to_etree.return_value = elem
self.entity.compatconverter = lambda x: convmock
res = self.entity.query_result_to_dict(self.val)
self.expected["_store"] = tostring(elem)
self.assertDictEqual(self.expected, res)
self.assertEqual(convmock.to_etree.call_count, 1)
|
...
import mock
import unittest
from . import models
from xml.etree.ElementTree import Element, tostring
from sir.schema.searchentities import SearchEntity as E, SearchField as F
...
def test_fields(self):
res = self.entity.query_result_to_dict(self.val)
self.assertDictEqual(self.expected, res)
def test_conversion(self):
elem = Element("testelem", text="text")
convmock = mock.Mock()
convmock.to_etree.return_value = elem
self.entity.compatconverter = lambda x: convmock
res = self.entity.query_result_to_dict(self.val)
self.expected["_store"] = tostring(elem)
self.assertDictEqual(self.expected, res)
self.assertEqual(convmock.to_etree.call_count, 1)
...
|
a95f74e926a9381e16688fbebd017f676d19b7a5
|
setup.py
|
setup.py
|
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
pytest.main(self.test_args)
setup(
name='Flask-Pushrod',
version='0.1-dev',
url='http://github.com/dontcare4free/flask-pushrod',
license='MIT',
author='Nullable',
author_email='[email protected]',
description='An API microframework based on the idea of that the UI is just yet another endpoint',
packages=['flask_pushrod', 'flask_pushrod.renderers'],
zip_safe=False,
platforms='any',
install_requires=[
'Werkzeug>=0.7',
'Flask>=0.9',
],
tests_require=[
'pytest>=2.2.4',
],
cmdclass={'test': PyTest},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
raise SystemExit(pytest.main(self.test_args))
setup(
name='Flask-Pushrod',
version='0.1-dev',
url='http://github.com/dontcare4free/flask-pushrod',
license='MIT',
author='Nullable',
author_email='[email protected]',
description='An API microframework based on the idea of that the UI is just yet another endpoint',
packages=['flask_pushrod', 'flask_pushrod.renderers'],
zip_safe=False,
platforms='any',
install_requires=[
'Werkzeug>=0.7',
'Flask>=0.9',
],
tests_require=[
'pytest>=2.2.4',
],
cmdclass={'test': PyTest},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
Make sure that the error code is returned properly
|
Make sure that the error code is returned properly
|
Python
|
mit
|
teozkr/Flask-Pushrod,UYSio/Flask-Pushrod,teozkr/Flask-Pushrod
|
python
|
## Code Before:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
pytest.main(self.test_args)
setup(
name='Flask-Pushrod',
version='0.1-dev',
url='http://github.com/dontcare4free/flask-pushrod',
license='MIT',
author='Nullable',
author_email='[email protected]',
description='An API microframework based on the idea of that the UI is just yet another endpoint',
packages=['flask_pushrod', 'flask_pushrod.renderers'],
zip_safe=False,
platforms='any',
install_requires=[
'Werkzeug>=0.7',
'Flask>=0.9',
],
tests_require=[
'pytest>=2.2.4',
],
cmdclass={'test': PyTest},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
## Instruction:
Make sure that the error code is returned properly
## Code After:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
raise SystemExit(pytest.main(self.test_args))
setup(
name='Flask-Pushrod',
version='0.1-dev',
url='http://github.com/dontcare4free/flask-pushrod',
license='MIT',
author='Nullable',
author_email='[email protected]',
description='An API microframework based on the idea of that the UI is just yet another endpoint',
packages=['flask_pushrod', 'flask_pushrod.renderers'],
zip_safe=False,
platforms='any',
install_requires=[
'Werkzeug>=0.7',
'Flask>=0.9',
],
tests_require=[
'pytest>=2.2.4',
],
cmdclass={'test': PyTest},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
...
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
raise SystemExit(pytest.main(self.test_args))
setup(
...
|
614310c7aedf3273523352b1ee16660e7bbf9601
|
test/Analysis/security-syntax-checks.c
|
test/Analysis/security-syntax-checks.c
|
// RUN: %clang_analyze_cc1 %s -verify \
// RUN: -analyzer-checker=security.insecureAPI
void builtin_function_call_crash_fixes(char *c) {
__builtin_strncpy(c, "", 6); // expected-warning{{Call to function 'strncpy' is insecure as it does not provide security checks introduced in the C11 standard.}}
__builtin_memset(c, '\0', (0)); // expected-warning{{Call to function 'memset' is insecure as it does not provide security checks introduced in the C11 standard.}}
__builtin_memcpy(c, c, 0); // expected-warning{{Call to function 'memcpy' is insecure as it does not provide security checks introduced in the C11 standard.}}
}
|
// RUN: %clang_analyze_cc1 %s -verify \
// RUN: -analyzer-checker=security.insecureAPI
// RUN: %clang_analyze_cc1 %s -verify -std=gnu11 \
// RUN: -analyzer-checker=security.insecureAPI
// RUN: %clang_analyze_cc1 %s -verify -std=gnu99 \
// RUN: -analyzer-checker=security.insecureAPI
void builtin_function_call_crash_fixes(char *c) {
__builtin_strncpy(c, "", 6);
__builtin_memset(c, '\0', (0));
__builtin_memcpy(c, c, 0);
#if __STDC_VERSION__ > 199901
// expected-warning@-5{{Call to function 'strncpy' is insecure as it does not provide security checks introduced in the C11 standard.}}
// expected-warning@-5{{Call to function 'memset' is insecure as it does not provide security checks introduced in the C11 standard.}}
// expected-warning@-5{{Call to function 'memcpy' is insecure as it does not provide security checks introduced in the C11 standard.}}
#else
// expected-no-diagnostics
#endif
}
|
Fix test on PS4 which defaults to gnu99 which does not emit the expected warnings.
|
Fix test on PS4 which defaults to gnu99 which does not emit the expected warnings.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@358626 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang
|
c
|
## Code Before:
// RUN: %clang_analyze_cc1 %s -verify \
// RUN: -analyzer-checker=security.insecureAPI
void builtin_function_call_crash_fixes(char *c) {
__builtin_strncpy(c, "", 6); // expected-warning{{Call to function 'strncpy' is insecure as it does not provide security checks introduced in the C11 standard.}}
__builtin_memset(c, '\0', (0)); // expected-warning{{Call to function 'memset' is insecure as it does not provide security checks introduced in the C11 standard.}}
__builtin_memcpy(c, c, 0); // expected-warning{{Call to function 'memcpy' is insecure as it does not provide security checks introduced in the C11 standard.}}
}
## Instruction:
Fix test on PS4 which defaults to gnu99 which does not emit the expected warnings.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@358626 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// RUN: %clang_analyze_cc1 %s -verify \
// RUN: -analyzer-checker=security.insecureAPI
// RUN: %clang_analyze_cc1 %s -verify -std=gnu11 \
// RUN: -analyzer-checker=security.insecureAPI
// RUN: %clang_analyze_cc1 %s -verify -std=gnu99 \
// RUN: -analyzer-checker=security.insecureAPI
void builtin_function_call_crash_fixes(char *c) {
__builtin_strncpy(c, "", 6);
__builtin_memset(c, '\0', (0));
__builtin_memcpy(c, c, 0);
#if __STDC_VERSION__ > 199901
// expected-warning@-5{{Call to function 'strncpy' is insecure as it does not provide security checks introduced in the C11 standard.}}
// expected-warning@-5{{Call to function 'memset' is insecure as it does not provide security checks introduced in the C11 standard.}}
// expected-warning@-5{{Call to function 'memcpy' is insecure as it does not provide security checks introduced in the C11 standard.}}
#else
// expected-no-diagnostics
#endif
}
|
# ... existing code ...
// RUN: %clang_analyze_cc1 %s -verify \
// RUN: -analyzer-checker=security.insecureAPI
// RUN: %clang_analyze_cc1 %s -verify -std=gnu11 \
// RUN: -analyzer-checker=security.insecureAPI
// RUN: %clang_analyze_cc1 %s -verify -std=gnu99 \
// RUN: -analyzer-checker=security.insecureAPI
void builtin_function_call_crash_fixes(char *c) {
__builtin_strncpy(c, "", 6);
__builtin_memset(c, '\0', (0));
__builtin_memcpy(c, c, 0);
#if __STDC_VERSION__ > 199901
// expected-warning@-5{{Call to function 'strncpy' is insecure as it does not provide security checks introduced in the C11 standard.}}
// expected-warning@-5{{Call to function 'memset' is insecure as it does not provide security checks introduced in the C11 standard.}}
// expected-warning@-5{{Call to function 'memcpy' is insecure as it does not provide security checks introduced in the C11 standard.}}
#else
// expected-no-diagnostics
#endif
}
# ... rest of the code ...
|
3955d10f5dd905610c9621046069ae8dacbb1c1e
|
setup.py
|
setup.py
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'A simple python LOC count tool',
'author': 'Tihomir Saulic',
'url': 'http://github.com/tsaulic/pycount',
'download_url': 'http://github.com/tsaulic/pycount',
'author_email': 'tihomir[DOT]saulic[AT]gmail[DOT]com',
'version': '0.6.1',
'install_requires': ['binaryornot'],
'packages': ['pycount'],
'scripts': ['bin/pycount'],
'name': 'pycount'
}
setup(**config)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'A simple python LOC count tool',
'author': 'Tihomir Saulic',
'url': 'http://github.com/tsaulic/pycount',
'download_url': 'http://github.com/tsaulic/pycount',
'author_email': 'tihomir[DOT]saulic[AT]gmail[DOT]com',
'version': '0.6.2',
'install_requires': ['binaryornot', 'pygal'],
'packages': ['pycount'],
'scripts': ['bin/pycount'],
'name': 'pycount'
}
setup(**config)
|
Add dependency and bump version.
|
Add dependency and bump version.
|
Python
|
mit
|
tsaulic/pycount
|
python
|
## Code Before:
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'A simple python LOC count tool',
'author': 'Tihomir Saulic',
'url': 'http://github.com/tsaulic/pycount',
'download_url': 'http://github.com/tsaulic/pycount',
'author_email': 'tihomir[DOT]saulic[AT]gmail[DOT]com',
'version': '0.6.1',
'install_requires': ['binaryornot'],
'packages': ['pycount'],
'scripts': ['bin/pycount'],
'name': 'pycount'
}
setup(**config)
## Instruction:
Add dependency and bump version.
## Code After:
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'A simple python LOC count tool',
'author': 'Tihomir Saulic',
'url': 'http://github.com/tsaulic/pycount',
'download_url': 'http://github.com/tsaulic/pycount',
'author_email': 'tihomir[DOT]saulic[AT]gmail[DOT]com',
'version': '0.6.2',
'install_requires': ['binaryornot', 'pygal'],
'packages': ['pycount'],
'scripts': ['bin/pycount'],
'name': 'pycount'
}
setup(**config)
|
// ... existing code ...
'url': 'http://github.com/tsaulic/pycount',
'download_url': 'http://github.com/tsaulic/pycount',
'author_email': 'tihomir[DOT]saulic[AT]gmail[DOT]com',
'version': '0.6.2',
'install_requires': ['binaryornot', 'pygal'],
'packages': ['pycount'],
'scripts': ['bin/pycount'],
'name': 'pycount'
// ... rest of the code ...
|
12d525b79e78d8e183d75a2b81221f7d18519897
|
tests/kernel_test.py
|
tests/kernel_test.py
|
from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
remove_files = glob.glob('abcetc_*.json')
for rf in remove_files:
os.remove(rf)
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
files = glob.glob('abcetc_*.json')
assert len(files) == 1
with open(files[0], 'r') as file_data:
data = json.load(file_data)
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
|
from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.config import Config
from kernel.result import Result
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
config = Config('./examples/phone_msg.yml', './tests/modules/file/extension_mocks')
result = Result(config)
Kernel.result = result
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
data = Kernel.result.result[0]
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
|
Fix tests related to result collection
|
Fix tests related to result collection
|
Python
|
mit
|
vdjagilev/desefu
|
python
|
## Code Before:
from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
remove_files = glob.glob('abcetc_*.json')
for rf in remove_files:
os.remove(rf)
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
files = glob.glob('abcetc_*.json')
assert len(files) == 1
with open(files[0], 'r') as file_data:
data = json.load(file_data)
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
## Instruction:
Fix tests related to result collection
## Code After:
from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.config import Config
from kernel.result import Result
from kernel.module_chain import ModuleChain
import glob
import os
import json
def test_get_module():
mod = Kernel.get_module('modules', 'file.Extension')
assert isinstance(mod, AbstractModule)
try:
mod = Kernel.get_module('modules', 'not.Exists')
except SystemExit:
assert True
try:
mod = Kernel.get_module('tests.modules', 'file.WrongModule')
except KeyError:
assert True
def test_main_exec_search():
config = Config('./examples/phone_msg.yml', './tests/modules/file/extension_mocks')
result = Result(config)
Kernel.result = result
mc = ModuleChain()
mc.id = "abcetc"
mc.files = [
'./tests/modules/file/extension_mocks/database.sqlite',
'./tests/modules/file/extension_mocks/database2.db'
]
module = Kernel.get_module('modules', 'file.Extension')
module.files = mc.files
module.args = ['db']
mc.modules.append(module)
Kernel.exec_search([mc])
data = Kernel.result.result[0]
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
assert data['modules'][0]['mod'] == 'file.Extension'
assert data['modules'][0]['files_count'] == 1
assert len(data['modules'][0]['files']) == 1
|
// ... existing code ...
from kernel.kernel import Kernel
from modules import AbstractModule
from kernel.config import Config
from kernel.result import Result
from kernel.module_chain import ModuleChain
import glob
import os
// ... modified code ...
assert True
def test_main_exec_search():
config = Config('./examples/phone_msg.yml', './tests/modules/file/extension_mocks')
result = Result(config)
Kernel.result = result
mc = ModuleChain()
mc.id = "abcetc"
...
Kernel.exec_search([mc])
data = Kernel.result.result[0]
assert data['module_chain_id'] == 'abcetc'
assert len(data['modules']) == 1
// ... rest of the code ...
|
f165467b5d16dea30e43724bbb65e505bc1d9013
|
transpiler/javatests/com/google/j2cl/transpiler/integration/selfreferencingnativetype/Foo.java
|
transpiler/javatests/com/google/j2cl/transpiler/integration/selfreferencingnativetype/Foo.java
|
/*
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.j2cl.transpiler.integration.selfreferencingnativetype;
import jsinterop.annotations.JsType;
@JsType(namespace = "zoo")
public class Foo {
public static String getMe() {
return "me";
}
@JsType(isNative = true, name = "Foo", namespace = "zoo")
public static class ZooFoo {
public static native String getMe();
}
public static String getMeViaNative() {
return ZooFoo.getMe();
}
}
|
/*
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.j2cl.transpiler.integration.selfreferencingnativetype;
import jsinterop.annotations.JsType;
@JsType(namespace = "zoo")
public class Foo {
public static String getMe() {
return "me";
}
// Refer to the implementation "zoo.Foo$impl" instead to avoid creating an "invalid" circular
// reference.
@JsType(isNative = true, name = "Foo$impl", namespace = "zoo")
public static class ZooFoo {
public static native String getMe();
}
public static String getMeViaNative() {
return ZooFoo.getMe();
}
}
|
Fix native reference so that it does not violate circularity rules between $impl and header files.
|
Fix native reference so that it does not violate circularity rules between $impl and header files.
PiperOrigin-RevId: 168731186
|
Java
|
apache-2.0
|
google/j2cl,google/j2cl,google/j2cl,google/j2cl,google/j2cl
|
java
|
## Code Before:
/*
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.j2cl.transpiler.integration.selfreferencingnativetype;
import jsinterop.annotations.JsType;
@JsType(namespace = "zoo")
public class Foo {
public static String getMe() {
return "me";
}
@JsType(isNative = true, name = "Foo", namespace = "zoo")
public static class ZooFoo {
public static native String getMe();
}
public static String getMeViaNative() {
return ZooFoo.getMe();
}
}
## Instruction:
Fix native reference so that it does not violate circularity rules between $impl and header files.
PiperOrigin-RevId: 168731186
## Code After:
/*
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.j2cl.transpiler.integration.selfreferencingnativetype;
import jsinterop.annotations.JsType;
@JsType(namespace = "zoo")
public class Foo {
public static String getMe() {
return "me";
}
// Refer to the implementation "zoo.Foo$impl" instead to avoid creating an "invalid" circular
// reference.
@JsType(isNative = true, name = "Foo$impl", namespace = "zoo")
public static class ZooFoo {
public static native String getMe();
}
public static String getMeViaNative() {
return ZooFoo.getMe();
}
}
|
// ... existing code ...
return "me";
}
// Refer to the implementation "zoo.Foo$impl" instead to avoid creating an "invalid" circular
// reference.
@JsType(isNative = true, name = "Foo$impl", namespace = "zoo")
public static class ZooFoo {
public static native String getMe();
}
// ... rest of the code ...
|
2da853601e9746663aed35b51db3bfc7640dc9c3
|
publisher/middleware.py
|
publisher/middleware.py
|
from threading import current_thread
class PublisherMiddleware(object):
_draft_status = {}
@staticmethod
def is_draft(request):
authenticated = request.user.is_authenticated() and request.user.is_staff
is_draft = 'edit' in request.GET and authenticated
return is_draft
def process_request(self, request):
PublisherMiddleware._draft_status[current_thread()] = self.is_draft(request)
@staticmethod
def process_response(request, response):
try:
del PublisherMiddleware._draft_status[current_thread()]
except KeyError:
pass
return response
@staticmethod
def get_draft_status():
try:
return PublisherMiddleware._draft_status[current_thread()]
except KeyError:
return False
def get_draft_status():
return PublisherMiddleware.get_draft_status()
|
from threading import current_thread
class PublisherMiddleware(object):
_draft_status = {}
@staticmethod
def is_draft(request):
authenticated = request.user.is_authenticated() and request.user.is_staff
is_draft = 'edit' in request.GET and authenticated
return is_draft
def process_request(self, request):
PublisherMiddleware._draft_status[current_thread()] = self.is_draft(request)
@staticmethod
def process_response(request, response):
del PublisherMiddleware._draft_status[current_thread()]
return response
@staticmethod
def get_draft_status():
try:
return PublisherMiddleware._draft_status[current_thread()]
except KeyError:
return False
def get_draft_status():
return PublisherMiddleware.get_draft_status()
|
Remove unecessary try.. except.. block from PublisherMiddleware.process_response().
|
Remove unecessary try.. except.. block from PublisherMiddleware.process_response().
The key should always be set by process_request(), which should always be called
before process_response().
|
Python
|
bsd-3-clause
|
wearehoods/django-model-publisher-ai,wearehoods/django-model-publisher-ai,jp74/django-model-publisher,jp74/django-model-publisher,wearehoods/django-model-publisher-ai,jp74/django-model-publisher
|
python
|
## Code Before:
from threading import current_thread
class PublisherMiddleware(object):
_draft_status = {}
@staticmethod
def is_draft(request):
authenticated = request.user.is_authenticated() and request.user.is_staff
is_draft = 'edit' in request.GET and authenticated
return is_draft
def process_request(self, request):
PublisherMiddleware._draft_status[current_thread()] = self.is_draft(request)
@staticmethod
def process_response(request, response):
try:
del PublisherMiddleware._draft_status[current_thread()]
except KeyError:
pass
return response
@staticmethod
def get_draft_status():
try:
return PublisherMiddleware._draft_status[current_thread()]
except KeyError:
return False
def get_draft_status():
return PublisherMiddleware.get_draft_status()
## Instruction:
Remove unecessary try.. except.. block from PublisherMiddleware.process_response().
The key should always be set by process_request(), which should always be called
before process_response().
## Code After:
from threading import current_thread
class PublisherMiddleware(object):
_draft_status = {}
@staticmethod
def is_draft(request):
authenticated = request.user.is_authenticated() and request.user.is_staff
is_draft = 'edit' in request.GET and authenticated
return is_draft
def process_request(self, request):
PublisherMiddleware._draft_status[current_thread()] = self.is_draft(request)
@staticmethod
def process_response(request, response):
del PublisherMiddleware._draft_status[current_thread()]
return response
@staticmethod
def get_draft_status():
try:
return PublisherMiddleware._draft_status[current_thread()]
except KeyError:
return False
def get_draft_status():
return PublisherMiddleware.get_draft_status()
|
...
@staticmethod
def process_response(request, response):
del PublisherMiddleware._draft_status[current_thread()]
return response
@staticmethod
...
|
67184babe7a89fab9d761ce1d030500e2af4425e
|
src/main/java/com/github/nylle/javafixture/Context.java
|
src/main/java/com/github/nylle/javafixture/Context.java
|
package com.github.nylle.javafixture;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
public class Context {
private final Configuration configuration;
private final Map<Integer, Object> cache = new ConcurrentHashMap<>();
public Context(Configuration configuration) {
if (configuration == null) {
throw new IllegalArgumentException("configuration: null");
}
this.configuration = configuration;
}
public Configuration getConfiguration() {
return configuration;
}
public boolean isCached(SpecimenType type) {
return cache.containsKey(type.hashCode());
}
public <T> T cached(SpecimenType type, T instance) {
cache.putIfAbsent(type.hashCode(), instance);
return (T) cache.get(type.hashCode());
}
public <T> T cached(SpecimenType type) {
return (T) cache.get(type.hashCode());
}
}
|
package com.github.nylle.javafixture;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
public class Context {
private final Configuration configuration;
private final Map<Integer, Object> cache;
public Context(Configuration configuration) {
if (configuration == null) {
throw new IllegalArgumentException("configuration: null");
}
this.configuration = configuration;
this.cache = new ConcurrentHashMap<>();
}
public Context(Configuration configuration, Map<Integer, Object> predefinedInstances) {
if (configuration == null) {
throw new IllegalArgumentException("configuration: null");
}
this.configuration = configuration;
this.cache = new ConcurrentHashMap<>(predefinedInstances);
}
public Configuration getConfiguration() {
return configuration;
}
public boolean isCached(SpecimenType type) {
return cache.containsKey(type.hashCode());
}
public <T> T cached(SpecimenType type, T instance) {
cache.putIfAbsent(type.hashCode(), instance);
return (T) cache.get(type.hashCode());
}
public <T> T cached(SpecimenType type) {
return (T) cache.get(type.hashCode());
}
public <T> T preDefined(SpecimenType type, T instance) {
return cache.containsKey(type.hashCode()) ? (T) cache.get(type.hashCode()) : instance;
}
}
|
Add read-only cache to context
|
JAVAFIXTURE-52: Add read-only cache to context
This will alow to predefine instances per type.
|
Java
|
mit
|
Nylle/JavaFixture
|
java
|
## Code Before:
package com.github.nylle.javafixture;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
public class Context {
private final Configuration configuration;
private final Map<Integer, Object> cache = new ConcurrentHashMap<>();
public Context(Configuration configuration) {
if (configuration == null) {
throw new IllegalArgumentException("configuration: null");
}
this.configuration = configuration;
}
public Configuration getConfiguration() {
return configuration;
}
public boolean isCached(SpecimenType type) {
return cache.containsKey(type.hashCode());
}
public <T> T cached(SpecimenType type, T instance) {
cache.putIfAbsent(type.hashCode(), instance);
return (T) cache.get(type.hashCode());
}
public <T> T cached(SpecimenType type) {
return (T) cache.get(type.hashCode());
}
}
## Instruction:
JAVAFIXTURE-52: Add read-only cache to context
This will alow to predefine instances per type.
## Code After:
package com.github.nylle.javafixture;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
public class Context {
private final Configuration configuration;
private final Map<Integer, Object> cache;
public Context(Configuration configuration) {
if (configuration == null) {
throw new IllegalArgumentException("configuration: null");
}
this.configuration = configuration;
this.cache = new ConcurrentHashMap<>();
}
public Context(Configuration configuration, Map<Integer, Object> predefinedInstances) {
if (configuration == null) {
throw new IllegalArgumentException("configuration: null");
}
this.configuration = configuration;
this.cache = new ConcurrentHashMap<>(predefinedInstances);
}
public Configuration getConfiguration() {
return configuration;
}
public boolean isCached(SpecimenType type) {
return cache.containsKey(type.hashCode());
}
public <T> T cached(SpecimenType type, T instance) {
cache.putIfAbsent(type.hashCode(), instance);
return (T) cache.get(type.hashCode());
}
public <T> T cached(SpecimenType type) {
return (T) cache.get(type.hashCode());
}
public <T> T preDefined(SpecimenType type, T instance) {
return cache.containsKey(type.hashCode()) ? (T) cache.get(type.hashCode()) : instance;
}
}
|
// ... existing code ...
public class Context {
private final Configuration configuration;
private final Map<Integer, Object> cache;
public Context(Configuration configuration) {
// ... modified code ...
}
this.configuration = configuration;
this.cache = new ConcurrentHashMap<>();
}
public Context(Configuration configuration, Map<Integer, Object> predefinedInstances) {
if (configuration == null) {
throw new IllegalArgumentException("configuration: null");
}
this.configuration = configuration;
this.cache = new ConcurrentHashMap<>(predefinedInstances);
}
public Configuration getConfiguration() {
...
public <T> T cached(SpecimenType type) {
return (T) cache.get(type.hashCode());
}
public <T> T preDefined(SpecimenType type, T instance) {
return cache.containsKey(type.hashCode()) ? (T) cache.get(type.hashCode()) : instance;
}
}
// ... rest of the code ...
|
fe229791e558fe9156e74d5e0ef3679c1a99fb13
|
includes/iloglistener.h
|
includes/iloglistener.h
|
// Copyright 2016 Airtame
#ifndef MANAGEDEVICE_LOGGING_ILOGLISTENER_H_
#define MANAGEDEVICE_LOGGING_ILOGLISTENER_H_
#include <string>
// Forward declarations.
enum class ELogLevel;
class ILogListener {
public:
virtual ~ILogListener() {}
virtual void Notify(const std::string& iLog, ELogLevel iLevel) = 0;
};
#endif // MANAGEDEVICE_LOGGING_ILOGLISTENER_H_
|
// Copyright 2016 Pierre Fourgeaud
#ifndef PF_ILOGLISTENER_H_
#define PF_ILOGLISTENER_H_
#include <string>
// Forward declarations.
enum class ELogLevel;
/**
* @brief The ILogListener class
*
* Define an interface to implement when creating a new LogListener.
* SimpleLogger provide 3 default listeners: Buffer, File, Console.
*
* Implement this class if you want to create your own listener.
*/
class ILogListener {
public:
/**
* Virtual destructor.
*/
virtual ~ILogListener() {}
/**
* Pure virtual method to implement when implementing this interface.
* Method called when getting notified.
*
* @params iLog The string representing the messag to log
* @params iLevel Log level of this message
*/
virtual void Notify(const std::string& iLog, ELogLevel iLevel) = 0;
};
#endif // PF_ILOGLISTENER_H_
|
Update ILogListener interface with docs
|
Update ILogListener interface with docs
|
C
|
mit
|
pierrefourgeaud/SimpleLogger
|
c
|
## Code Before:
// Copyright 2016 Airtame
#ifndef MANAGEDEVICE_LOGGING_ILOGLISTENER_H_
#define MANAGEDEVICE_LOGGING_ILOGLISTENER_H_
#include <string>
// Forward declarations.
enum class ELogLevel;
class ILogListener {
public:
virtual ~ILogListener() {}
virtual void Notify(const std::string& iLog, ELogLevel iLevel) = 0;
};
#endif // MANAGEDEVICE_LOGGING_ILOGLISTENER_H_
## Instruction:
Update ILogListener interface with docs
## Code After:
// Copyright 2016 Pierre Fourgeaud
#ifndef PF_ILOGLISTENER_H_
#define PF_ILOGLISTENER_H_
#include <string>
// Forward declarations.
enum class ELogLevel;
/**
* @brief The ILogListener class
*
* Define an interface to implement when creating a new LogListener.
* SimpleLogger provide 3 default listeners: Buffer, File, Console.
*
* Implement this class if you want to create your own listener.
*/
class ILogListener {
public:
/**
* Virtual destructor.
*/
virtual ~ILogListener() {}
/**
* Pure virtual method to implement when implementing this interface.
* Method called when getting notified.
*
* @params iLog The string representing the messag to log
* @params iLevel Log level of this message
*/
virtual void Notify(const std::string& iLog, ELogLevel iLevel) = 0;
};
#endif // PF_ILOGLISTENER_H_
|
# ... existing code ...
// Copyright 2016 Pierre Fourgeaud
#ifndef PF_ILOGLISTENER_H_
#define PF_ILOGLISTENER_H_
#include <string>
# ... modified code ...
// Forward declarations.
enum class ELogLevel;
/**
* @brief The ILogListener class
*
* Define an interface to implement when creating a new LogListener.
* SimpleLogger provide 3 default listeners: Buffer, File, Console.
*
* Implement this class if you want to create your own listener.
*/
class ILogListener {
public:
/**
* Virtual destructor.
*/
virtual ~ILogListener() {}
/**
* Pure virtual method to implement when implementing this interface.
* Method called when getting notified.
*
* @params iLog The string representing the messag to log
* @params iLevel Log level of this message
*/
virtual void Notify(const std::string& iLog, ELogLevel iLevel) = 0;
};
#endif // PF_ILOGLISTENER_H_
# ... rest of the code ...
|
62ae7cf530612fb65f104c805c29fe6d4491c007
|
src/se/vidstige/jadb/AdbServerLauncher.java
|
src/se/vidstige/jadb/AdbServerLauncher.java
|
package se.vidstige.jadb;
import java.io.IOException;
import java.util.Map;
/**
* Launches the ADB server
*/
public class AdbServerLauncher {
private final String executable;
private Subprocess subprocess;
public AdbServerLauncher(Subprocess subprocess, Map<String, String> environment) {
this.subprocess = subprocess;
this.executable = findAdbExecutable(environment);
}
private static String findAdbExecutable(Map<String, String> environment) {
String android_home = environment.get("ANDROID_HOME");
if (android_home == null || android_home.equals("")) {
return "adb";
}
return android_home + "/platform-tools/adb";
}
public void launch() throws IOException, InterruptedException {
Process p = subprocess.execute(new String[]{executable, "start-server"});
p.waitFor();
int exitValue = p.exitValue();
if (exitValue != 0) throw new IOException("adb exited with exit code: " + exitValue);
}
}
|
package se.vidstige.jadb;
import java.io.IOException;
import java.util.Map;
/**
* Launches the ADB server
*/
public class AdbServerLauncher {
private final String executable;
private Subprocess subprocess;
/**
* Creates a new launcher loading ADB from the environment.
*
* @param subprocess the sub-process.
* @param environment the environment to use to locate the ADB executable.
*/
public AdbServerLauncher(Subprocess subprocess, Map<String, String> environment) {
this(subprocess, findAdbExecutable(environment));
}
/**
* Creates a new launcher with the specified ADB.
*
* @param subprocess the sub-process.
* @param executable the location of the ADB executable.
*/
public AdbServerLauncher(Subprocess subprocess, String executable) {
this.subprocess = subprocess;
this.executable = executable;
}
private static String findAdbExecutable(Map<String, String> environment) {
String android_home = environment.get("ANDROID_HOME");
if (android_home == null || android_home.equals("")) {
return "adb";
}
return android_home + "/platform-tools/adb";
}
public void launch() throws IOException, InterruptedException {
Process p = subprocess.execute(new String[]{executable, "start-server"});
p.waitFor();
int exitValue = p.exitValue();
if (exitValue != 0) throw new IOException("adb exited with exit code: " + exitValue);
}
}
|
Allow the location of the ADB binary to be specified.
|
Allow the location of the ADB binary to be specified.
|
Java
|
apache-2.0
|
vidstige/jadb,vidstige/jadb
|
java
|
## Code Before:
package se.vidstige.jadb;
import java.io.IOException;
import java.util.Map;
/**
* Launches the ADB server
*/
public class AdbServerLauncher {
private final String executable;
private Subprocess subprocess;
public AdbServerLauncher(Subprocess subprocess, Map<String, String> environment) {
this.subprocess = subprocess;
this.executable = findAdbExecutable(environment);
}
private static String findAdbExecutable(Map<String, String> environment) {
String android_home = environment.get("ANDROID_HOME");
if (android_home == null || android_home.equals("")) {
return "adb";
}
return android_home + "/platform-tools/adb";
}
public void launch() throws IOException, InterruptedException {
Process p = subprocess.execute(new String[]{executable, "start-server"});
p.waitFor();
int exitValue = p.exitValue();
if (exitValue != 0) throw new IOException("adb exited with exit code: " + exitValue);
}
}
## Instruction:
Allow the location of the ADB binary to be specified.
## Code After:
package se.vidstige.jadb;
import java.io.IOException;
import java.util.Map;
/**
* Launches the ADB server
*/
public class AdbServerLauncher {
private final String executable;
private Subprocess subprocess;
/**
* Creates a new launcher loading ADB from the environment.
*
* @param subprocess the sub-process.
* @param environment the environment to use to locate the ADB executable.
*/
public AdbServerLauncher(Subprocess subprocess, Map<String, String> environment) {
this(subprocess, findAdbExecutable(environment));
}
/**
* Creates a new launcher with the specified ADB.
*
* @param subprocess the sub-process.
* @param executable the location of the ADB executable.
*/
public AdbServerLauncher(Subprocess subprocess, String executable) {
this.subprocess = subprocess;
this.executable = executable;
}
private static String findAdbExecutable(Map<String, String> environment) {
String android_home = environment.get("ANDROID_HOME");
if (android_home == null || android_home.equals("")) {
return "adb";
}
return android_home + "/platform-tools/adb";
}
public void launch() throws IOException, InterruptedException {
Process p = subprocess.execute(new String[]{executable, "start-server"});
p.waitFor();
int exitValue = p.exitValue();
if (exitValue != 0) throw new IOException("adb exited with exit code: " + exitValue);
}
}
|
// ... existing code ...
private final String executable;
private Subprocess subprocess;
/**
* Creates a new launcher loading ADB from the environment.
*
* @param subprocess the sub-process.
* @param environment the environment to use to locate the ADB executable.
*/
public AdbServerLauncher(Subprocess subprocess, Map<String, String> environment) {
this(subprocess, findAdbExecutable(environment));
}
/**
* Creates a new launcher with the specified ADB.
*
* @param subprocess the sub-process.
* @param executable the location of the ADB executable.
*/
public AdbServerLauncher(Subprocess subprocess, String executable) {
this.subprocess = subprocess;
this.executable = executable;
}
private static String findAdbExecutable(Map<String, String> environment) {
// ... rest of the code ...
|
76e5d94e12717db685b0c0c66e893d7e4365a57b
|
examples/connect.py
|
examples/connect.py
|
from psphere.client import Client
from psphere.scripting import BaseScript
class Connect(BaseScript):
def connect(self):
"""A simple connection test to login and print the server time."""
print(self.client.si.CurrentTime())
def main():
client = Client()
print('Successfully connected to %s' % client.server)
c = Connect(client)
c.connect()
client.logout()
if __name__ == '__main__':
main()
|
from psphere import config
from psphere.client import Client
def main(options):
"""A simple connection test to login and print the server time."""
server = config._config_value("general", "server", options.server)
if server is None:
raise ValueError("server must be supplied on command line"
" or in configuration file.")
username = config._config_value("general", "username", options.username)
if username is None:
raise ValueError("username must be supplied on command line"
" or in configuration file.")
password = config._config_value("general", "password", options.password)
if password is None:
raise ValueError("password must be supplied on command line"
" or in configuration file.")
client = Client(server=server, username=username, password=password)
print('Successfully connected to %s' % client.server)
print(client.si.CurrentTime())
client.logout()
if __name__ == "__main__":
from optparse import OptionParser
usage = "Usage: %prog [options]"
parser = OptionParser(usage=usage)
parser.add_option("--server", dest="server",
help="The server to connect to for provisioning")
parser.add_option("--username", dest="username",
help="The username used to connect to the server")
parser.add_option("--password", dest="password",
help="The password used to connect to the server")
(options, args) = parser.parse_args()
main(options)
|
Update the script to accept arguments
|
Update the script to accept arguments
|
Python
|
apache-2.0
|
graphite-server/psphere,jkinred/psphere
|
python
|
## Code Before:
from psphere.client import Client
from psphere.scripting import BaseScript
class Connect(BaseScript):
def connect(self):
"""A simple connection test to login and print the server time."""
print(self.client.si.CurrentTime())
def main():
client = Client()
print('Successfully connected to %s' % client.server)
c = Connect(client)
c.connect()
client.logout()
if __name__ == '__main__':
main()
## Instruction:
Update the script to accept arguments
## Code After:
from psphere import config
from psphere.client import Client
def main(options):
"""A simple connection test to login and print the server time."""
server = config._config_value("general", "server", options.server)
if server is None:
raise ValueError("server must be supplied on command line"
" or in configuration file.")
username = config._config_value("general", "username", options.username)
if username is None:
raise ValueError("username must be supplied on command line"
" or in configuration file.")
password = config._config_value("general", "password", options.password)
if password is None:
raise ValueError("password must be supplied on command line"
" or in configuration file.")
client = Client(server=server, username=username, password=password)
print('Successfully connected to %s' % client.server)
print(client.si.CurrentTime())
client.logout()
if __name__ == "__main__":
from optparse import OptionParser
usage = "Usage: %prog [options]"
parser = OptionParser(usage=usage)
parser.add_option("--server", dest="server",
help="The server to connect to for provisioning")
parser.add_option("--username", dest="username",
help="The username used to connect to the server")
parser.add_option("--password", dest="password",
help="The password used to connect to the server")
(options, args) = parser.parse_args()
main(options)
|
...
from psphere import config
from psphere.client import Client
def main(options):
"""A simple connection test to login and print the server time."""
server = config._config_value("general", "server", options.server)
if server is None:
raise ValueError("server must be supplied on command line"
" or in configuration file.")
username = config._config_value("general", "username", options.username)
if username is None:
raise ValueError("username must be supplied on command line"
" or in configuration file.")
password = config._config_value("general", "password", options.password)
if password is None:
raise ValueError("password must be supplied on command line"
" or in configuration file.")
client = Client(server=server, username=username, password=password)
print('Successfully connected to %s' % client.server)
print(client.si.CurrentTime())
client.logout()
if __name__ == "__main__":
from optparse import OptionParser
usage = "Usage: %prog [options]"
parser = OptionParser(usage=usage)
parser.add_option("--server", dest="server",
help="The server to connect to for provisioning")
parser.add_option("--username", dest="username",
help="The username used to connect to the server")
parser.add_option("--password", dest="password",
help="The password used to connect to the server")
(options, args) = parser.parse_args()
main(options)
...
|
1b602b1fab3931f8efadaadce50412f479514ead
|
apm-sniffer/apm-agent-core/src/main/java/org/skywalking/apm/agent/core/plugin/interceptor/enhance/EnhancedInstance.java
|
apm-sniffer/apm-agent-core/src/main/java/org/skywalking/apm/agent/core/plugin/interceptor/enhance/EnhancedInstance.java
|
package org.skywalking.apm.agent.core.plugin.interceptor.enhance;
/**
* @author wusheng
*/
public interface EnhancedInstance {
Object getSkyWalkingDynamicField();
void setSkyWalkingDynamicField();
}
|
package org.skywalking.apm.agent.core.plugin.interceptor.enhance;
/**
* @author wusheng
*/
public interface EnhancedInstance {
Object getSkyWalkingDynamicField();
void setSkyWalkingDynamicField(Object value);
}
|
Fix a dynamic interface bug.
|
Fix a dynamic interface bug.
|
Java
|
apache-2.0
|
hanahmily/sky-walking,ascrutae/sky-walking,apache/skywalking,OpenSkywalking/skywalking,hanahmily/sky-walking,apache/skywalking,apache/skywalking,ascrutae/sky-walking,OpenSkywalking/skywalking,apache/skywalking,apache/skywalking,ascrutae/sky-walking,ascrutae/sky-walking,zhangkewei/sky-walking,zhangkewei/sky-walking,apache/skywalking,apache/skywalking
|
java
|
## Code Before:
package org.skywalking.apm.agent.core.plugin.interceptor.enhance;
/**
* @author wusheng
*/
public interface EnhancedInstance {
Object getSkyWalkingDynamicField();
void setSkyWalkingDynamicField();
}
## Instruction:
Fix a dynamic interface bug.
## Code After:
package org.skywalking.apm.agent.core.plugin.interceptor.enhance;
/**
* @author wusheng
*/
public interface EnhancedInstance {
Object getSkyWalkingDynamicField();
void setSkyWalkingDynamicField(Object value);
}
|
// ... existing code ...
public interface EnhancedInstance {
Object getSkyWalkingDynamicField();
void setSkyWalkingDynamicField(Object value);
}
// ... rest of the code ...
|
5350fc1edf761f253ce701d2d6113db4e17ed646
|
src/main/java/io/sigpipe/sing/graph/GraphMetrics.java
|
src/main/java/io/sigpipe/sing/graph/GraphMetrics.java
|
package io.sigpipe.sing.graph;
public class GraphMetrics implements Cloneable {
private long vertices;
private long leaves;
public GraphMetrics() {
}
@Override
public Object clone() throws CloneNotSupportedException {
return super.clone();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
GraphMetrics that = (GraphMetrics) obj;
return this.vertices == that.vertices
&& this.leaves == that.leaves;
}
public GraphMetrics(int vertices, int leaves) {
this.vertices = vertices;
this.leaves = leaves;
}
public void setVertexCount(long vertices) {
this.vertices = vertices;
}
public void setLeafCount(long leaves) {
this.leaves = leaves;
}
public void addVertex() {
this.vertices++;
}
public void addVertices(long vertices) {
this.vertices += vertices;
}
public void addLeaf() {
this.leaves++;
}
public void addLeaves(long leaves) {
this.leaves += leaves;
}
public long getVertexCount() {
return this.vertices;
}
public long getLeafCount() {
return this.leaves;
}
public String toString() {
return "V: " + this.vertices + ", L: " + this.leaves;
}
}
|
package io.sigpipe.sing.graph;
public class GraphMetrics implements Cloneable {
private long vertices;
private long leaves;
public GraphMetrics() {
}
@Override
public Object clone() throws CloneNotSupportedException {
return super.clone();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
GraphMetrics that = (GraphMetrics) obj;
return this.vertices == that.vertices
&& this.leaves == that.leaves;
}
public GraphMetrics(int vertices, int leaves) {
this.vertices = vertices;
this.leaves = leaves;
}
public void setVertexCount(long vertices) {
this.vertices = vertices;
}
public void setLeafCount(long leaves) {
this.leaves = leaves;
}
public void addVertex() {
this.vertices++;
}
public void addVertices(long vertices) {
this.vertices += vertices;
}
public void addLeaf() {
this.leaves++;
}
public void addLeaves(long leaves) {
this.leaves += leaves;
}
public void removeVertex() {
this.vertices--;
}
public void removeVertices(long vertices) {
this.vertices -= vertices;
}
public long getVertexCount() {
return this.vertices;
}
public long getLeafCount() {
return this.leaves;
}
public String toString() {
return "V: " + this.vertices + ", L: " + this.leaves;
}
}
|
Add methods to remove vertices
|
Add methods to remove vertices
|
Java
|
bsd-2-clause
|
malensek/sing
|
java
|
## Code Before:
package io.sigpipe.sing.graph;
public class GraphMetrics implements Cloneable {
private long vertices;
private long leaves;
public GraphMetrics() {
}
@Override
public Object clone() throws CloneNotSupportedException {
return super.clone();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
GraphMetrics that = (GraphMetrics) obj;
return this.vertices == that.vertices
&& this.leaves == that.leaves;
}
public GraphMetrics(int vertices, int leaves) {
this.vertices = vertices;
this.leaves = leaves;
}
public void setVertexCount(long vertices) {
this.vertices = vertices;
}
public void setLeafCount(long leaves) {
this.leaves = leaves;
}
public void addVertex() {
this.vertices++;
}
public void addVertices(long vertices) {
this.vertices += vertices;
}
public void addLeaf() {
this.leaves++;
}
public void addLeaves(long leaves) {
this.leaves += leaves;
}
public long getVertexCount() {
return this.vertices;
}
public long getLeafCount() {
return this.leaves;
}
public String toString() {
return "V: " + this.vertices + ", L: " + this.leaves;
}
}
## Instruction:
Add methods to remove vertices
## Code After:
package io.sigpipe.sing.graph;
public class GraphMetrics implements Cloneable {
private long vertices;
private long leaves;
public GraphMetrics() {
}
@Override
public Object clone() throws CloneNotSupportedException {
return super.clone();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
GraphMetrics that = (GraphMetrics) obj;
return this.vertices == that.vertices
&& this.leaves == that.leaves;
}
public GraphMetrics(int vertices, int leaves) {
this.vertices = vertices;
this.leaves = leaves;
}
public void setVertexCount(long vertices) {
this.vertices = vertices;
}
public void setLeafCount(long leaves) {
this.leaves = leaves;
}
public void addVertex() {
this.vertices++;
}
public void addVertices(long vertices) {
this.vertices += vertices;
}
public void addLeaf() {
this.leaves++;
}
public void addLeaves(long leaves) {
this.leaves += leaves;
}
public void removeVertex() {
this.vertices--;
}
public void removeVertices(long vertices) {
this.vertices -= vertices;
}
public long getVertexCount() {
return this.vertices;
}
public long getLeafCount() {
return this.leaves;
}
public String toString() {
return "V: " + this.vertices + ", L: " + this.leaves;
}
}
|
# ... existing code ...
this.leaves += leaves;
}
public void removeVertex() {
this.vertices--;
}
public void removeVertices(long vertices) {
this.vertices -= vertices;
}
public long getVertexCount() {
return this.vertices;
}
# ... rest of the code ...
|
57bb37d7579620005a49613ff90f0a2eec55a77e
|
backend/offers_web.py
|
backend/offers_web.py
|
import falcon
import json
import rethinkdb as r
MAX_OFFERS = 100
class OfferListResource:
def __init__(self):
self._db = r.connect('localhost', 28015)
def on_get(self, req, resp):
"""Returns all offers available"""
try:
limit, page = map(int, (req.params.get('limit', MAX_OFFERS), req.params.get('page', 1)))
except ValueError as e:
raise falcon.HTTPInvalidParam("Limit or page should be a number", "limit or page")
if page < 1:
raise falcon.HTTPInvalidParam("Page cannot be negative or null", "page")
elif limit < 1:
raise falcon.HTTPInvalidParam("Limit cannot be negative or null", "page")
else:
cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db)
count = r.db('voyageavecmoi').table('offers').count()
resp.body = json.dumps(list(cursor))
resp.append_header('X-Max-Elements', count)
app = falcon.API()
app.add_route('/api/offers', OfferListResource())
|
import falcon
import json
import rethinkdb as r
MAX_OFFERS = 100
class OfferListResource:
def __init__(self):
self._db = r.connect('localhost', 28015)
def on_get(self, req, resp):
"""Returns all offers available"""
try:
limit, page = map(int, (req.params.get('limit', MAX_OFFERS), req.params.get('page', 1)))
except ValueError as e:
raise falcon.HTTPInvalidParam("Limit or page should be a number", "limit or page")
if page < 1:
raise falcon.HTTPInvalidParam("Page cannot be negative or null", "page")
elif limit < 1:
raise falcon.HTTPInvalidParam("Limit cannot be negative or null", "page")
else:
cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db)
count = r.db('voyageavecmoi').table('offers').count().run(self._db)
resp.body = json.dumps(list(cursor))
resp.append_header('X-Max-Elements', count)
app = falcon.API()
app.add_route('/api/offers', OfferListResource())
|
Fix max elements in header
|
Fix max elements in header
|
Python
|
agpl-3.0
|
jilljenn/voyageavecmoi,jilljenn/voyageavecmoi,jilljenn/voyageavecmoi
|
python
|
## Code Before:
import falcon
import json
import rethinkdb as r
MAX_OFFERS = 100
class OfferListResource:
def __init__(self):
self._db = r.connect('localhost', 28015)
def on_get(self, req, resp):
"""Returns all offers available"""
try:
limit, page = map(int, (req.params.get('limit', MAX_OFFERS), req.params.get('page', 1)))
except ValueError as e:
raise falcon.HTTPInvalidParam("Limit or page should be a number", "limit or page")
if page < 1:
raise falcon.HTTPInvalidParam("Page cannot be negative or null", "page")
elif limit < 1:
raise falcon.HTTPInvalidParam("Limit cannot be negative or null", "page")
else:
cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db)
count = r.db('voyageavecmoi').table('offers').count()
resp.body = json.dumps(list(cursor))
resp.append_header('X-Max-Elements', count)
app = falcon.API()
app.add_route('/api/offers', OfferListResource())
## Instruction:
Fix max elements in header
## Code After:
import falcon
import json
import rethinkdb as r
MAX_OFFERS = 100
class OfferListResource:
def __init__(self):
self._db = r.connect('localhost', 28015)
def on_get(self, req, resp):
"""Returns all offers available"""
try:
limit, page = map(int, (req.params.get('limit', MAX_OFFERS), req.params.get('page', 1)))
except ValueError as e:
raise falcon.HTTPInvalidParam("Limit or page should be a number", "limit or page")
if page < 1:
raise falcon.HTTPInvalidParam("Page cannot be negative or null", "page")
elif limit < 1:
raise falcon.HTTPInvalidParam("Limit cannot be negative or null", "page")
else:
cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db)
count = r.db('voyageavecmoi').table('offers').count().run(self._db)
resp.body = json.dumps(list(cursor))
resp.append_header('X-Max-Elements', count)
app = falcon.API()
app.add_route('/api/offers', OfferListResource())
|
// ... existing code ...
raise falcon.HTTPInvalidParam("Limit cannot be negative or null", "page")
else:
cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db)
count = r.db('voyageavecmoi').table('offers').count().run(self._db)
resp.body = json.dumps(list(cursor))
resp.append_header('X-Max-Elements', count)
// ... rest of the code ...
|
34e04331fae60e63d0bc0daf47161bc8507835b8
|
setup.py
|
setup.py
|
import os
import sys
from serfclient import __version__
try:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
except ImportError:
from distutils.core import setup
PyTest = lambda x: x
try:
long_description = open(os.path.join(os.path.dirname(__file__),
'README.rst')).read()
except:
long_description = None
setup(
name='serfclient',
version=__version__,
description='Python client for the Serf orchestration tool',
long_description=long_description,
url='https://github.com/KushalP/serfclient-py',
author='Kushal Pisavadia',
author_email='[email protected]',
maintainer='Kushal Pisavadia',
maintainer_email='[email protected]',
keywords=['Serf', 'orchestration', 'service discovery'],
license='MIT',
packages=['serfclient'],
install_requires=['msgpack-python >= 0.4.0'],
tests_require=['pytest >= 2.5.2', 'pytest-cov >= 1.6'],
cmdclass={'test': PyTest}
)
|
import os
import sys
from serfclient import __version__
try:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
except ImportError:
from distutils.core import setup
PyTest = lambda x: x
try:
long_description = open(os.path.join(os.path.dirname(__file__),
'README.rst')).read()
except:
long_description = None
setup(
name='serfclient',
version=__version__,
description='Python client for the Serf orchestration tool',
long_description=long_description,
url='https://github.com/KushalP/serfclient-py',
author='Kushal Pisavadia',
author_email='[email protected]',
maintainer='Kushal Pisavadia',
maintainer_email='[email protected]',
keywords=['Serf', 'orchestration', 'service discovery'],
license='MIT',
packages=['serfclient'],
install_requires=['msgpack-python >= 0.4.0'],
tests_require=['pytest >= 2.5.2',
'pytest-cov >= 1.6',
'python-coveralls >= 2.4.2'],
cmdclass={'test': PyTest}
)
|
Add python-coveralls as a test dependency
|
Add python-coveralls as a test dependency
This is so that we can push our coverage stats to coverage.io.
|
Python
|
mit
|
charleswhchan/serfclient-py,KushalP/serfclient-py
|
python
|
## Code Before:
import os
import sys
from serfclient import __version__
try:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
except ImportError:
from distutils.core import setup
PyTest = lambda x: x
try:
long_description = open(os.path.join(os.path.dirname(__file__),
'README.rst')).read()
except:
long_description = None
setup(
name='serfclient',
version=__version__,
description='Python client for the Serf orchestration tool',
long_description=long_description,
url='https://github.com/KushalP/serfclient-py',
author='Kushal Pisavadia',
author_email='[email protected]',
maintainer='Kushal Pisavadia',
maintainer_email='[email protected]',
keywords=['Serf', 'orchestration', 'service discovery'],
license='MIT',
packages=['serfclient'],
install_requires=['msgpack-python >= 0.4.0'],
tests_require=['pytest >= 2.5.2', 'pytest-cov >= 1.6'],
cmdclass={'test': PyTest}
)
## Instruction:
Add python-coveralls as a test dependency
This is so that we can push our coverage stats to coverage.io.
## Code After:
import os
import sys
from serfclient import __version__
try:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
except ImportError:
from distutils.core import setup
PyTest = lambda x: x
try:
long_description = open(os.path.join(os.path.dirname(__file__),
'README.rst')).read()
except:
long_description = None
setup(
name='serfclient',
version=__version__,
description='Python client for the Serf orchestration tool',
long_description=long_description,
url='https://github.com/KushalP/serfclient-py',
author='Kushal Pisavadia',
author_email='[email protected]',
maintainer='Kushal Pisavadia',
maintainer_email='[email protected]',
keywords=['Serf', 'orchestration', 'service discovery'],
license='MIT',
packages=['serfclient'],
install_requires=['msgpack-python >= 0.4.0'],
tests_require=['pytest >= 2.5.2',
'pytest-cov >= 1.6',
'python-coveralls >= 2.4.2'],
cmdclass={'test': PyTest}
)
|
// ... existing code ...
license='MIT',
packages=['serfclient'],
install_requires=['msgpack-python >= 0.4.0'],
tests_require=['pytest >= 2.5.2',
'pytest-cov >= 1.6',
'python-coveralls >= 2.4.2'],
cmdclass={'test': PyTest}
)
// ... rest of the code ...
|
1b7d84526ac7650f18851610ebef47bdfef828ea
|
scripts/galaxy/gedlab.py
|
scripts/galaxy/gedlab.py
|
from galaxy.datatypes.binary import Binary
import os
import logging
log = logging.getLogger(__name__)
class Count(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
class Presence(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
Binary.register_unsniffable_binary_ext("ct")
Binary.register_unsniffable_binary_ext("pt")
|
from galaxy.datatypes.binary import Binary
import logging
log = logging.getLogger(__name__)
class Count(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
class Presence(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
Binary.register_unsniffable_binary_ext("ct")
Binary.register_unsniffable_binary_ext("pt")
|
Fix pylint warning: Unused import os
|
Fix pylint warning: Unused import os
|
Python
|
bsd-3-clause
|
Winterflower/khmer,kdmurray91/khmer,souravsingh/khmer,jas14/khmer,kdmurray91/khmer,Winterflower/khmer,F1000Research/khmer,jas14/khmer,ged-lab/khmer,kdmurray91/khmer,ged-lab/khmer,F1000Research/khmer,ged-lab/khmer,F1000Research/khmer,jas14/khmer,Winterflower/khmer,souravsingh/khmer,souravsingh/khmer
|
python
|
## Code Before:
from galaxy.datatypes.binary import Binary
import os
import logging
log = logging.getLogger(__name__)
class Count(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
class Presence(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
Binary.register_unsniffable_binary_ext("ct")
Binary.register_unsniffable_binary_ext("pt")
## Instruction:
Fix pylint warning: Unused import os
## Code After:
from galaxy.datatypes.binary import Binary
import logging
log = logging.getLogger(__name__)
class Count(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
class Presence(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
Binary.register_unsniffable_binary_ext("ct")
Binary.register_unsniffable_binary_ext("pt")
|
# ... existing code ...
from galaxy.datatypes.binary import Binary
import logging
log = logging.getLogger(__name__)
# ... rest of the code ...
|
9ae4ebf7e95cb301321911886cbb4041fae1eff6
|
bookmarks/search_indexes.py
|
bookmarks/search_indexes.py
|
from haystack.indexes import CharField, DateTimeField, RealTimeSearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(RealTimeSearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
|
from haystack.indexes import CharField, DateTimeField, SearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(SearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
|
Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`.
|
Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`.
|
Python
|
mit
|
incuna/incuna-bookmarks,incuna/incuna-bookmarks
|
python
|
## Code Before:
from haystack.indexes import CharField, DateTimeField, RealTimeSearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(RealTimeSearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
## Instruction:
Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`.
## Code After:
from haystack.indexes import CharField, DateTimeField, SearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(SearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
|
# ... existing code ...
from haystack.indexes import CharField, DateTimeField, SearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(SearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
# ... rest of the code ...
|
440becfe243a982779cddcbca8d01ac630f8965b
|
kie-ml-dist/src/main/java/org/kie/server/swarm/ml/KieServerMain.java
|
kie-ml-dist/src/main/java/org/kie/server/swarm/ml/KieServerMain.java
|
package org.kie.server.swarm.ml;
import java.util.Arrays;
import org.kie.server.swarm.AbstractKieServerMain;
import org.wildfly.swarm.Swarm;
import org.wildfly.swarm.jaxrs.JAXRSArchive;
public class KieServerMain extends AbstractKieServerMain {
public static void main(String[] args) throws Exception {
Swarm container = new Swarm();
System.out.println("\tBuilding kie server deployable...");
JAXRSArchive deployment = createDeployment(container);
System.out.println("\tStaring Wildfly Swarm....");
container.start();
System.out.println("\tConfiguring kjars to be auto deployed to server " + Arrays.toString(args));
installKJars(args);
System.out.println("\tDeploying kie server ....");
container.deploy(deployment);
}
}
|
package org.kie.server.swarm.ml;
import java.util.Arrays;
import java.util.HashMap;
import org.kie.server.swarm.AbstractKieServerMain;
import org.wildfly.swarm.Swarm;
import org.wildfly.swarm.config.logging.Level;
import org.wildfly.swarm.jaxrs.JAXRSArchive;
import org.wildfly.swarm.logging.LoggingFraction;
public class KieServerMain extends AbstractKieServerMain {
public static void main(String[] args) throws Exception {
Swarm container = new Swarm();
System.out.println("\tBuilding kie server deployable...");
JAXRSArchive deployment = createDeployment(container);
container.fraction(
new LoggingFraction()
.consoleHandler("CONSOLE", c -> {
c.level(Level.INFO);
c.formatter("%d{HH:mm:ss,SSS} %-5p [%c] (%t) %s%e%n");
})
.rootLogger(Level.INFO, "CONSOLE")
);
System.out.println("\tStaring Wildfly Swarm....");
container.start();
System.out.println("\tConfiguring kjars to be auto deployed to server " + Arrays.toString(args));
installKJars(args);
System.out.println("\tDeploying kie server ....");
container.deploy(deployment);
}
}
|
Add Logging configuration into Kie-ML server
|
Add Logging configuration into Kie-ML server
|
Java
|
apache-2.0
|
jesuino/kie-ml,jesuino/kie-ml,jesuino/kie-ml
|
java
|
## Code Before:
package org.kie.server.swarm.ml;
import java.util.Arrays;
import org.kie.server.swarm.AbstractKieServerMain;
import org.wildfly.swarm.Swarm;
import org.wildfly.swarm.jaxrs.JAXRSArchive;
public class KieServerMain extends AbstractKieServerMain {
public static void main(String[] args) throws Exception {
Swarm container = new Swarm();
System.out.println("\tBuilding kie server deployable...");
JAXRSArchive deployment = createDeployment(container);
System.out.println("\tStaring Wildfly Swarm....");
container.start();
System.out.println("\tConfiguring kjars to be auto deployed to server " + Arrays.toString(args));
installKJars(args);
System.out.println("\tDeploying kie server ....");
container.deploy(deployment);
}
}
## Instruction:
Add Logging configuration into Kie-ML server
## Code After:
package org.kie.server.swarm.ml;
import java.util.Arrays;
import java.util.HashMap;
import org.kie.server.swarm.AbstractKieServerMain;
import org.wildfly.swarm.Swarm;
import org.wildfly.swarm.config.logging.Level;
import org.wildfly.swarm.jaxrs.JAXRSArchive;
import org.wildfly.swarm.logging.LoggingFraction;
public class KieServerMain extends AbstractKieServerMain {
public static void main(String[] args) throws Exception {
Swarm container = new Swarm();
System.out.println("\tBuilding kie server deployable...");
JAXRSArchive deployment = createDeployment(container);
container.fraction(
new LoggingFraction()
.consoleHandler("CONSOLE", c -> {
c.level(Level.INFO);
c.formatter("%d{HH:mm:ss,SSS} %-5p [%c] (%t) %s%e%n");
})
.rootLogger(Level.INFO, "CONSOLE")
);
System.out.println("\tStaring Wildfly Swarm....");
container.start();
System.out.println("\tConfiguring kjars to be auto deployed to server " + Arrays.toString(args));
installKJars(args);
System.out.println("\tDeploying kie server ....");
container.deploy(deployment);
}
}
|
# ... existing code ...
import java.util.Arrays;
import java.util.HashMap;
import org.kie.server.swarm.AbstractKieServerMain;
import org.wildfly.swarm.Swarm;
import org.wildfly.swarm.config.logging.Level;
import org.wildfly.swarm.jaxrs.JAXRSArchive;
import org.wildfly.swarm.logging.LoggingFraction;
public class KieServerMain extends AbstractKieServerMain {
# ... modified code ...
System.out.println("\tBuilding kie server deployable...");
JAXRSArchive deployment = createDeployment(container);
container.fraction(
new LoggingFraction()
.consoleHandler("CONSOLE", c -> {
c.level(Level.INFO);
c.formatter("%d{HH:mm:ss,SSS} %-5p [%c] (%t) %s%e%n");
})
.rootLogger(Level.INFO, "CONSOLE")
);
System.out.println("\tStaring Wildfly Swarm....");
container.start();
# ... rest of the code ...
|
3df573e44eb0a348161bef0fbf8da54546a5ca08
|
modules/openlmis-web/src/main/java/org/openlmis/web/controller/vaccine/inventory/LogTagTemperatureController.java
|
modules/openlmis-web/src/main/java/org/openlmis/web/controller/vaccine/inventory/LogTagTemperatureController.java
|
package org.openlmis.web.controller.vaccine.inventory;
/**
* Created by hassan on 5/27/17.
*/
public class LogTagTemperatureController {
}
|
package org.openlmis.web.controller.vaccine.inventory;
import org.openlmis.core.web.controller.BaseController;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import javax.servlet.http.HttpServletRequest;
import static org.springframework.web.bind.annotation.RequestMethod.GET;
/**
* Created by hassan on 5/27/17.
*/
@Controller
@RequestMapping(value = "/log-tag-api/")
public class LogTagTemperatureController extends BaseController {
@RequestMapping(value="insert.json",method=GET, headers = ACCEPT_JSON)
public String save(@RequestBody String donor, HttpServletRequest request){
// String successResponse = String.format("Donor '%s' has been successfully saved");
System.out.println(donor);
System.out.println("Got the Rest API");
return donor;
}
}
|
Add log tag temperature controller
|
Add log tag temperature controller
|
Java
|
agpl-3.0
|
USAID-DELIVER-PROJECT/elmis,USAID-DELIVER-PROJECT/elmis,USAID-DELIVER-PROJECT/elmis,USAID-DELIVER-PROJECT/elmis
|
java
|
## Code Before:
package org.openlmis.web.controller.vaccine.inventory;
/**
* Created by hassan on 5/27/17.
*/
public class LogTagTemperatureController {
}
## Instruction:
Add log tag temperature controller
## Code After:
package org.openlmis.web.controller.vaccine.inventory;
import org.openlmis.core.web.controller.BaseController;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import javax.servlet.http.HttpServletRequest;
import static org.springframework.web.bind.annotation.RequestMethod.GET;
/**
* Created by hassan on 5/27/17.
*/
@Controller
@RequestMapping(value = "/log-tag-api/")
public class LogTagTemperatureController extends BaseController {
@RequestMapping(value="insert.json",method=GET, headers = ACCEPT_JSON)
public String save(@RequestBody String donor, HttpServletRequest request){
// String successResponse = String.format("Donor '%s' has been successfully saved");
System.out.println(donor);
System.out.println("Got the Rest API");
return donor;
}
}
|
# ... existing code ...
package org.openlmis.web.controller.vaccine.inventory;
import org.openlmis.core.web.controller.BaseController;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import javax.servlet.http.HttpServletRequest;
import static org.springframework.web.bind.annotation.RequestMethod.GET;
/**
* Created by hassan on 5/27/17.
*/
@Controller
@RequestMapping(value = "/log-tag-api/")
public class LogTagTemperatureController extends BaseController {
@RequestMapping(value="insert.json",method=GET, headers = ACCEPT_JSON)
public String save(@RequestBody String donor, HttpServletRequest request){
// String successResponse = String.format("Donor '%s' has been successfully saved");
System.out.println(donor);
System.out.println("Got the Rest API");
return donor;
}
}
# ... rest of the code ...
|
ea2086f0add4fe575edf62da9b94956a6f2a57ec
|
modules/container/src/main/java/io/liveoak/container/service/CodecInstallationCompleteService.java
|
modules/container/src/main/java/io/liveoak/container/service/CodecInstallationCompleteService.java
|
package io.liveoak.container.service;
import org.jboss.msc.service.Service;
import org.jboss.msc.service.StartContext;
import org.jboss.msc.service.StartException;
import org.jboss.msc.service.StopContext;
/**
* @author Ken Finnigan
*/
public class CodecInstallationCompleteService implements Service<Void> {
@Override
public void start(StartContext context) throws StartException {
//Do Nothing
}
@Override
public void stop(StopContext context) {
}
@Override
public Void getValue() throws IllegalStateException, IllegalArgumentException {
return null;
}
}
|
package io.liveoak.container.service;
import org.jboss.logging.Logger;
import org.jboss.msc.service.Service;
import org.jboss.msc.service.StartContext;
import org.jboss.msc.service.StartException;
import org.jboss.msc.service.StopContext;
/**
* @author Ken Finnigan
*/
public class CodecInstallationCompleteService implements Service<Void> {
@Override
public void start(StartContext context) throws StartException {
//Do Nothing
log.trace("Codec installation complete.");
}
@Override
public void stop(StopContext context) {
}
@Override
public Void getValue() throws IllegalStateException, IllegalArgumentException {
return null;
}
static final Logger log = Logger.getLogger(CodecInstallationCompleteService.class);
}
|
Add trace message stating codec installation is complete
|
Add trace message stating codec installation is complete
|
Java
|
epl-1.0
|
liveoak-io/liveoak,kyroskoh/liveoak,liveoak-io/liveoak,ammendonca/liveoak,kyroskoh/liveoak,liveoak-io/liveoak,ammendonca/liveoak,ammendonca/liveoak,ammendonca/liveoak,kyroskoh/liveoak,ljshj/liveoak,ljshj/liveoak,liveoak-io/liveoak,ljshj/liveoak,kyroskoh/liveoak,ljshj/liveoak
|
java
|
## Code Before:
package io.liveoak.container.service;
import org.jboss.msc.service.Service;
import org.jboss.msc.service.StartContext;
import org.jboss.msc.service.StartException;
import org.jboss.msc.service.StopContext;
/**
* @author Ken Finnigan
*/
public class CodecInstallationCompleteService implements Service<Void> {
@Override
public void start(StartContext context) throws StartException {
//Do Nothing
}
@Override
public void stop(StopContext context) {
}
@Override
public Void getValue() throws IllegalStateException, IllegalArgumentException {
return null;
}
}
## Instruction:
Add trace message stating codec installation is complete
## Code After:
package io.liveoak.container.service;
import org.jboss.logging.Logger;
import org.jboss.msc.service.Service;
import org.jboss.msc.service.StartContext;
import org.jboss.msc.service.StartException;
import org.jboss.msc.service.StopContext;
/**
* @author Ken Finnigan
*/
public class CodecInstallationCompleteService implements Service<Void> {
@Override
public void start(StartContext context) throws StartException {
//Do Nothing
log.trace("Codec installation complete.");
}
@Override
public void stop(StopContext context) {
}
@Override
public Void getValue() throws IllegalStateException, IllegalArgumentException {
return null;
}
static final Logger log = Logger.getLogger(CodecInstallationCompleteService.class);
}
|
...
package io.liveoak.container.service;
import org.jboss.logging.Logger;
import org.jboss.msc.service.Service;
import org.jboss.msc.service.StartContext;
import org.jboss.msc.service.StartException;
...
@Override
public void start(StartContext context) throws StartException {
//Do Nothing
log.trace("Codec installation complete.");
}
@Override
...
public Void getValue() throws IllegalStateException, IllegalArgumentException {
return null;
}
static final Logger log = Logger.getLogger(CodecInstallationCompleteService.class);
}
...
|
c7372b1fa7f631fbad6381b8ceeadafa0ec02f36
|
kpi/migrations/0020_add_validate_submissions_permission_to_asset.py
|
kpi/migrations/0020_add_validate_submissions_permission_to_asset.py
|
from __future__ import unicode_literals
from django.db import migrations, models
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('kpi', '0017_assetversion_uid_aliases_20170608'),
]
operations = [
migrations.AlterModelOptions(
name='asset',
options={'ordering': ('-date_modified',), 'permissions': (('view_asset', 'Can view asset'), ('share_asset', "Can change asset's sharing settings"), ('add_submissions', 'Can submit data to asset'), ('view_submissions', 'Can view submitted data for asset'), ('change_submissions', 'Can modify submitted data for asset'), ('delete_submissions', 'Can delete submitted data for asset'), ('share_submissions', "Can change sharing settings for asset's submitted data"), ('validate_submissions', 'Can validate submitted data asset'), ('from_kc_only', 'INTERNAL USE ONLY; DO NOT ASSIGN'))},
),
migrations.AlterField(
model_name='asset',
name='_deployment_data',
field=jsonfield.fields.JSONField(default=dict),
),
migrations.AlterField(
model_name='asset',
name='asset_type',
field=models.CharField(default=b'survey', max_length=20, choices=[(b'text', b'text'), (b'question', b'question'), (b'block', b'block'), (b'survey', b'survey'), (b'empty', b'empty')]),
),
migrations.AlterField(
model_name='assetsnapshot',
name='details',
field=jsonfield.fields.JSONField(default=dict),
),
]
|
from __future__ import unicode_literals
from django.db import migrations, models
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('kpi', '0019_add_report_custom_field'),
]
operations = [
migrations.AlterModelOptions(
name='asset',
options={'ordering': ('-date_modified',), 'permissions': (('view_asset', 'Can view asset'), ('share_asset', "Can change asset's sharing settings"), ('add_submissions', 'Can submit data to asset'), ('view_submissions', 'Can view submitted data for asset'), ('change_submissions', 'Can modify submitted data for asset'), ('delete_submissions', 'Can delete submitted data for asset'), ('share_submissions', "Can change sharing settings for asset's submitted data"), ('validate_submissions', 'Can validate submitted data asset'), ('from_kc_only', 'INTERNAL USE ONLY; DO NOT ASSIGN'))},
),
]
|
Rename conflicting migration and remove extra
|
Rename conflicting migration and remove extra
changes autogenerated by `./manage.py makemigrations`
|
Python
|
agpl-3.0
|
kobotoolbox/kpi,kobotoolbox/kpi,onaio/kpi,onaio/kpi,onaio/kpi,kobotoolbox/kpi,kobotoolbox/kpi,onaio/kpi,kobotoolbox/kpi
|
python
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations, models
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('kpi', '0017_assetversion_uid_aliases_20170608'),
]
operations = [
migrations.AlterModelOptions(
name='asset',
options={'ordering': ('-date_modified',), 'permissions': (('view_asset', 'Can view asset'), ('share_asset', "Can change asset's sharing settings"), ('add_submissions', 'Can submit data to asset'), ('view_submissions', 'Can view submitted data for asset'), ('change_submissions', 'Can modify submitted data for asset'), ('delete_submissions', 'Can delete submitted data for asset'), ('share_submissions', "Can change sharing settings for asset's submitted data"), ('validate_submissions', 'Can validate submitted data asset'), ('from_kc_only', 'INTERNAL USE ONLY; DO NOT ASSIGN'))},
),
migrations.AlterField(
model_name='asset',
name='_deployment_data',
field=jsonfield.fields.JSONField(default=dict),
),
migrations.AlterField(
model_name='asset',
name='asset_type',
field=models.CharField(default=b'survey', max_length=20, choices=[(b'text', b'text'), (b'question', b'question'), (b'block', b'block'), (b'survey', b'survey'), (b'empty', b'empty')]),
),
migrations.AlterField(
model_name='assetsnapshot',
name='details',
field=jsonfield.fields.JSONField(default=dict),
),
]
## Instruction:
Rename conflicting migration and remove extra
changes autogenerated by `./manage.py makemigrations`
## Code After:
from __future__ import unicode_literals
from django.db import migrations, models
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('kpi', '0019_add_report_custom_field'),
]
operations = [
migrations.AlterModelOptions(
name='asset',
options={'ordering': ('-date_modified',), 'permissions': (('view_asset', 'Can view asset'), ('share_asset', "Can change asset's sharing settings"), ('add_submissions', 'Can submit data to asset'), ('view_submissions', 'Can view submitted data for asset'), ('change_submissions', 'Can modify submitted data for asset'), ('delete_submissions', 'Can delete submitted data for asset'), ('share_submissions', "Can change sharing settings for asset's submitted data"), ('validate_submissions', 'Can validate submitted data asset'), ('from_kc_only', 'INTERNAL USE ONLY; DO NOT ASSIGN'))},
),
]
|
// ... existing code ...
class Migration(migrations.Migration):
dependencies = [
('kpi', '0019_add_report_custom_field'),
]
operations = [
// ... modified code ...
name='asset',
options={'ordering': ('-date_modified',), 'permissions': (('view_asset', 'Can view asset'), ('share_asset', "Can change asset's sharing settings"), ('add_submissions', 'Can submit data to asset'), ('view_submissions', 'Can view submitted data for asset'), ('change_submissions', 'Can modify submitted data for asset'), ('delete_submissions', 'Can delete submitted data for asset'), ('share_submissions', "Can change sharing settings for asset's submitted data"), ('validate_submissions', 'Can validate submitted data asset'), ('from_kc_only', 'INTERNAL USE ONLY; DO NOT ASSIGN'))},
),
]
// ... rest of the code ...
|
b5c06c452100760c1f507b15f40132366c532ff3
|
src/java/org/apache/ddlutils/platform/HsqlDbBuilder.java
|
src/java/org/apache/ddlutils/platform/HsqlDbBuilder.java
|
package org.apache.ddlutils.platform;
/*
* Copyright 1999-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.ddlutils.PlatformInfo;
import org.apache.ddlutils.model.Table;
/**
* The SQL Builder for the HsqlDb database.
*
* @author James Strachan
* @author Thomas Dudziak
* @version $Revision$
*/
public class HsqlDbBuilder extends SqlBuilder
{
/**
* Creates a new builder instance.
*
* @param info The platform info
*/
public HsqlDbBuilder(PlatformInfo info)
{
super(info);
}
/**
* {@inheritDoc}
*/
public void dropTable(Table table) throws IOException
{
print("DROP TABLE ");
printIdentifier(getTableName(table));
print(" IF EXISTS");
printEndOfStatement();
}
}
|
package org.apache.ddlutils.platform;
/*
* Copyright 1999-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.ddlutils.PlatformInfo;
import org.apache.ddlutils.model.Table;
/**
* The SQL Builder for the HsqlDb database.
*
* @author James Strachan
* @author Thomas Dudziak
* @version $Revision$
*/
public class HsqlDbBuilder extends SqlBuilder
{
/**
* Creates a new builder instance.
*
* @param info The platform info
*/
public HsqlDbBuilder(PlatformInfo info)
{
super(info);
}
/**
* {@inheritDoc}
*/
public void dropTable(Table table) throws IOException
{
print("DROP TABLE ");
printIdentifier(getTableName(table));
print(" IF EXISTS");
printEndOfStatement();
}
/**
* @see org.apache.ddlutils.platform.SqlBuilder#getSelectLastInsertId(org.apache.ddlutils.model.Table)
*/
public String getSelectLastInsertId(Table table)
{
return "CALL IDENTITY()";
}
}
|
Add call identity to hsqldb.
|
Add call identity to hsqldb.
git-svn-id: 9ba7b058a3825ad085cc988a898da8cc978da7ce@330902 13f79535-47bb-0310-9956-ffa450edef68
|
Java
|
apache-2.0
|
nakag/ddlutils,nakag/ddlutils
|
java
|
## Code Before:
package org.apache.ddlutils.platform;
/*
* Copyright 1999-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.ddlutils.PlatformInfo;
import org.apache.ddlutils.model.Table;
/**
* The SQL Builder for the HsqlDb database.
*
* @author James Strachan
* @author Thomas Dudziak
* @version $Revision$
*/
public class HsqlDbBuilder extends SqlBuilder
{
/**
* Creates a new builder instance.
*
* @param info The platform info
*/
public HsqlDbBuilder(PlatformInfo info)
{
super(info);
}
/**
* {@inheritDoc}
*/
public void dropTable(Table table) throws IOException
{
print("DROP TABLE ");
printIdentifier(getTableName(table));
print(" IF EXISTS");
printEndOfStatement();
}
}
## Instruction:
Add call identity to hsqldb.
git-svn-id: 9ba7b058a3825ad085cc988a898da8cc978da7ce@330902 13f79535-47bb-0310-9956-ffa450edef68
## Code After:
package org.apache.ddlutils.platform;
/*
* Copyright 1999-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.ddlutils.PlatformInfo;
import org.apache.ddlutils.model.Table;
/**
* The SQL Builder for the HsqlDb database.
*
* @author James Strachan
* @author Thomas Dudziak
* @version $Revision$
*/
public class HsqlDbBuilder extends SqlBuilder
{
/**
* Creates a new builder instance.
*
* @param info The platform info
*/
public HsqlDbBuilder(PlatformInfo info)
{
super(info);
}
/**
* {@inheritDoc}
*/
public void dropTable(Table table) throws IOException
{
print("DROP TABLE ");
printIdentifier(getTableName(table));
print(" IF EXISTS");
printEndOfStatement();
}
/**
* @see org.apache.ddlutils.platform.SqlBuilder#getSelectLastInsertId(org.apache.ddlutils.model.Table)
*/
public String getSelectLastInsertId(Table table)
{
return "CALL IDENTITY()";
}
}
|
...
print(" IF EXISTS");
printEndOfStatement();
}
/**
* @see org.apache.ddlutils.platform.SqlBuilder#getSelectLastInsertId(org.apache.ddlutils.model.Table)
*/
public String getSelectLastInsertId(Table table)
{
return "CALL IDENTITY()";
}
}
...
|
358defbb6dd3b08405d9feebe6221e80d80489c2
|
modules/core/src/main/java/org/projectodd/wunderboss/codecs/Codecs.java
|
modules/core/src/main/java/org/projectodd/wunderboss/codecs/Codecs.java
|
/*
* Copyright 2014 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.projectodd.wunderboss.codecs;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
public class Codecs {
public Codecs add(Codec codec) {
codecs.put(codec.name(), codec);
codecs.put(codec.contentType(), codec);
return this;
}
public Codec forName(String name) {
return codecs.get(name);
}
public Codec forContentType(String contentType) {
return codecs.get(contentType);
}
public Collection<Codec> codecs() {
return Collections.unmodifiableCollection(codecs.values());
}
private final Map<String, Codec> codecs = new HashMap<>();
}
|
/*
* Copyright 2014 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.projectodd.wunderboss.codecs;
import java.util.*;
public class Codecs {
public Codecs add(Codec codec) {
codecs.put(codec.name(), codec);
codecs.put(codec.contentType(), codec);
return this;
}
public Codec forName(String name) {
return codecs.get(name);
}
public Codec forContentType(String contentType) {
return codecs.get(contentType);
}
public Set<Codec> codecs() {
return Collections.unmodifiableSet(new HashSet<>(codecs.values()));
}
private final Map<String, Codec> codecs = new HashMap<>();
}
|
Return a set of codecs instead of a collection to strip the duplicates.
|
Return a set of codecs instead of a collection to strip the duplicates.
Each codec is in the map twice, once for name and once for
content-type. We don't need both copies returned.
|
Java
|
apache-2.0
|
projectodd/wunderboss,projectodd/wunderboss,projectodd/wunderboss-release,projectodd/wunderboss-release,projectodd/wunderboss-release,projectodd/wunderboss
|
java
|
## Code Before:
/*
* Copyright 2014 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.projectodd.wunderboss.codecs;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
public class Codecs {
public Codecs add(Codec codec) {
codecs.put(codec.name(), codec);
codecs.put(codec.contentType(), codec);
return this;
}
public Codec forName(String name) {
return codecs.get(name);
}
public Codec forContentType(String contentType) {
return codecs.get(contentType);
}
public Collection<Codec> codecs() {
return Collections.unmodifiableCollection(codecs.values());
}
private final Map<String, Codec> codecs = new HashMap<>();
}
## Instruction:
Return a set of codecs instead of a collection to strip the duplicates.
Each codec is in the map twice, once for name and once for
content-type. We don't need both copies returned.
## Code After:
/*
* Copyright 2014 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.projectodd.wunderboss.codecs;
import java.util.*;
public class Codecs {
public Codecs add(Codec codec) {
codecs.put(codec.name(), codec);
codecs.put(codec.contentType(), codec);
return this;
}
public Codec forName(String name) {
return codecs.get(name);
}
public Codec forContentType(String contentType) {
return codecs.get(contentType);
}
public Set<Codec> codecs() {
return Collections.unmodifiableSet(new HashSet<>(codecs.values()));
}
private final Map<String, Codec> codecs = new HashMap<>();
}
|
// ... existing code ...
package org.projectodd.wunderboss.codecs;
import java.util.*;
public class Codecs {
// ... modified code ...
return codecs.get(contentType);
}
public Set<Codec> codecs() {
return Collections.unmodifiableSet(new HashSet<>(codecs.values()));
}
private final Map<String, Codec> codecs = new HashMap<>();
// ... rest of the code ...
|
092c316d474f15ccd59c56709ff1773cdf34365c
|
platform-tooling-support-tests/projects/gradle-kotlin-extensions/build.gradle.kts
|
platform-tooling-support-tests/projects/gradle-kotlin-extensions/build.gradle.kts
|
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
plugins {
kotlin("jvm") version "1.3.10"
}
repositories {
mavenLocal()
mavenCentral()
maven { url = uri("https://oss.sonatype.org/content/repositories/snapshots") }
}
// don't use `build` as target to prevent Jenkins picking up
project.buildDir = file("bin")
// grab jupiter version from system environment
val jupiterVersion = System.getenv("JUNIT_JUPITER_VERSION")
dependencies {
testCompile(kotlin("stdlib-jdk8"))
testCompile("org.junit.jupiter:junit-jupiter:$jupiterVersion")
}
tasks.withType<KotlinCompile>().configureEach {
kotlinOptions {
jvmTarget = "1.8"
apiVersion = "1.1"
languageVersion = "1.1"
}
}
tasks.withType<Test>().configureEach {
useJUnitPlatform()
testLogging {
events("passed", "skipped", "failed")
}
}
|
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
plugins {
kotlin("jvm") version "1.3.50"
}
repositories {
mavenLocal()
mavenCentral()
maven { url = uri("https://oss.sonatype.org/content/repositories/snapshots") }
}
// don't use `build` as target to prevent Jenkins picking up
project.buildDir = file("bin")
// grab jupiter version from system environment
val jupiterVersion = System.getenv("JUNIT_JUPITER_VERSION")
dependencies {
testImplementation(kotlin("stdlib-jdk8"))
testImplementation("org.junit.jupiter:junit-jupiter:$jupiterVersion")
}
tasks.withType<KotlinCompile>().configureEach {
kotlinOptions {
jvmTarget = "1.8"
apiVersion = "1.1"
languageVersion = "1.1"
}
}
tasks.test {
useJUnitPlatform()
testLogging {
events("passed", "skipped", "failed")
}
}
|
Make test compatible with Gradle 6
|
Make test compatible with Gradle 6
|
Kotlin
|
epl-1.0
|
junit-team/junit-lambda,sbrannen/junit-lambda
|
kotlin
|
## Code Before:
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
plugins {
kotlin("jvm") version "1.3.10"
}
repositories {
mavenLocal()
mavenCentral()
maven { url = uri("https://oss.sonatype.org/content/repositories/snapshots") }
}
// don't use `build` as target to prevent Jenkins picking up
project.buildDir = file("bin")
// grab jupiter version from system environment
val jupiterVersion = System.getenv("JUNIT_JUPITER_VERSION")
dependencies {
testCompile(kotlin("stdlib-jdk8"))
testCompile("org.junit.jupiter:junit-jupiter:$jupiterVersion")
}
tasks.withType<KotlinCompile>().configureEach {
kotlinOptions {
jvmTarget = "1.8"
apiVersion = "1.1"
languageVersion = "1.1"
}
}
tasks.withType<Test>().configureEach {
useJUnitPlatform()
testLogging {
events("passed", "skipped", "failed")
}
}
## Instruction:
Make test compatible with Gradle 6
## Code After:
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
plugins {
kotlin("jvm") version "1.3.50"
}
repositories {
mavenLocal()
mavenCentral()
maven { url = uri("https://oss.sonatype.org/content/repositories/snapshots") }
}
// don't use `build` as target to prevent Jenkins picking up
project.buildDir = file("bin")
// grab jupiter version from system environment
val jupiterVersion = System.getenv("JUNIT_JUPITER_VERSION")
dependencies {
testImplementation(kotlin("stdlib-jdk8"))
testImplementation("org.junit.jupiter:junit-jupiter:$jupiterVersion")
}
tasks.withType<KotlinCompile>().configureEach {
kotlinOptions {
jvmTarget = "1.8"
apiVersion = "1.1"
languageVersion = "1.1"
}
}
tasks.test {
useJUnitPlatform()
testLogging {
events("passed", "skipped", "failed")
}
}
|
# ... existing code ...
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
plugins {
kotlin("jvm") version "1.3.50"
}
repositories {
# ... modified code ...
val jupiterVersion = System.getenv("JUNIT_JUPITER_VERSION")
dependencies {
testImplementation(kotlin("stdlib-jdk8"))
testImplementation("org.junit.jupiter:junit-jupiter:$jupiterVersion")
}
tasks.withType<KotlinCompile>().configureEach {
...
}
}
tasks.test {
useJUnitPlatform()
testLogging {
events("passed", "skipped", "failed")
# ... rest of the code ...
|
66eadda6a2a26cfef2f38449736183b8b5175022
|
pytest_django/__init__.py
|
pytest_django/__init__.py
|
from .plugin import *
from .funcargs import *
from .marks import *
|
from pytest_django.plugin import *
from pytest_django.funcargs import *
from pytest_django.marks import *
# When Python 2.5 support is dropped, these imports can be used instead:
# from .plugin import *
# from .funcargs import *
# from .marks import *
|
Make module imports 2.5 compatible
|
Make module imports 2.5 compatible
|
Python
|
bsd-3-clause
|
bforchhammer/pytest-django,thedrow/pytest-django,reincubate/pytest-django,aptivate/pytest-django,felixonmars/pytest-django,pelme/pytest-django,ojake/pytest-django,hoh/pytest-django,RonnyPfannschmidt/pytest_django,ktosiek/pytest-django,davidszotten/pytest-django,pombredanne/pytest_django,tomviner/pytest-django
|
python
|
## Code Before:
from .plugin import *
from .funcargs import *
from .marks import *
## Instruction:
Make module imports 2.5 compatible
## Code After:
from pytest_django.plugin import *
from pytest_django.funcargs import *
from pytest_django.marks import *
# When Python 2.5 support is dropped, these imports can be used instead:
# from .plugin import *
# from .funcargs import *
# from .marks import *
|
# ... existing code ...
from pytest_django.plugin import *
from pytest_django.funcargs import *
from pytest_django.marks import *
# When Python 2.5 support is dropped, these imports can be used instead:
# from .plugin import *
# from .funcargs import *
# from .marks import *
# ... rest of the code ...
|
cd8fe432077bdd65122189dd9191d7a5b8788e48
|
reinforcement-learning/play.py
|
reinforcement-learning/play.py
|
"""This is the agent which currently takes the action with highest immediate reward."""
import env
import time
env.make("pygame")
for episode in range(10):
env.reset()
episode_reward = 0
for t in range(100):
episode_reward += env.actual_reward
if env.done:
print(
"Episode %d finished after %d timesteps, with reward %d"
% ((episode + 1), (t + 1), episode_reward))
break
max_action = -1
index = -1
for item in env.actions:
print(item)
print(env.reward(item))
if env.reward(item) > max_action:
print("greater")
max_action = env.reward(item)
action = [item, index]
else:
index += 1
print(action[0])
episode_reward += env.reward(action[0])
env.action(action[0])
env.render()
|
"""This is the agent which currently takes the action with highest immediate reward."""
import time
start = time.time()
import env
import rl
env.make("text")
for episode in range(1000):
env.reset()
episode_reward = 0
for t in range(100):
episode_reward += env.actual_reward
if env.done:
print(
"Episode %d finished after %d timesteps, with reward %d"
% ((episode + 1), (t + 1), episode_reward))
break
action = rl.choose_action(rl.table[env.object[0]])
rl.q(env.player, action)
print(action)
episode_reward += env.reward(action)
env.action(action)
env.update()
print(rl.table[env.object[0]])
print("Finished after", str(time.time() - start), "seconds")
|
Use proper q learning for agent.
|
Use proper q learning for agent.
|
Python
|
mit
|
danieloconell/Louis
|
python
|
## Code Before:
"""This is the agent which currently takes the action with highest immediate reward."""
import env
import time
env.make("pygame")
for episode in range(10):
env.reset()
episode_reward = 0
for t in range(100):
episode_reward += env.actual_reward
if env.done:
print(
"Episode %d finished after %d timesteps, with reward %d"
% ((episode + 1), (t + 1), episode_reward))
break
max_action = -1
index = -1
for item in env.actions:
print(item)
print(env.reward(item))
if env.reward(item) > max_action:
print("greater")
max_action = env.reward(item)
action = [item, index]
else:
index += 1
print(action[0])
episode_reward += env.reward(action[0])
env.action(action[0])
env.render()
## Instruction:
Use proper q learning for agent.
## Code After:
"""This is the agent which currently takes the action with highest immediate reward."""
import time
start = time.time()
import env
import rl
env.make("text")
for episode in range(1000):
env.reset()
episode_reward = 0
for t in range(100):
episode_reward += env.actual_reward
if env.done:
print(
"Episode %d finished after %d timesteps, with reward %d"
% ((episode + 1), (t + 1), episode_reward))
break
action = rl.choose_action(rl.table[env.object[0]])
rl.q(env.player, action)
print(action)
episode_reward += env.reward(action)
env.action(action)
env.update()
print(rl.table[env.object[0]])
print("Finished after", str(time.time() - start), "seconds")
|
...
"""This is the agent which currently takes the action with highest immediate reward."""
import time
start = time.time()
import env
import rl
env.make("text")
for episode in range(1000):
env.reset()
episode_reward = 0
for t in range(100):
...
"Episode %d finished after %d timesteps, with reward %d"
% ((episode + 1), (t + 1), episode_reward))
break
action = rl.choose_action(rl.table[env.object[0]])
rl.q(env.player, action)
print(action)
episode_reward += env.reward(action)
env.action(action)
env.update()
print(rl.table[env.object[0]])
print("Finished after", str(time.time() - start), "seconds")
...
|
d1bd82008c21942dee0ed29ba6d4f9eb54f2af33
|
issues/signals.py
|
issues/signals.py
|
from django.dispatch import Signal
#: Signal fired when a new issue is posted via the API.
issue_posted = Signal(providing_args=('request', 'issue'))
|
from django.dispatch import Signal
#: Signal fired when a new issue is posted via the API.
issue_posted = Signal() # Provides arguments: ('request', 'issue')
|
Remove documenting argument from Signal
|
Remove documenting argument from Signal
|
Python
|
mit
|
6aika/issue-reporting,6aika/issue-reporting,6aika/issue-reporting
|
python
|
## Code Before:
from django.dispatch import Signal
#: Signal fired when a new issue is posted via the API.
issue_posted = Signal(providing_args=('request', 'issue'))
## Instruction:
Remove documenting argument from Signal
## Code After:
from django.dispatch import Signal
#: Signal fired when a new issue is posted via the API.
issue_posted = Signal() # Provides arguments: ('request', 'issue')
|
// ... existing code ...
from django.dispatch import Signal
#: Signal fired when a new issue is posted via the API.
issue_posted = Signal() # Provides arguments: ('request', 'issue')
// ... rest of the code ...
|
9d94a753c4824df210753996edaa9f7910df5fa8
|
tests/test_sample_app.py
|
tests/test_sample_app.py
|
import pytest
@pytest.fixture
def app():
import sys
sys.path.append('.')
from sample_app import create_app
return create_app()
@pytest.fixture
def client(app):
return app.test_client()
def test_index(client):
client.get('/')
|
import pytest
@pytest.fixture
def app():
import sys
sys.path.append('.')
from sample_app import create_app
return create_app()
@pytest.fixture
def client(app):
return app.test_client()
def test_index(client):
resp = client.get('/')
assert resp.status == 200
|
Check for status code of 200 in sample app.
|
Check for status code of 200 in sample app.
|
Python
|
apache-2.0
|
JingZhou0404/flask-bootstrap,scorpiovn/flask-bootstrap,suvorom/flask-bootstrap,BeardedSteve/flask-bootstrap,ser/flask-bootstrap,suvorom/flask-bootstrap,victorbjorklund/flask-bootstrap,BeardedSteve/flask-bootstrap,ser/flask-bootstrap,livepy/flask-bootstrap,victorbjorklund/flask-bootstrap,dingocuster/flask-bootstrap,Coxious/flask-bootstrap,Coxious/flask-bootstrap,vishnugonela/flask-bootstrap,moha24/flask-bootstrap,eshijia/flask-bootstrap,dingocuster/flask-bootstrap,victorbjorklund/flask-bootstrap,eshijia/flask-bootstrap,vishnugonela/flask-bootstrap,JingZhou0404/flask-bootstrap,ser/flask-bootstrap,scorpiovn/flask-bootstrap,vishnugonela/flask-bootstrap,livepy/flask-bootstrap,JingZhou0404/flask-bootstrap,dingocuster/flask-bootstrap,livepy/flask-bootstrap,moha24/flask-bootstrap,Coxious/flask-bootstrap,BeardedSteve/flask-bootstrap,eshijia/flask-bootstrap,suvorom/flask-bootstrap,scorpiovn/flask-bootstrap,moha24/flask-bootstrap
|
python
|
## Code Before:
import pytest
@pytest.fixture
def app():
import sys
sys.path.append('.')
from sample_app import create_app
return create_app()
@pytest.fixture
def client(app):
return app.test_client()
def test_index(client):
client.get('/')
## Instruction:
Check for status code of 200 in sample app.
## Code After:
import pytest
@pytest.fixture
def app():
import sys
sys.path.append('.')
from sample_app import create_app
return create_app()
@pytest.fixture
def client(app):
return app.test_client()
def test_index(client):
resp = client.get('/')
assert resp.status == 200
|
// ... existing code ...
def test_index(client):
resp = client.get('/')
assert resp.status == 200
// ... rest of the code ...
|
c2056d49e3dc4ca6bbabb42de8ce772c74c79bee
|
src/main/java/com/elmakers/mine/bukkit/api/spell/SpellResult.java
|
src/main/java/com/elmakers/mine/bukkit/api/spell/SpellResult.java
|
package com.elmakers.mine.bukkit.api.spell;
/**
* Every Spell will return a SpellResult when cast. This result
* will determine the messaging and effects used, as well as whether
* or not the Spell cast consumes its CastingCost costs.
*
* A Spell that fails to cast will not consume costs or register for cooldown.
*/
public enum SpellResult {
CAST,
AREA,
FIZZLE,
BACKFIRE,
FAIL,
CANCEL,
INSUFFICIENT_RESOURCES,
INSUFFICIENT_PERMISSION,
COOLDOWN,
NO_TARGET,
RESTRICTED,
TARGET_SELECTED,
PLAYER_REQUIRED,
WORLD_REQUIRED,
INVALID_WORLD,
COST_FREE;
/**
* Determine if this result is a success or not.
*
* @return True if this cast was a success.
*/
public boolean isSuccess() {
return this == CAST || this == AREA || this == FIZZLE || this == BACKFIRE;
}
}
|
package com.elmakers.mine.bukkit.api.spell;
/**
* Every Spell will return a SpellResult when cast. This result
* will determine the messaging and effects used, as well as whether
* or not the Spell cast consumes its CastingCost costs.
*
* A Spell that fails to cast will not consume costs or register for cooldown.
*/
public enum SpellResult {
CAST,
AREA,
FIZZLE,
BACKFIRE,
FAIL,
CANCEL,
INSUFFICIENT_RESOURCES,
INSUFFICIENT_PERMISSION,
COOLDOWN,
NO_TARGET,
RESTRICTED,
TARGET_SELECTED,
ENTITY_REQUIRED,
LIVING_ENTITY_REQUIRED,
PLAYER_REQUIRED,
LOCATION_REQUIRED,
WORLD_REQUIRED,
INVALID_WORLD,
COST_FREE;
/**
* Determine if this result is a success or not.
*
* @return True if this cast was a success.
*/
public boolean isSuccess() {
return this == CAST || this == AREA || this == FIZZLE || this == BACKFIRE;
}
}
|
Add some new spell result types
|
Add some new spell result types
|
Java
|
mit
|
elBukkit/MagicPlugin,elBukkit/MagicPlugin,elBukkit/MagicPlugin,elBukkit/MagicAPI
|
java
|
## Code Before:
package com.elmakers.mine.bukkit.api.spell;
/**
* Every Spell will return a SpellResult when cast. This result
* will determine the messaging and effects used, as well as whether
* or not the Spell cast consumes its CastingCost costs.
*
* A Spell that fails to cast will not consume costs or register for cooldown.
*/
public enum SpellResult {
CAST,
AREA,
FIZZLE,
BACKFIRE,
FAIL,
CANCEL,
INSUFFICIENT_RESOURCES,
INSUFFICIENT_PERMISSION,
COOLDOWN,
NO_TARGET,
RESTRICTED,
TARGET_SELECTED,
PLAYER_REQUIRED,
WORLD_REQUIRED,
INVALID_WORLD,
COST_FREE;
/**
* Determine if this result is a success or not.
*
* @return True if this cast was a success.
*/
public boolean isSuccess() {
return this == CAST || this == AREA || this == FIZZLE || this == BACKFIRE;
}
}
## Instruction:
Add some new spell result types
## Code After:
package com.elmakers.mine.bukkit.api.spell;
/**
* Every Spell will return a SpellResult when cast. This result
* will determine the messaging and effects used, as well as whether
* or not the Spell cast consumes its CastingCost costs.
*
* A Spell that fails to cast will not consume costs or register for cooldown.
*/
public enum SpellResult {
CAST,
AREA,
FIZZLE,
BACKFIRE,
FAIL,
CANCEL,
INSUFFICIENT_RESOURCES,
INSUFFICIENT_PERMISSION,
COOLDOWN,
NO_TARGET,
RESTRICTED,
TARGET_SELECTED,
ENTITY_REQUIRED,
LIVING_ENTITY_REQUIRED,
PLAYER_REQUIRED,
LOCATION_REQUIRED,
WORLD_REQUIRED,
INVALID_WORLD,
COST_FREE;
/**
* Determine if this result is a success or not.
*
* @return True if this cast was a success.
*/
public boolean isSuccess() {
return this == CAST || this == AREA || this == FIZZLE || this == BACKFIRE;
}
}
|
...
NO_TARGET,
RESTRICTED,
TARGET_SELECTED,
ENTITY_REQUIRED,
LIVING_ENTITY_REQUIRED,
PLAYER_REQUIRED,
LOCATION_REQUIRED,
WORLD_REQUIRED,
INVALID_WORLD,
COST_FREE;
...
|
0d38886bd130e3936d8c526570d5fe922d7b036f
|
Votron/src/com/cj/votron/ElectionsActivity.java
|
Votron/src/com/cj/votron/ElectionsActivity.java
|
/**
*
*/
package com.cj.votron;
import java.util.ArrayList;
import java.util.Arrays;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.widget.ArrayAdapter;
import android.widget.ListView;
/**
* @author gvamos
*
*/
public class ElectionsActivity extends Activity {
private ListView electionListView ;
private ArrayAdapter<String> listAdapter ;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.i(this.getClass().getName(),":onCreate");
setContentView(R.layout.activity_elections);
// Find the ListView resource.
electionListView = (ListView) findViewById( R.id.electionsListView );
// Create and populate a List of planet names.
String[] planets = new String[] { "London", "Paris", "Chatsworth" };
ArrayList<String> planetList = new ArrayList<String>();
planetList.addAll( Arrays.asList(planets) );
// Create ArrayAdapter using the planet list.
listAdapter = new ArrayAdapter<String>(this, R.layout.simplerow, planetList);
// Add more planets. If you passed a String[] instead of a List<String>
// into the ArrayAdapter constructor, you must not add more items.
// Otherwise an exception will occur.
listAdapter.add( "Foo" );
listAdapter.add( "Bar" );
listAdapter.add( "Baz" );
// Set the ArrayAdapter as the ListView's adapter.
electionListView.setAdapter( listAdapter );
}
}
|
/**
*
*/
package com.cj.votron;
import java.util.ArrayList;
import java.util.Arrays;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.widget.ArrayAdapter;
import android.widget.ListView;
/**
* @author gvamos
*
*/
public class ElectionsActivity extends Activity {
private ListView electionListView ;
private ArrayAdapter<String> listAdapter ;
private Config.Elections elections;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.i(this.getClass().getName(),":onCreate");
setContentView(R.layout.activity_elections);
elections = Config.getInstance().getElections();
elections.updateElections();
// Shave the yak.
electionListView = (ListView) findViewById( R.id.electionsListView );
listAdapter = new ArrayAdapter<String>(this, R.layout.simplerow, (elections.getElectionsList()));
electionListView.setAdapter( listAdapter );
}
}
|
Refactor to use central config object
|
Refactor to use central config object
|
Java
|
mit
|
gvamos/votron,gvamos/votron
|
java
|
## Code Before:
/**
*
*/
package com.cj.votron;
import java.util.ArrayList;
import java.util.Arrays;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.widget.ArrayAdapter;
import android.widget.ListView;
/**
* @author gvamos
*
*/
public class ElectionsActivity extends Activity {
private ListView electionListView ;
private ArrayAdapter<String> listAdapter ;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.i(this.getClass().getName(),":onCreate");
setContentView(R.layout.activity_elections);
// Find the ListView resource.
electionListView = (ListView) findViewById( R.id.electionsListView );
// Create and populate a List of planet names.
String[] planets = new String[] { "London", "Paris", "Chatsworth" };
ArrayList<String> planetList = new ArrayList<String>();
planetList.addAll( Arrays.asList(planets) );
// Create ArrayAdapter using the planet list.
listAdapter = new ArrayAdapter<String>(this, R.layout.simplerow, planetList);
// Add more planets. If you passed a String[] instead of a List<String>
// into the ArrayAdapter constructor, you must not add more items.
// Otherwise an exception will occur.
listAdapter.add( "Foo" );
listAdapter.add( "Bar" );
listAdapter.add( "Baz" );
// Set the ArrayAdapter as the ListView's adapter.
electionListView.setAdapter( listAdapter );
}
}
## Instruction:
Refactor to use central config object
## Code After:
/**
*
*/
package com.cj.votron;
import java.util.ArrayList;
import java.util.Arrays;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.widget.ArrayAdapter;
import android.widget.ListView;
/**
* @author gvamos
*
*/
public class ElectionsActivity extends Activity {
private ListView electionListView ;
private ArrayAdapter<String> listAdapter ;
private Config.Elections elections;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.i(this.getClass().getName(),":onCreate");
setContentView(R.layout.activity_elections);
elections = Config.getInstance().getElections();
elections.updateElections();
// Shave the yak.
electionListView = (ListView) findViewById( R.id.electionsListView );
listAdapter = new ArrayAdapter<String>(this, R.layout.simplerow, (elections.getElectionsList()));
electionListView.setAdapter( listAdapter );
}
}
|
...
*
*/
public class ElectionsActivity extends Activity {
private ListView electionListView ;
private ArrayAdapter<String> listAdapter ;
private Config.Elections elections;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.i(this.getClass().getName(),":onCreate");
setContentView(R.layout.activity_elections);
elections = Config.getInstance().getElections();
elections.updateElections();
// Shave the yak.
electionListView = (ListView) findViewById( R.id.electionsListView );
listAdapter = new ArrayAdapter<String>(this, R.layout.simplerow, (elections.getElectionsList()));
electionListView.setAdapter( listAdapter );
}
}
...
|
181318bbb9f2e4458b1188bfc8a8ada7f3b4b196
|
moderation_queue/urls.py
|
moderation_queue/urls.py
|
from django.conf.urls import patterns, url
from .views import upload_photo, PhotoUploadSuccess
urlpatterns = patterns('',
url(r'^photo/upload/(?P<popit_person_id>\d+)$',
upload_photo,
name="photo-upload"),
url(r'^photo/upload/success/(?P<popit_person_id>\d+)$',
PhotoUploadSuccess.as_view(),
name="photo-upload-success"),
)
|
from django.conf.urls import patterns, url
from .views import upload_photo, PhotoUploadSuccess
urlpatterns = patterns('',
url(r'^photo/upload/(?P<popit_person_id>\d+)$',
upload_photo,
name="photo-upload"),
url(r'^photo/upload/(?P<popit_person_id>\d+)/success$',
PhotoUploadSuccess.as_view(),
name="photo-upload-success"),
)
|
Rearrange the photo upload success URL for consistency
|
Rearrange the photo upload success URL for consistency
|
Python
|
agpl-3.0
|
datamade/yournextmp-popit,neavouli/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextmp-popit,datamade/yournextmp-popit,DemocracyClub/yournextrepresentative,openstate/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mysociety/yournextrepresentative,datamade/yournextmp-popit,datamade/yournextmp-popit,openstate/yournextrepresentative,DemocracyClub/yournextrepresentative,openstate/yournextrepresentative,YoQuieroSaber/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mysociety/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,neavouli/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextmp-popit,neavouli/yournextrepresentative,openstate/yournextrepresentative,neavouli/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextrepresentative,DemocracyClub/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mysociety/yournextmp-popit
|
python
|
## Code Before:
from django.conf.urls import patterns, url
from .views import upload_photo, PhotoUploadSuccess
urlpatterns = patterns('',
url(r'^photo/upload/(?P<popit_person_id>\d+)$',
upload_photo,
name="photo-upload"),
url(r'^photo/upload/success/(?P<popit_person_id>\d+)$',
PhotoUploadSuccess.as_view(),
name="photo-upload-success"),
)
## Instruction:
Rearrange the photo upload success URL for consistency
## Code After:
from django.conf.urls import patterns, url
from .views import upload_photo, PhotoUploadSuccess
urlpatterns = patterns('',
url(r'^photo/upload/(?P<popit_person_id>\d+)$',
upload_photo,
name="photo-upload"),
url(r'^photo/upload/(?P<popit_person_id>\d+)/success$',
PhotoUploadSuccess.as_view(),
name="photo-upload-success"),
)
|
...
url(r'^photo/upload/(?P<popit_person_id>\d+)$',
upload_photo,
name="photo-upload"),
url(r'^photo/upload/(?P<popit_person_id>\d+)/success$',
PhotoUploadSuccess.as_view(),
name="photo-upload-success"),
)
...
|
5da32c725200d9f3b319be40ae5c2d302dc72249
|
cloudbridge/cloud/providers/azure/test/test_azure_resource_group.py
|
cloudbridge/cloud/providers/azure/test/test_azure_resource_group.py
|
from cloudbridge.cloud.providers.azure.test.helpers import ProviderTestBase
class AzureResourceGroupTestCase(ProviderTestBase):
def test_resource_group_create(self):
resource_group_params = {'location': self.provider.region_name}
rg = self.provider.azure_client. \
create_resource_group(self.provider.resource_group,
resource_group_params)
print("Create Resource - " + str(rg))
self.assertTrue(
rg.name == "cloudbridge",
"Resource Group should be Cloudbridge")
def test_resource_group_get(self):
rg = self.provider.azure_client.get_resource_group('MyGroup')
print("Get Resource - " + str(rg))
self.assertTrue(
rg.name == "testResourceGroup",
"Resource Group should be Cloudbridge")
|
from cloudbridge.cloud.providers.azure.test.helpers import ProviderTestBase
class AzureResourceGroupTestCase(ProviderTestBase):
def test_resource_group_create(self):
resource_group_params = {'location': self.provider.region_name}
rg = self.provider.azure_client. \
create_resource_group(self.provider.resource_group,
resource_group_params)
print("Create Resource - " + str(rg))
self.assertTrue(
rg.name == self.provider.resource_group,
"Resource Group should be {0}".format(rg.name))
def test_resource_group_get(self):
rg = self.provider.azure_client.get_resource_group('MyGroup')
print("Get Resource - " + str(rg))
self.assertTrue(
rg.name == "testResourceGroup",
"Resource Group should be {0}".format(rg.name))
|
Update resource group unit test
|
Update resource group unit test
|
Python
|
mit
|
gvlproject/libcloudbridge,gvlproject/cloudbridge
|
python
|
## Code Before:
from cloudbridge.cloud.providers.azure.test.helpers import ProviderTestBase
class AzureResourceGroupTestCase(ProviderTestBase):
def test_resource_group_create(self):
resource_group_params = {'location': self.provider.region_name}
rg = self.provider.azure_client. \
create_resource_group(self.provider.resource_group,
resource_group_params)
print("Create Resource - " + str(rg))
self.assertTrue(
rg.name == "cloudbridge",
"Resource Group should be Cloudbridge")
def test_resource_group_get(self):
rg = self.provider.azure_client.get_resource_group('MyGroup')
print("Get Resource - " + str(rg))
self.assertTrue(
rg.name == "testResourceGroup",
"Resource Group should be Cloudbridge")
## Instruction:
Update resource group unit test
## Code After:
from cloudbridge.cloud.providers.azure.test.helpers import ProviderTestBase
class AzureResourceGroupTestCase(ProviderTestBase):
def test_resource_group_create(self):
resource_group_params = {'location': self.provider.region_name}
rg = self.provider.azure_client. \
create_resource_group(self.provider.resource_group,
resource_group_params)
print("Create Resource - " + str(rg))
self.assertTrue(
rg.name == self.provider.resource_group,
"Resource Group should be {0}".format(rg.name))
def test_resource_group_get(self):
rg = self.provider.azure_client.get_resource_group('MyGroup')
print("Get Resource - " + str(rg))
self.assertTrue(
rg.name == "testResourceGroup",
"Resource Group should be {0}".format(rg.name))
|
# ... existing code ...
resource_group_params)
print("Create Resource - " + str(rg))
self.assertTrue(
rg.name == self.provider.resource_group,
"Resource Group should be {0}".format(rg.name))
def test_resource_group_get(self):
rg = self.provider.azure_client.get_resource_group('MyGroup')
# ... modified code ...
print("Get Resource - " + str(rg))
self.assertTrue(
rg.name == "testResourceGroup",
"Resource Group should be {0}".format(rg.name))
# ... rest of the code ...
|
a490a85e5842bd31a99a94f2530dbbea2d1c2584
|
bot/game/launch.py
|
bot/game/launch.py
|
import telegram
def callback_query_handler(bot: telegram.Bot, update: telegram.Update):
callback_query = update.callback_query
game_short_name = callback_query.game_short_name
if game_short_name == "rock_paper_scissors":
callback_query_id = callback_query.id
bot.answerCallbackQuery(callback_query_id, url="https://alvarogzp.github.io/telegram-games/rock-paper-scissors.html")
|
import telegram
def callback_query_handler(bot: telegram.Bot, update: telegram.Update):
callback_query = update.callback_query
game_short_name = callback_query.game_short_name
if game_short_name == "rock_paper_scissors":
callback_query_id = callback_query.id
bot.answerCallbackQuery(callback_query_id, url="https://rawgit.com/alvarogzp/telegram-games/develop/games/rock-paper-scissors/game.html")
|
Update url to point to return develop branch html page
|
Update url to point to return develop branch html page
|
Python
|
apache-2.0
|
alvarogzp/telegram-games,alvarogzp/telegram-games,alvarogzp/telegram-games,alvarogzp/telegram-games
|
python
|
## Code Before:
import telegram
def callback_query_handler(bot: telegram.Bot, update: telegram.Update):
callback_query = update.callback_query
game_short_name = callback_query.game_short_name
if game_short_name == "rock_paper_scissors":
callback_query_id = callback_query.id
bot.answerCallbackQuery(callback_query_id, url="https://alvarogzp.github.io/telegram-games/rock-paper-scissors.html")
## Instruction:
Update url to point to return develop branch html page
## Code After:
import telegram
def callback_query_handler(bot: telegram.Bot, update: telegram.Update):
callback_query = update.callback_query
game_short_name = callback_query.game_short_name
if game_short_name == "rock_paper_scissors":
callback_query_id = callback_query.id
bot.answerCallbackQuery(callback_query_id, url="https://rawgit.com/alvarogzp/telegram-games/develop/games/rock-paper-scissors/game.html")
|
// ... existing code ...
game_short_name = callback_query.game_short_name
if game_short_name == "rock_paper_scissors":
callback_query_id = callback_query.id
bot.answerCallbackQuery(callback_query_id, url="https://rawgit.com/alvarogzp/telegram-games/develop/games/rock-paper-scissors/game.html")
// ... rest of the code ...
|
95d86b30d8c5d922bc7ba17d50e5f83eae086e88
|
__init__.py
|
__init__.py
|
import os
import imp
VERSION = '0.4.1'
REPOSITORY = 'http://www.ecologicaldata.org/dbtk/'
def MODULE_LIST():
"""Load scripts from scripts directory and return list of modules."""
files = [file for file in os.listdir("scripts")
if file[-3:] == ".py" and file[0] != "_"]
modules = []
for script in files:
script_name = '.'.join(script.split('.')[:-1])
file, pathname, desc = imp.find_module(script_name, ["scripts"])
try:
new_module = imp.load_module(script_name, file, pathname, desc)
modules.append(new_module)
except:
pass
return modules
def DBTK_LIST():
return [module.SCRIPT for module in MODULE_LIST()]
def ENGINE_LIST():
engines = [
"mysql",
"postgres",
"sqlite",
"msaccess",
]
ENGINE_MODULE_LIST = [
__import__("dbtk.engines." + module, fromlist="engines")
for module in engines
]
return [module.engine() for module in ENGINE_MODULE_LIST]
|
import os
import imp
VERSION = '0.4.1'
REPOSITORY = 'http://www.ecologicaldata.org/dbtk/'
def MODULE_LIST():
"""Load scripts from scripts directory and return list of modules."""
files = [file for file in os.listdir("scripts")
if file[-3:] == ".py" and file[0] != "_"]
modules = []
for script in files:
script_name = '.'.join(script.split('.')[:-1])
file, pathname, desc = imp.find_module(script_name, ["scripts"])
try:
new_module = imp.load_module(script_name, file, pathname, desc)
new_module.SCRIPT
modules.append(new_module)
except:
pass
return modules
def DBTK_LIST():
return [module.SCRIPT for module in MODULE_LIST()]
def ENGINE_LIST():
engines = [
"mysql",
"postgres",
"sqlite",
"msaccess",
]
ENGINE_MODULE_LIST = [
__import__("dbtk.engines." + module, fromlist="engines")
for module in engines
]
return [module.engine() for module in ENGINE_MODULE_LIST]
|
Check that each module is valid before trying to import.
|
Check that each module is valid before trying to import.
|
Python
|
mit
|
embaldridge/retriever,bendmorris/retriever,embaldridge/retriever,goelakash/retriever,henrykironde/deletedret,goelakash/retriever,davharris/retriever,davharris/retriever,embaldridge/retriever,henrykironde/deletedret,davharris/retriever,bendmorris/retriever,bendmorris/retriever
|
python
|
## Code Before:
import os
import imp
VERSION = '0.4.1'
REPOSITORY = 'http://www.ecologicaldata.org/dbtk/'
def MODULE_LIST():
"""Load scripts from scripts directory and return list of modules."""
files = [file for file in os.listdir("scripts")
if file[-3:] == ".py" and file[0] != "_"]
modules = []
for script in files:
script_name = '.'.join(script.split('.')[:-1])
file, pathname, desc = imp.find_module(script_name, ["scripts"])
try:
new_module = imp.load_module(script_name, file, pathname, desc)
modules.append(new_module)
except:
pass
return modules
def DBTK_LIST():
return [module.SCRIPT for module in MODULE_LIST()]
def ENGINE_LIST():
engines = [
"mysql",
"postgres",
"sqlite",
"msaccess",
]
ENGINE_MODULE_LIST = [
__import__("dbtk.engines." + module, fromlist="engines")
for module in engines
]
return [module.engine() for module in ENGINE_MODULE_LIST]
## Instruction:
Check that each module is valid before trying to import.
## Code After:
import os
import imp
VERSION = '0.4.1'
REPOSITORY = 'http://www.ecologicaldata.org/dbtk/'
def MODULE_LIST():
"""Load scripts from scripts directory and return list of modules."""
files = [file for file in os.listdir("scripts")
if file[-3:] == ".py" and file[0] != "_"]
modules = []
for script in files:
script_name = '.'.join(script.split('.')[:-1])
file, pathname, desc = imp.find_module(script_name, ["scripts"])
try:
new_module = imp.load_module(script_name, file, pathname, desc)
new_module.SCRIPT
modules.append(new_module)
except:
pass
return modules
def DBTK_LIST():
return [module.SCRIPT for module in MODULE_LIST()]
def ENGINE_LIST():
engines = [
"mysql",
"postgres",
"sqlite",
"msaccess",
]
ENGINE_MODULE_LIST = [
__import__("dbtk.engines." + module, fromlist="engines")
for module in engines
]
return [module.engine() for module in ENGINE_MODULE_LIST]
|
# ... existing code ...
file, pathname, desc = imp.find_module(script_name, ["scripts"])
try:
new_module = imp.load_module(script_name, file, pathname, desc)
new_module.SCRIPT
modules.append(new_module)
except:
pass
# ... modified code ...
return modules
def DBTK_LIST():
return [module.SCRIPT for module in MODULE_LIST()]
# ... rest of the code ...
|
67d08d61186f7d9bc0026c1d867039f58872fee7
|
main.py
|
main.py
|
import cmd
import argparse
from Interface import *
class Lexeme(cmd.Cmd):
intro = "Welcome to Lexeme! Input '?' for help and commands."
prompt = "Enter command: "
def do_list(self, arg):
'List word database.'
listwords()
def do_quit(self, arg):
quit()
def do_add(self, arg):
add()
def do_decline(self, arg):
decline()
def do_statistics(self, arg):
statistics()
def do_search(self, arg):
search()
def do_generate(self, arg):
generate()
def do_export(self, arg):
export()
def do_batch(self, arg):
batchgenerate()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--database", help="set database file")
parser.add_argument("--config", help="set configuration file")
args = parser.parse_args()
if args.database is not None:
Library.loadDatabase(args.database)
else:
Library.loadDatabase()
if args.config is not None:
loadData(args.config)
else:
loadData()
Lexeme().cmdloop()
|
import cmd
import argparse
from Interface import *
class Lexeme(cmd.Cmd):
intro = "Welcome to Lexeme! Input '?' for help and commands."
prompt = "Enter command: "
def do_list(self, arg):
'List word database.'
listwords()
def do_quit(self, arg):
quit()
def do_add(self, arg):
add()
def do_decline(self, arg):
decline()
def do_statistics(self, arg):
statistics()
def do_search(self, arg):
search()
def do_generate(self, arg):
generate()
def do_export(self, arg):
export()
def do_batch(self, arg):
batchgenerate()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--database", help="set database file")
parser.add_argument("--config", help="set configuration file")
args = parser.parse_args()
if args.database is not None:
Library.loadDatabase(args.database)
else:
Library.loadDatabase()
if args.config is not None:
loadData(args.config)
else:
loadData()
clearScreen()
Lexeme().cmdloop()
|
Clear screen at start of program
|
Clear screen at start of program
|
Python
|
mit
|
kdelwat/Lexeme
|
python
|
## Code Before:
import cmd
import argparse
from Interface import *
class Lexeme(cmd.Cmd):
intro = "Welcome to Lexeme! Input '?' for help and commands."
prompt = "Enter command: "
def do_list(self, arg):
'List word database.'
listwords()
def do_quit(self, arg):
quit()
def do_add(self, arg):
add()
def do_decline(self, arg):
decline()
def do_statistics(self, arg):
statistics()
def do_search(self, arg):
search()
def do_generate(self, arg):
generate()
def do_export(self, arg):
export()
def do_batch(self, arg):
batchgenerate()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--database", help="set database file")
parser.add_argument("--config", help="set configuration file")
args = parser.parse_args()
if args.database is not None:
Library.loadDatabase(args.database)
else:
Library.loadDatabase()
if args.config is not None:
loadData(args.config)
else:
loadData()
Lexeme().cmdloop()
## Instruction:
Clear screen at start of program
## Code After:
import cmd
import argparse
from Interface import *
class Lexeme(cmd.Cmd):
intro = "Welcome to Lexeme! Input '?' for help and commands."
prompt = "Enter command: "
def do_list(self, arg):
'List word database.'
listwords()
def do_quit(self, arg):
quit()
def do_add(self, arg):
add()
def do_decline(self, arg):
decline()
def do_statistics(self, arg):
statistics()
def do_search(self, arg):
search()
def do_generate(self, arg):
generate()
def do_export(self, arg):
export()
def do_batch(self, arg):
batchgenerate()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--database", help="set database file")
parser.add_argument("--config", help="set configuration file")
args = parser.parse_args()
if args.database is not None:
Library.loadDatabase(args.database)
else:
Library.loadDatabase()
if args.config is not None:
loadData(args.config)
else:
loadData()
clearScreen()
Lexeme().cmdloop()
|
// ... existing code ...
else:
loadData()
clearScreen()
Lexeme().cmdloop()
// ... rest of the code ...
|
96bcf7f55a50895dead660add9fc949af197f550
|
networking_sfc/tests/functional/services/sfc/agent/extensions/test_ovs_agent_sfc_extension.py
|
networking_sfc/tests/functional/services/sfc/agent/extensions/test_ovs_agent_sfc_extension.py
|
from neutron.tests.functional.agent.l2 import base
class TestOVSAgentSfcExtension(base.OVSAgentTestFramework):
def setUp(self):
super(TestOVSAgentSfcExtension, self).setUp()
self.config.set_override('extensions', ['sfc'], 'agent')
def test_run(self):
agent = self.create_agent()
self.start_agent(agent)
agent_state = agent.state_rpc.report_state.call_args[0][1]
self.assertEqual(['sfc'], agent_state['configurations']['extensions'])
|
from neutron.tests.functional.agent.l2 import base
class TestOVSAgentSfcExtension(base.OVSAgentTestFramework):
def setUp(self):
super(TestOVSAgentSfcExtension, self).setUp()
self.config.set_override('extensions', ['sfc'], 'agent')
self.agent = self.create_agent()
def test_run(self):
self.agent._report_state()
agent_state = self.agent.state_rpc.report_state.call_args[0][1]
self.assertEqual(['sfc'], agent_state['configurations']['extensions'])
|
Fix extension loading functional test
|
Fix extension loading functional test
Call the agent _report_state() before checking the report state itself
Change-Id: Idbf552d5ca5968bc95b0a3c395499c3f2d215729
Closes-Bug: 1658089
|
Python
|
apache-2.0
|
openstack/networking-sfc,openstack/networking-sfc
|
python
|
## Code Before:
from neutron.tests.functional.agent.l2 import base
class TestOVSAgentSfcExtension(base.OVSAgentTestFramework):
def setUp(self):
super(TestOVSAgentSfcExtension, self).setUp()
self.config.set_override('extensions', ['sfc'], 'agent')
def test_run(self):
agent = self.create_agent()
self.start_agent(agent)
agent_state = agent.state_rpc.report_state.call_args[0][1]
self.assertEqual(['sfc'], agent_state['configurations']['extensions'])
## Instruction:
Fix extension loading functional test
Call the agent _report_state() before checking the report state itself
Change-Id: Idbf552d5ca5968bc95b0a3c395499c3f2d215729
Closes-Bug: 1658089
## Code After:
from neutron.tests.functional.agent.l2 import base
class TestOVSAgentSfcExtension(base.OVSAgentTestFramework):
def setUp(self):
super(TestOVSAgentSfcExtension, self).setUp()
self.config.set_override('extensions', ['sfc'], 'agent')
self.agent = self.create_agent()
def test_run(self):
self.agent._report_state()
agent_state = self.agent.state_rpc.report_state.call_args[0][1]
self.assertEqual(['sfc'], agent_state['configurations']['extensions'])
|
...
def setUp(self):
super(TestOVSAgentSfcExtension, self).setUp()
self.config.set_override('extensions', ['sfc'], 'agent')
self.agent = self.create_agent()
def test_run(self):
self.agent._report_state()
agent_state = self.agent.state_rpc.report_state.call_args[0][1]
self.assertEqual(['sfc'], agent_state['configurations']['extensions'])
...
|
ccdeb23eb54191913a97b48907e0738f6969ce58
|
tests/factories/config.py
|
tests/factories/config.py
|
from factory import SubFactory
from pycroft.model.config import Config
from .base import BaseFactory
from .finance import AccountFactory, BankAccountFactory
from .property import PropertyGroupFactory, MemberPropertyGroupFactory
class ConfigFactory(BaseFactory):
"""This is a dummy Config factory, Referencing PropertyGroups with
no a-priori property relationships and arbitrary Accounts.
"""
class Meta:
model = Config
id = 1
# `PropertyGroup`s
member_group = SubFactory(MemberPropertyGroupFactory)
network_access_group = SubFactory(PropertyGroupFactory)
violation_group = SubFactory(PropertyGroupFactory)
cache_group = SubFactory(PropertyGroupFactory)
traffic_limit_exceeded_group = SubFactory(PropertyGroupFactory)
external_group = SubFactory(PropertyGroupFactory)
payment_in_default_group = SubFactory(PropertyGroupFactory)
blocked_group = SubFactory(PropertyGroupFactory)
caretaker_group = SubFactory(PropertyGroupFactory)
treasurer_group = SubFactory(PropertyGroupFactory)
# `Account`s
membership_fee_account = SubFactory(AccountFactory)
membership_fee_bank_account = SubFactory(BankAccountFactory)
|
from factory import SubFactory
from pycroft.model.config import Config
from .base import BaseFactory
from .finance import AccountFactory, BankAccountFactory
from .property import PropertyGroupFactory, MemberPropertyGroupFactory
class ConfigFactory(BaseFactory):
"""This is a dummy Config factory, Referencing PropertyGroups with
no a-priori property relationships and arbitrary Accounts.
"""
class Meta:
model = Config
id = 1
# `PropertyGroup`s
member_group = SubFactory(MemberPropertyGroupFactory)
network_access_group = SubFactory(PropertyGroupFactory)
violation_group = SubFactory(PropertyGroupFactory)
cache_group = SubFactory(PropertyGroupFactory)
traffic_limit_exceeded_group = SubFactory(PropertyGroupFactory)
external_group = SubFactory(PropertyGroupFactory)
payment_in_default_group = SubFactory(PropertyGroupFactory,
granted=frozenset(("payment_in_default",)),
denied=frozenset(("network_access", "userwww", "userdb")))
blocked_group = SubFactory(PropertyGroupFactory)
caretaker_group = SubFactory(PropertyGroupFactory)
treasurer_group = SubFactory(PropertyGroupFactory)
# `Account`s
membership_fee_account = SubFactory(AccountFactory)
membership_fee_bank_account = SubFactory(BankAccountFactory)
|
Add correct properties for payment_in_default test group
|
Add correct properties for payment_in_default test group
|
Python
|
apache-2.0
|
agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft
|
python
|
## Code Before:
from factory import SubFactory
from pycroft.model.config import Config
from .base import BaseFactory
from .finance import AccountFactory, BankAccountFactory
from .property import PropertyGroupFactory, MemberPropertyGroupFactory
class ConfigFactory(BaseFactory):
"""This is a dummy Config factory, Referencing PropertyGroups with
no a-priori property relationships and arbitrary Accounts.
"""
class Meta:
model = Config
id = 1
# `PropertyGroup`s
member_group = SubFactory(MemberPropertyGroupFactory)
network_access_group = SubFactory(PropertyGroupFactory)
violation_group = SubFactory(PropertyGroupFactory)
cache_group = SubFactory(PropertyGroupFactory)
traffic_limit_exceeded_group = SubFactory(PropertyGroupFactory)
external_group = SubFactory(PropertyGroupFactory)
payment_in_default_group = SubFactory(PropertyGroupFactory)
blocked_group = SubFactory(PropertyGroupFactory)
caretaker_group = SubFactory(PropertyGroupFactory)
treasurer_group = SubFactory(PropertyGroupFactory)
# `Account`s
membership_fee_account = SubFactory(AccountFactory)
membership_fee_bank_account = SubFactory(BankAccountFactory)
## Instruction:
Add correct properties for payment_in_default test group
## Code After:
from factory import SubFactory
from pycroft.model.config import Config
from .base import BaseFactory
from .finance import AccountFactory, BankAccountFactory
from .property import PropertyGroupFactory, MemberPropertyGroupFactory
class ConfigFactory(BaseFactory):
"""This is a dummy Config factory, Referencing PropertyGroups with
no a-priori property relationships and arbitrary Accounts.
"""
class Meta:
model = Config
id = 1
# `PropertyGroup`s
member_group = SubFactory(MemberPropertyGroupFactory)
network_access_group = SubFactory(PropertyGroupFactory)
violation_group = SubFactory(PropertyGroupFactory)
cache_group = SubFactory(PropertyGroupFactory)
traffic_limit_exceeded_group = SubFactory(PropertyGroupFactory)
external_group = SubFactory(PropertyGroupFactory)
payment_in_default_group = SubFactory(PropertyGroupFactory,
granted=frozenset(("payment_in_default",)),
denied=frozenset(("network_access", "userwww", "userdb")))
blocked_group = SubFactory(PropertyGroupFactory)
caretaker_group = SubFactory(PropertyGroupFactory)
treasurer_group = SubFactory(PropertyGroupFactory)
# `Account`s
membership_fee_account = SubFactory(AccountFactory)
membership_fee_bank_account = SubFactory(BankAccountFactory)
|
# ... existing code ...
cache_group = SubFactory(PropertyGroupFactory)
traffic_limit_exceeded_group = SubFactory(PropertyGroupFactory)
external_group = SubFactory(PropertyGroupFactory)
payment_in_default_group = SubFactory(PropertyGroupFactory,
granted=frozenset(("payment_in_default",)),
denied=frozenset(("network_access", "userwww", "userdb")))
blocked_group = SubFactory(PropertyGroupFactory)
caretaker_group = SubFactory(PropertyGroupFactory)
treasurer_group = SubFactory(PropertyGroupFactory)
# ... rest of the code ...
|
6430785e60fcef9bbac3cf4e7c70981f5af6affa
|
fluent_contents/plugins/sharedcontent/models.py
|
fluent_contents/plugins/sharedcontent/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from parler.models import TranslatableModel, TranslatedFields
from fluent_contents.models import ContentItem, PlaceholderField
class SharedContent(TranslatableModel):
"""
The parent hosting object for shared content
"""
translations = TranslatedFields(
title = models.CharField(_("Title"), max_length=200)
)
slug = models.SlugField(_("Template code"), unique=True, help_text=_("This unique name can be used refer to this content in in templates."))
contents = PlaceholderField("shared_content", verbose_name=_("Contents"))
# NOTE: settings such as "template_name", and which plugins are allowed can be added later.
class Meta:
verbose_name = _("Shared content")
verbose_name_plural = _("Shared content")
def __unicode__(self):
return self.title
class SharedContentItem(ContentItem):
"""
The contentitem to include in a page.
"""
shared_content = models.ForeignKey(SharedContent, verbose_name=_('Shared content'), related_name='shared_content_items')
class Meta:
verbose_name = _('Shared content')
verbose_name_plural = _('Shared content')
def __unicode__(self):
return unicode(self.shared_content)
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from parler.models import TranslatableModel, TranslatedFields
from fluent_contents.models import ContentItem, PlaceholderField, ContentItemRelation
class SharedContent(TranslatableModel):
"""
The parent hosting object for shared content
"""
translations = TranslatedFields(
title = models.CharField(_("Title"), max_length=200)
)
slug = models.SlugField(_("Template code"), unique=True, help_text=_("This unique name can be used refer to this content in in templates."))
contents = PlaceholderField("shared_content", verbose_name=_("Contents"))
# NOTE: settings such as "template_name", and which plugins are allowed can be added later.
# Adding the reverse relation for ContentItem objects
# causes the admin to list these objects when moving the shared content
contentitem_set = ContentItemRelation()
class Meta:
verbose_name = _("Shared content")
verbose_name_plural = _("Shared content")
def __unicode__(self):
return self.title
class SharedContentItem(ContentItem):
"""
The contentitem to include in a page.
"""
shared_content = models.ForeignKey(SharedContent, verbose_name=_('Shared content'), related_name='shared_content_items')
class Meta:
verbose_name = _('Shared content')
verbose_name_plural = _('Shared content')
def __unicode__(self):
return unicode(self.shared_content)
|
Add ContentItemRelation to SharedContent model
|
Add ContentItemRelation to SharedContent model
Displays objects in the admin delete screen.
|
Python
|
apache-2.0
|
jpotterm/django-fluent-contents,django-fluent/django-fluent-contents,django-fluent/django-fluent-contents,ixc/django-fluent-contents,edoburu/django-fluent-contents,jpotterm/django-fluent-contents,django-fluent/django-fluent-contents,pombredanne/django-fluent-contents,jpotterm/django-fluent-contents,pombredanne/django-fluent-contents,ixc/django-fluent-contents,pombredanne/django-fluent-contents,ixc/django-fluent-contents,edoburu/django-fluent-contents,edoburu/django-fluent-contents
|
python
|
## Code Before:
from django.db import models
from django.utils.translation import ugettext_lazy as _
from parler.models import TranslatableModel, TranslatedFields
from fluent_contents.models import ContentItem, PlaceholderField
class SharedContent(TranslatableModel):
"""
The parent hosting object for shared content
"""
translations = TranslatedFields(
title = models.CharField(_("Title"), max_length=200)
)
slug = models.SlugField(_("Template code"), unique=True, help_text=_("This unique name can be used refer to this content in in templates."))
contents = PlaceholderField("shared_content", verbose_name=_("Contents"))
# NOTE: settings such as "template_name", and which plugins are allowed can be added later.
class Meta:
verbose_name = _("Shared content")
verbose_name_plural = _("Shared content")
def __unicode__(self):
return self.title
class SharedContentItem(ContentItem):
"""
The contentitem to include in a page.
"""
shared_content = models.ForeignKey(SharedContent, verbose_name=_('Shared content'), related_name='shared_content_items')
class Meta:
verbose_name = _('Shared content')
verbose_name_plural = _('Shared content')
def __unicode__(self):
return unicode(self.shared_content)
## Instruction:
Add ContentItemRelation to SharedContent model
Displays objects in the admin delete screen.
## Code After:
from django.db import models
from django.utils.translation import ugettext_lazy as _
from parler.models import TranslatableModel, TranslatedFields
from fluent_contents.models import ContentItem, PlaceholderField, ContentItemRelation
class SharedContent(TranslatableModel):
"""
The parent hosting object for shared content
"""
translations = TranslatedFields(
title = models.CharField(_("Title"), max_length=200)
)
slug = models.SlugField(_("Template code"), unique=True, help_text=_("This unique name can be used refer to this content in in templates."))
contents = PlaceholderField("shared_content", verbose_name=_("Contents"))
# NOTE: settings such as "template_name", and which plugins are allowed can be added later.
# Adding the reverse relation for ContentItem objects
# causes the admin to list these objects when moving the shared content
contentitem_set = ContentItemRelation()
class Meta:
verbose_name = _("Shared content")
verbose_name_plural = _("Shared content")
def __unicode__(self):
return self.title
class SharedContentItem(ContentItem):
"""
The contentitem to include in a page.
"""
shared_content = models.ForeignKey(SharedContent, verbose_name=_('Shared content'), related_name='shared_content_items')
class Meta:
verbose_name = _('Shared content')
verbose_name_plural = _('Shared content')
def __unicode__(self):
return unicode(self.shared_content)
|
...
from django.db import models
from django.utils.translation import ugettext_lazy as _
from parler.models import TranslatableModel, TranslatedFields
from fluent_contents.models import ContentItem, PlaceholderField, ContentItemRelation
class SharedContent(TranslatableModel):
...
contents = PlaceholderField("shared_content", verbose_name=_("Contents"))
# NOTE: settings such as "template_name", and which plugins are allowed can be added later.
# Adding the reverse relation for ContentItem objects
# causes the admin to list these objects when moving the shared content
contentitem_set = ContentItemRelation()
class Meta:
verbose_name = _("Shared content")
...
|
f5885a2644a21def7340e0d34b809a98472b366c
|
heufybot/modules/commands/nick.py
|
heufybot/modules/commands/nick.py
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import IBotModule
from heufybot.modules.commandinterface import BotCommand
from zope.interface import implements
class NickCommand(BotCommand):
implements(IPlugin, IBotModule)
name = "Nick"
def triggers(self):
return ["nick"]
def load(self):
self.help = "Commands: nick | Change the nickname of the bot."
self.commandHelp = {}
def checkPermissions(self, server, source, user, command):
return not self.bot.moduleHandler.runActionUntilFalse("checkadminpermission", server, source, user,
"connection-control")
def execute(self, server, source, command, params, data):
if len(params) < 1:
self.bot.servers[server].outputHandler.cmdPRIVMSG(source, "Change my nick to what?")
else:
self.bot.servers[server].outputHandler.cmdNICK(params[0])
nickCommand = NickCommand()
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import IBotModule
from heufybot.modules.commandinterface import BotCommand
from zope.interface import implements
class NickCommand(BotCommand):
implements(IPlugin, IBotModule)
name = "Nick"
def triggers(self):
return ["nick"]
def load(self):
self.help = "Commands: nick <newnick> | Change the nickname of the bot."
self.commandHelp = {}
def checkPermissions(self, server, source, user, command):
return not self.bot.moduleHandler.runActionUntilFalse("checkadminpermission", server, source, user,
"connection-control")
def execute(self, server, source, command, params, data):
if len(params) < 1:
self.bot.servers[server].outputHandler.cmdPRIVMSG(source, "Change my nick to what?")
else:
self.bot.servers[server].outputHandler.cmdNICK(params[0])
nickCommand = NickCommand()
|
Fix the Nick command help text
|
Fix the Nick command help text
|
Python
|
mit
|
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
|
python
|
## Code Before:
from twisted.plugin import IPlugin
from heufybot.moduleinterface import IBotModule
from heufybot.modules.commandinterface import BotCommand
from zope.interface import implements
class NickCommand(BotCommand):
implements(IPlugin, IBotModule)
name = "Nick"
def triggers(self):
return ["nick"]
def load(self):
self.help = "Commands: nick | Change the nickname of the bot."
self.commandHelp = {}
def checkPermissions(self, server, source, user, command):
return not self.bot.moduleHandler.runActionUntilFalse("checkadminpermission", server, source, user,
"connection-control")
def execute(self, server, source, command, params, data):
if len(params) < 1:
self.bot.servers[server].outputHandler.cmdPRIVMSG(source, "Change my nick to what?")
else:
self.bot.servers[server].outputHandler.cmdNICK(params[0])
nickCommand = NickCommand()
## Instruction:
Fix the Nick command help text
## Code After:
from twisted.plugin import IPlugin
from heufybot.moduleinterface import IBotModule
from heufybot.modules.commandinterface import BotCommand
from zope.interface import implements
class NickCommand(BotCommand):
implements(IPlugin, IBotModule)
name = "Nick"
def triggers(self):
return ["nick"]
def load(self):
self.help = "Commands: nick <newnick> | Change the nickname of the bot."
self.commandHelp = {}
def checkPermissions(self, server, source, user, command):
return not self.bot.moduleHandler.runActionUntilFalse("checkadminpermission", server, source, user,
"connection-control")
def execute(self, server, source, command, params, data):
if len(params) < 1:
self.bot.servers[server].outputHandler.cmdPRIVMSG(source, "Change my nick to what?")
else:
self.bot.servers[server].outputHandler.cmdNICK(params[0])
nickCommand = NickCommand()
|
...
return ["nick"]
def load(self):
self.help = "Commands: nick <newnick> | Change the nickname of the bot."
self.commandHelp = {}
def checkPermissions(self, server, source, user, command):
...
|
ca419f6d8ae07ef1b83834f9d5d726fed74c2a44
|
bgc/DIC_ATMOS.h
|
bgc/DIC_ATMOS.h
|
C $Header: /u/gcmpack/MITgcm/pkg/dic/DIC_ATMOS.h,v 1.4 2010/04/11 20:59:27 jmc Exp $
C $Name: $
COMMON /INTERACT_ATMOS_NEEDS/
& co2atmos,
& total_atmos_carbon, total_ocean_carbon,
& total_atmos_carbon_year,
& total_ocean_carbon_year,
& total_atmos_carbon_start,
& total_ocean_carbon_start,
& atpco2
_RL co2atmos(1000)
_RL total_atmos_carbon
_RL total_ocean_carbon
_RL total_atmos_carbon_year
_RL total_atmos_carbon_start
_RL total_ocean_carbon_year
_RL total_ocean_carbon_start
_RL atpco2
|
C $Header: /u/gcmpack/MITgcm/pkg/dic/DIC_ATMOS.h,v 1.4 2010/04/11 20:59:27 jmc Exp $
C $Name: $
COMMON /INTERACT_ATMOS_NEEDS/
& co2atmos,
& total_atmos_carbon, total_ocean_carbon,
& total_atmos_carbon_year,
& total_ocean_carbon_year,
& total_atmos_carbon_start,
& total_ocean_carbon_start,
& atpco2,total_atmos_moles
_RL co2atmos(1000)
_RL total_atmos_carbon
_RL total_ocean_carbon
_RL total_atmos_carbon_year
_RL total_atmos_carbon_start
_RL total_ocean_carbon_year
_RL total_ocean_carbon_start
_RL atpco2
_RL total_atmos_moles
|
Add ability to specify CO2 emissions (dic_int1=4)
|
Add ability to specify CO2 emissions (dic_int1=4)
|
C
|
mit
|
seamanticscience/mitgcm_mods,seamanticscience/mitgcm_mods
|
c
|
## Code Before:
C $Header: /u/gcmpack/MITgcm/pkg/dic/DIC_ATMOS.h,v 1.4 2010/04/11 20:59:27 jmc Exp $
C $Name: $
COMMON /INTERACT_ATMOS_NEEDS/
& co2atmos,
& total_atmos_carbon, total_ocean_carbon,
& total_atmos_carbon_year,
& total_ocean_carbon_year,
& total_atmos_carbon_start,
& total_ocean_carbon_start,
& atpco2
_RL co2atmos(1000)
_RL total_atmos_carbon
_RL total_ocean_carbon
_RL total_atmos_carbon_year
_RL total_atmos_carbon_start
_RL total_ocean_carbon_year
_RL total_ocean_carbon_start
_RL atpco2
## Instruction:
Add ability to specify CO2 emissions (dic_int1=4)
## Code After:
C $Header: /u/gcmpack/MITgcm/pkg/dic/DIC_ATMOS.h,v 1.4 2010/04/11 20:59:27 jmc Exp $
C $Name: $
COMMON /INTERACT_ATMOS_NEEDS/
& co2atmos,
& total_atmos_carbon, total_ocean_carbon,
& total_atmos_carbon_year,
& total_ocean_carbon_year,
& total_atmos_carbon_start,
& total_ocean_carbon_start,
& atpco2,total_atmos_moles
_RL co2atmos(1000)
_RL total_atmos_carbon
_RL total_ocean_carbon
_RL total_atmos_carbon_year
_RL total_atmos_carbon_start
_RL total_ocean_carbon_year
_RL total_ocean_carbon_start
_RL atpco2
_RL total_atmos_moles
|
// ... existing code ...
& total_ocean_carbon_year,
& total_atmos_carbon_start,
& total_ocean_carbon_start,
& atpco2,total_atmos_moles
_RL co2atmos(1000)
_RL total_atmos_carbon
// ... modified code ...
_RL total_ocean_carbon_year
_RL total_ocean_carbon_start
_RL atpco2
_RL total_atmos_moles
// ... rest of the code ...
|
76551f7a05506a872ec6535eb3263710650ea8ce
|
glue/core/data_factories/__init__.py
|
glue/core/data_factories/__init__.py
|
from .helpers import *
from .gridded import *
from .pandas import *
from .excel import *
from .image import *
from .dendrogram import *
from .tables import *
|
from .helpers import *
from .gridded import *
from .pandas import *
from .excel import *
from .image import *
from .tables import *
from .dendrogram import *
|
Order of import matters for disambiguation, but this should be fixed later to avoid this.
|
Order of import matters for disambiguation, but this should be fixed later to avoid this.
|
Python
|
bsd-3-clause
|
saimn/glue,stscieisenhamer/glue,stscieisenhamer/glue,JudoWill/glue,saimn/glue,JudoWill/glue
|
python
|
## Code Before:
from .helpers import *
from .gridded import *
from .pandas import *
from .excel import *
from .image import *
from .dendrogram import *
from .tables import *
## Instruction:
Order of import matters for disambiguation, but this should be fixed later to avoid this.
## Code After:
from .helpers import *
from .gridded import *
from .pandas import *
from .excel import *
from .image import *
from .tables import *
from .dendrogram import *
|
// ... existing code ...
from .pandas import *
from .excel import *
from .image import *
from .tables import *
from .dendrogram import *
// ... rest of the code ...
|
cfdaffb8cf65ab0a934de2efb513e66989b4bad6
|
src/lib/elm_map_legacy.h
|
src/lib/elm_map_legacy.h
|
/**
* Add a new map widget to the given parent Elementary (container) object.
*
* @param parent The parent object.
* @return a new map widget handle or @c NULL, on errors.
*
* This function inserts a new map widget on the canvas.
*
* @ingroup Map
*/
EAPI Evas_Object *elm_map_add(Evas_Object *parent);
#include "elm_map.eo.legacy.h"
|
/**
* Add a new map widget to the given parent Elementary (container) object.
*
* @param parent The parent object.
* @return a new map widget handle or @c NULL, on errors.
*
* This function inserts a new map widget on the canvas.
*
* @ingroup Map
*/
EAPI Evas_Object *elm_map_add(Evas_Object *parent);
/**
* @internal
*
* @brief Requests a list of addresses corresponding to a given name.
*
* @since 1.8
*
* @remarks This is used if you want to search the address from a name.
*
* @param obj The map object
* @param address The address
* @param name_cb The callback function
* @param data The user callback data
*
* @ingroup Map
*/
EAPI void elm_map_name_search(const Evas_Object *obj, const char *address, Elm_Map_Name_List_Cb name_cb, void *data);
#include "elm_map.eo.legacy.h"
|
Add missing legacy API into legacy header
|
map: Add missing legacy API into legacy header
Summary: @fix
Reviewers: raster
Reviewed By: raster
Differential Revision: https://phab.enlightenment.org/D1164
|
C
|
lgpl-2.1
|
FlorentRevest/Elementary,tasn/elementary,tasn/elementary,tasn/elementary,rvandegrift/elementary,FlorentRevest/Elementary,tasn/elementary,FlorentRevest/Elementary,tasn/elementary,rvandegrift/elementary,FlorentRevest/Elementary,rvandegrift/elementary,rvandegrift/elementary
|
c
|
## Code Before:
/**
* Add a new map widget to the given parent Elementary (container) object.
*
* @param parent The parent object.
* @return a new map widget handle or @c NULL, on errors.
*
* This function inserts a new map widget on the canvas.
*
* @ingroup Map
*/
EAPI Evas_Object *elm_map_add(Evas_Object *parent);
#include "elm_map.eo.legacy.h"
## Instruction:
map: Add missing legacy API into legacy header
Summary: @fix
Reviewers: raster
Reviewed By: raster
Differential Revision: https://phab.enlightenment.org/D1164
## Code After:
/**
* Add a new map widget to the given parent Elementary (container) object.
*
* @param parent The parent object.
* @return a new map widget handle or @c NULL, on errors.
*
* This function inserts a new map widget on the canvas.
*
* @ingroup Map
*/
EAPI Evas_Object *elm_map_add(Evas_Object *parent);
/**
* @internal
*
* @brief Requests a list of addresses corresponding to a given name.
*
* @since 1.8
*
* @remarks This is used if you want to search the address from a name.
*
* @param obj The map object
* @param address The address
* @param name_cb The callback function
* @param data The user callback data
*
* @ingroup Map
*/
EAPI void elm_map_name_search(const Evas_Object *obj, const char *address, Elm_Map_Name_List_Cb name_cb, void *data);
#include "elm_map.eo.legacy.h"
|
...
*/
EAPI Evas_Object *elm_map_add(Evas_Object *parent);
/**
* @internal
*
* @brief Requests a list of addresses corresponding to a given name.
*
* @since 1.8
*
* @remarks This is used if you want to search the address from a name.
*
* @param obj The map object
* @param address The address
* @param name_cb The callback function
* @param data The user callback data
*
* @ingroup Map
*/
EAPI void elm_map_name_search(const Evas_Object *obj, const char *address, Elm_Map_Name_List_Cb name_cb, void *data);
#include "elm_map.eo.legacy.h"
...
|
037e15f383c326f1f4e7de59bc3ec3520ac6ce40
|
pystachio/__init__.py
|
pystachio/__init__.py
|
__author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
from pystachio.typing import (
Type,
TypeCheck,
TypeFactory)
from pystachio.base import Environment
from pystachio.parsing import MustacheParser
from pystachio.naming import Namable, Ref
from pystachio.basic import (
Float,
Integer,
String)
from pystachio.container import (
List,
Map)
from pystachio.composite import (
Default,
Empty,
Provided,
Required,
Struct)
|
__author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
import sys
if sys.version_info < (2, 6, 5):
raise ImportError("pystachio requires Python >= 2.6.5")
from pystachio.typing import (
Type,
TypeCheck,
TypeFactory)
from pystachio.base import Environment
from pystachio.parsing import MustacheParser
from pystachio.naming import Namable, Ref
from pystachio.basic import (
Float,
Integer,
String)
from pystachio.container import (
List,
Map)
from pystachio.composite import (
Default,
Empty,
Provided,
Required,
Struct)
|
Add check for minimum Python version
|
Add check for minimum Python version
|
Python
|
mit
|
wickman/pystachio
|
python
|
## Code Before:
__author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
from pystachio.typing import (
Type,
TypeCheck,
TypeFactory)
from pystachio.base import Environment
from pystachio.parsing import MustacheParser
from pystachio.naming import Namable, Ref
from pystachio.basic import (
Float,
Integer,
String)
from pystachio.container import (
List,
Map)
from pystachio.composite import (
Default,
Empty,
Provided,
Required,
Struct)
## Instruction:
Add check for minimum Python version
## Code After:
__author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
import sys
if sys.version_info < (2, 6, 5):
raise ImportError("pystachio requires Python >= 2.6.5")
from pystachio.typing import (
Type,
TypeCheck,
TypeFactory)
from pystachio.base import Environment
from pystachio.parsing import MustacheParser
from pystachio.naming import Namable, Ref
from pystachio.basic import (
Float,
Integer,
String)
from pystachio.container import (
List,
Map)
from pystachio.composite import (
Default,
Empty,
Provided,
Required,
Struct)
|
# ... existing code ...
__author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
import sys
if sys.version_info < (2, 6, 5):
raise ImportError("pystachio requires Python >= 2.6.5")
from pystachio.typing import (
Type,
# ... rest of the code ...
|
f3f4b0e1ab4904f6a1d9011d9abc7735e65b458c
|
nbdump.c
|
nbdump.c
|
// The default size is enought to hold a whole ethernet frame (< 1524 bytes)
#ifndef PACKET_BUFFER_SIZE
#define PACKET_BUFFER_SIZE 2048
#endif
static void print_packet(const unsigned char *pkt, size_t pktlen)
{
while (pktlen--) {
printf("%02x", *pkt++);
}
putchar('\n');
}
int main(int argc, char *argv[])
{
unsigned char pktbuf[PACKET_BUFFER_SIZE];
int sockfd, res = 0;
sockfd = socket(AF_PACKET, SOCK_RAW, htons(ETH_P_ALL));
if (-1 == sockfd) {
perror("socket");
goto error;
}
while (1) {
ssize_t pktlen = read(sockfd, pktbuf, sizeof(pktbuf));
if (pktlen < 0) {
perror("read");
continue;
}
if (pktlen > 0) {
print_packet(pktbuf, pktlen);
}
}
close(sockfd);
out:
return res;
error:
res = 1;
goto out;
}
|
// The default size is enought to hold a whole ethernet frame (< 1524 bytes)
#ifndef PACKET_BUFFER_SIZE
#define PACKET_BUFFER_SIZE 2048
#endif
static void print_packet(const unsigned char *pkt, size_t pktlen)
{
while (pktlen--) {
printf("%02x", *pkt++);
}
putchar('\n');
}
int main(int argc, char *argv[])
{
unsigned char pktbuf[PACKET_BUFFER_SIZE];
int sockfd, res = 0;
sockfd = socket(AF_PACKET, SOCK_RAW, htons(ETH_P_ALL));
if (-1 == sockfd) {
perror("socket");
goto error;
}
while (1) {
ssize_t pktlen = read(sockfd, pktbuf, sizeof(pktbuf));
if (pktlen < 0) {
perror("read");
continue;
}
if (pktlen > 0) {
print_packet(pktbuf, pktlen);
}
}
close(sockfd);
out:
return res;
error:
res = 1;
goto out;
}
|
Fix indentation by using tabs only
|
Fix indentation by using tabs only
|
C
|
mit
|
grn/netbox,grn/netbox
|
c
|
## Code Before:
// The default size is enought to hold a whole ethernet frame (< 1524 bytes)
#ifndef PACKET_BUFFER_SIZE
#define PACKET_BUFFER_SIZE 2048
#endif
static void print_packet(const unsigned char *pkt, size_t pktlen)
{
while (pktlen--) {
printf("%02x", *pkt++);
}
putchar('\n');
}
int main(int argc, char *argv[])
{
unsigned char pktbuf[PACKET_BUFFER_SIZE];
int sockfd, res = 0;
sockfd = socket(AF_PACKET, SOCK_RAW, htons(ETH_P_ALL));
if (-1 == sockfd) {
perror("socket");
goto error;
}
while (1) {
ssize_t pktlen = read(sockfd, pktbuf, sizeof(pktbuf));
if (pktlen < 0) {
perror("read");
continue;
}
if (pktlen > 0) {
print_packet(pktbuf, pktlen);
}
}
close(sockfd);
out:
return res;
error:
res = 1;
goto out;
}
## Instruction:
Fix indentation by using tabs only
## Code After:
// The default size is enought to hold a whole ethernet frame (< 1524 bytes)
#ifndef PACKET_BUFFER_SIZE
#define PACKET_BUFFER_SIZE 2048
#endif
static void print_packet(const unsigned char *pkt, size_t pktlen)
{
while (pktlen--) {
printf("%02x", *pkt++);
}
putchar('\n');
}
int main(int argc, char *argv[])
{
unsigned char pktbuf[PACKET_BUFFER_SIZE];
int sockfd, res = 0;
sockfd = socket(AF_PACKET, SOCK_RAW, htons(ETH_P_ALL));
if (-1 == sockfd) {
perror("socket");
goto error;
}
while (1) {
ssize_t pktlen = read(sockfd, pktbuf, sizeof(pktbuf));
if (pktlen < 0) {
perror("read");
continue;
}
if (pktlen > 0) {
print_packet(pktbuf, pktlen);
}
}
close(sockfd);
out:
return res;
error:
res = 1;
goto out;
}
|
...
sockfd = socket(AF_PACKET, SOCK_RAW, htons(ETH_P_ALL));
if (-1 == sockfd) {
perror("socket");
goto error;
}
while (1) {
ssize_t pktlen = read(sockfd, pktbuf, sizeof(pktbuf));
if (pktlen < 0) {
perror("read");
continue;
}
if (pktlen > 0) {
print_packet(pktbuf, pktlen);
}
}
close(sockfd);
out:
return res;
...
|
1bacf677311b211ec75780298a4e366c9c65e307
|
apps/polls/tests.py
|
apps/polls/tests.py
|
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.assertEqual(1 + 1, 2)
|
import datetime
from django.utils import timezone
from django.test import TestCase
from apps.polls.models import Poll
class PollMethodTests(TestCase):
def test_was_published_recently_with_future_poll(self):
"""
was_published_recently() should return False for polls whose
pub_date is in the future
"""
future_poll = Poll(pub_date=timezone.now() + datetime.timedelta(days=30))
self.assertEqual(future_poll.was_published_recently(), False)
|
Create a test to expose the bug
|
Create a test to expose the bug
|
Python
|
bsd-3-clause
|
cuzen1/teracy-tutorial,cuzen1/teracy-tutorial,cuzen1/teracy-tutorial
|
python
|
## Code Before:
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.assertEqual(1 + 1, 2)
## Instruction:
Create a test to expose the bug
## Code After:
import datetime
from django.utils import timezone
from django.test import TestCase
from apps.polls.models import Poll
class PollMethodTests(TestCase):
def test_was_published_recently_with_future_poll(self):
"""
was_published_recently() should return False for polls whose
pub_date is in the future
"""
future_poll = Poll(pub_date=timezone.now() + datetime.timedelta(days=30))
self.assertEqual(future_poll.was_published_recently(), False)
|
...
import datetime
from django.utils import timezone
from django.test import TestCase
from apps.polls.models import Poll
class PollMethodTests(TestCase):
def test_was_published_recently_with_future_poll(self):
"""
was_published_recently() should return False for polls whose
pub_date is in the future
"""
future_poll = Poll(pub_date=timezone.now() + datetime.timedelta(days=30))
self.assertEqual(future_poll.was_published_recently(), False)
...
|
ecb0922db7aca55bde6177e37b5e20f94fb59b60
|
copy/opt/core/bin/remove_mailbox.py
|
copy/opt/core/bin/remove_mailbox.py
|
import redis
import os.path
import shutil
import glob
""" ***** CONFIG START ***** """
REDIS_SOCKET = '/tmp/redis.sock'
MAILDIR = '/var/mail'
""" ***** CONFIG END ***** """
os.chdir(MAILDIR)
r = redis.Redis(unix_socket_path=REDIS_SOCKET)
filesDepth = glob.glob('*/*')
dirsDepth = filter(lambda f: os.path.isdir(f), filesDepth)
for dirDepth in dirsDepth:
domain, user = dirDepth.split('/')
mail = user + '@' + domain
exists = r.exists('mail.account:obj:' + mail)
if not exists:
mdir = MAILDIR + "/" + domain + "/" + user
shutil.rmtree(mdir)
print '%s,%s' % (mail, mdir)
|
import redis
import os
import shutil
import glob
import time
""" ***** CONFIG START ***** """
REDIS_SOCKET = '/tmp/redis.sock'
MAILDIR = '/var/mail'
ARCHIVE = '/var/mail/.archive'
RM_FILE = 'core-remove-mailbox'
""" ***** CONFIG END ***** """
os.chdir(MAILDIR)
r = redis.Redis(unix_socket_path=REDIS_SOCKET)
filesDepth = glob.glob('*/*')
dirsDepth = filter(lambda f: os.path.isdir(f), filesDepth)
today = str(time.time())
for dirDepth in dirsDepth:
# get all information
domain, user = dirDepth.split('/')
mail = user + '@' + domain
mdir = MAILDIR + "/" + domain + "/" + user
rm_file = mdir + "/" + RM_FILE
# check if redis account exists
exists = r.exists('mail.account:obj:' + mail)
if not exists:
if os.path.exists(rm_file):
# RM_FILE file exists archive maildir
shutil.move(mdir, ARCHIVE + "/" + mail + "_" + today)
print 'Archived: %s,%s' % (mail, mdir)
else:
# create RM_FILE file in maildir
open(rm_file, 'a').close()
print 'Tagged: %s,%s' % (mail, mdir)
else:
# remove RM_FILE because account still exists in redis
if os.path.exists(rm_file):
os.remove(rm_file)
|
Rebuild mailbox remove script with some verify checks
|
Rebuild mailbox remove script with some verify checks
We should not remove a mailbox and only archive it. Also it should
be good to be sure the mailbox doesn't exists in redis anymore after
two runs.
|
Python
|
mit
|
skylime/mi-core-mbox,skylime/mi-core-mbox
|
python
|
## Code Before:
import redis
import os.path
import shutil
import glob
""" ***** CONFIG START ***** """
REDIS_SOCKET = '/tmp/redis.sock'
MAILDIR = '/var/mail'
""" ***** CONFIG END ***** """
os.chdir(MAILDIR)
r = redis.Redis(unix_socket_path=REDIS_SOCKET)
filesDepth = glob.glob('*/*')
dirsDepth = filter(lambda f: os.path.isdir(f), filesDepth)
for dirDepth in dirsDepth:
domain, user = dirDepth.split('/')
mail = user + '@' + domain
exists = r.exists('mail.account:obj:' + mail)
if not exists:
mdir = MAILDIR + "/" + domain + "/" + user
shutil.rmtree(mdir)
print '%s,%s' % (mail, mdir)
## Instruction:
Rebuild mailbox remove script with some verify checks
We should not remove a mailbox and only archive it. Also it should
be good to be sure the mailbox doesn't exists in redis anymore after
two runs.
## Code After:
import redis
import os
import shutil
import glob
import time
""" ***** CONFIG START ***** """
REDIS_SOCKET = '/tmp/redis.sock'
MAILDIR = '/var/mail'
ARCHIVE = '/var/mail/.archive'
RM_FILE = 'core-remove-mailbox'
""" ***** CONFIG END ***** """
os.chdir(MAILDIR)
r = redis.Redis(unix_socket_path=REDIS_SOCKET)
filesDepth = glob.glob('*/*')
dirsDepth = filter(lambda f: os.path.isdir(f), filesDepth)
today = str(time.time())
for dirDepth in dirsDepth:
# get all information
domain, user = dirDepth.split('/')
mail = user + '@' + domain
mdir = MAILDIR + "/" + domain + "/" + user
rm_file = mdir + "/" + RM_FILE
# check if redis account exists
exists = r.exists('mail.account:obj:' + mail)
if not exists:
if os.path.exists(rm_file):
# RM_FILE file exists archive maildir
shutil.move(mdir, ARCHIVE + "/" + mail + "_" + today)
print 'Archived: %s,%s' % (mail, mdir)
else:
# create RM_FILE file in maildir
open(rm_file, 'a').close()
print 'Tagged: %s,%s' % (mail, mdir)
else:
# remove RM_FILE because account still exists in redis
if os.path.exists(rm_file):
os.remove(rm_file)
|
# ... existing code ...
import redis
import os
import shutil
import glob
import time
""" ***** CONFIG START ***** """
REDIS_SOCKET = '/tmp/redis.sock'
MAILDIR = '/var/mail'
ARCHIVE = '/var/mail/.archive'
RM_FILE = 'core-remove-mailbox'
""" ***** CONFIG END ***** """
# ... modified code ...
filesDepth = glob.glob('*/*')
dirsDepth = filter(lambda f: os.path.isdir(f), filesDepth)
today = str(time.time())
for dirDepth in dirsDepth:
# get all information
domain, user = dirDepth.split('/')
mail = user + '@' + domain
mdir = MAILDIR + "/" + domain + "/" + user
rm_file = mdir + "/" + RM_FILE
# check if redis account exists
exists = r.exists('mail.account:obj:' + mail)
if not exists:
if os.path.exists(rm_file):
# RM_FILE file exists archive maildir
shutil.move(mdir, ARCHIVE + "/" + mail + "_" + today)
print 'Archived: %s,%s' % (mail, mdir)
else:
# create RM_FILE file in maildir
open(rm_file, 'a').close()
print 'Tagged: %s,%s' % (mail, mdir)
else:
# remove RM_FILE because account still exists in redis
if os.path.exists(rm_file):
os.remove(rm_file)
# ... rest of the code ...
|
d96e52c346314622afc904a2917416028c6784e3
|
swampdragon_live/models.py
|
swampdragon_live/models.py
|
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import post_save
from django.dispatch import receiver
from .tasks import push_new_content
@receiver(post_save)
def post_save_handler(sender, instance, **kwargs):
instance_type = ContentType.objects.get_for_model(instance.__class__)
push_new_content.apply_async(countdown=1, kwargs={'instance_type_pk': instance_type.pk,
'instance_pk': instance.pk})
|
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import post_save
from django.dispatch import receiver
from .tasks import push_new_content
@receiver(post_save)
def post_save_handler(sender, instance, **kwargs):
if ContentType.objects.exists():
instance_type = ContentType.objects.get_for_model(instance.__class__)
push_new_content.apply_async(countdown=1, kwargs={'instance_type_pk': instance_type.pk,
'instance_pk': instance.pk})
|
Fix initial migration until ContentType is available
|
Fix initial migration until ContentType is available
|
Python
|
mit
|
mback2k/swampdragon-live,mback2k/swampdragon-live
|
python
|
## Code Before:
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import post_save
from django.dispatch import receiver
from .tasks import push_new_content
@receiver(post_save)
def post_save_handler(sender, instance, **kwargs):
instance_type = ContentType.objects.get_for_model(instance.__class__)
push_new_content.apply_async(countdown=1, kwargs={'instance_type_pk': instance_type.pk,
'instance_pk': instance.pk})
## Instruction:
Fix initial migration until ContentType is available
## Code After:
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import post_save
from django.dispatch import receiver
from .tasks import push_new_content
@receiver(post_save)
def post_save_handler(sender, instance, **kwargs):
if ContentType.objects.exists():
instance_type = ContentType.objects.get_for_model(instance.__class__)
push_new_content.apply_async(countdown=1, kwargs={'instance_type_pk': instance_type.pk,
'instance_pk': instance.pk})
|
# ... existing code ...
@receiver(post_save)
def post_save_handler(sender, instance, **kwargs):
if ContentType.objects.exists():
instance_type = ContentType.objects.get_for_model(instance.__class__)
push_new_content.apply_async(countdown=1, kwargs={'instance_type_pk': instance_type.pk,
'instance_pk': instance.pk})
# ... rest of the code ...
|
78497a5ef492f18511c4a09b5ca62facafe9c302
|
setup.py
|
setup.py
|
"""Installation script."""
from os import path
from setuptools import find_packages, setup
HERE = path.abspath(path.dirname(__file__))
with open(path.join(HERE, 'README.rst')) as f:
LONG_DESCRIPTION = f.read().strip()
setup(
name='fuel',
version='0.1a1', # PEP 440 compliant
description='Data pipeline framework for machine learning',
long_description=LONG_DESCRIPTION,
url='https://github.com/bartvm/fuel.git',
author='Universite de Montreal',
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Utilities',
'Topic :: Scientific/Engineering',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
],
keywords='dataset data iteration pipeline processing',
packages=find_packages(exclude=['tests']),
install_requires=['six', 'picklable_itertools', 'toolz', 'pyyaml', 'h5py',
'tables'],
scripts=['bin/fuel-convert']
)
|
"""Installation script."""
from os import path
from setuptools import find_packages, setup
HERE = path.abspath(path.dirname(__file__))
with open(path.join(HERE, 'README.rst')) as f:
LONG_DESCRIPTION = f.read().strip()
setup(
name='fuel',
version='0.1a1', # PEP 440 compliant
description='Data pipeline framework for machine learning',
long_description=LONG_DESCRIPTION,
url='https://github.com/bartvm/fuel.git',
author='Universite de Montreal',
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Utilities',
'Topic :: Scientific/Engineering',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
],
keywords='dataset data iteration pipeline processing',
packages=find_packages(exclude=['tests']),
install_requires=['six', 'picklable_itertools', 'toolz', 'pyyaml', 'h5py',
'tables'],
scripts=['bin/fuel-convert', 'bin/fuel-download']
)
|
Add fuel-download to installed scripts
|
Add fuel-download to installed scripts
|
Python
|
mit
|
capybaralet/fuel,glewis17/fuel,EderSantana/fuel,bouthilx/fuel,laurent-dinh/fuel,orhanf/fuel,mila-udem/fuel,rizar/fuel,codeaudit/fuel,udibr/fuel,glewis17/fuel,orhanf/fuel,ejls/fuel,capybaralet/fuel,dmitriy-serdyuk/fuel,markusnagel/fuel,ejls/fuel,dwf/fuel,janchorowski/fuel,laurent-dinh/fuel,dribnet/fuel,janchorowski/fuel,dhruvparamhans/fuel,mjwillson/fuel,bouthilx/fuel,dribnet/fuel,aalmah/fuel,harmdevries89/fuel,dwf/fuel,jbornschein/fuel,EderSantana/fuel,hantek/fuel,dhruvparamhans/fuel,harmdevries89/fuel,vdumoulin/fuel,dmitriy-serdyuk/fuel,udibr/fuel,lamblin/fuel,chrishokamp/fuel,jbornschein/fuel,lamblin/fuel,rodrigob/fuel,chrishokamp/fuel,rodrigob/fuel,hantek/fuel,mila-udem/fuel,rizar/fuel,markusnagel/fuel,mjwillson/fuel,codeaudit/fuel,vdumoulin/fuel,aalmah/fuel
|
python
|
## Code Before:
"""Installation script."""
from os import path
from setuptools import find_packages, setup
HERE = path.abspath(path.dirname(__file__))
with open(path.join(HERE, 'README.rst')) as f:
LONG_DESCRIPTION = f.read().strip()
setup(
name='fuel',
version='0.1a1', # PEP 440 compliant
description='Data pipeline framework for machine learning',
long_description=LONG_DESCRIPTION,
url='https://github.com/bartvm/fuel.git',
author='Universite de Montreal',
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Utilities',
'Topic :: Scientific/Engineering',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
],
keywords='dataset data iteration pipeline processing',
packages=find_packages(exclude=['tests']),
install_requires=['six', 'picklable_itertools', 'toolz', 'pyyaml', 'h5py',
'tables'],
scripts=['bin/fuel-convert']
)
## Instruction:
Add fuel-download to installed scripts
## Code After:
"""Installation script."""
from os import path
from setuptools import find_packages, setup
HERE = path.abspath(path.dirname(__file__))
with open(path.join(HERE, 'README.rst')) as f:
LONG_DESCRIPTION = f.read().strip()
setup(
name='fuel',
version='0.1a1', # PEP 440 compliant
description='Data pipeline framework for machine learning',
long_description=LONG_DESCRIPTION,
url='https://github.com/bartvm/fuel.git',
author='Universite de Montreal',
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Utilities',
'Topic :: Scientific/Engineering',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
],
keywords='dataset data iteration pipeline processing',
packages=find_packages(exclude=['tests']),
install_requires=['six', 'picklable_itertools', 'toolz', 'pyyaml', 'h5py',
'tables'],
scripts=['bin/fuel-convert', 'bin/fuel-download']
)
|
...
packages=find_packages(exclude=['tests']),
install_requires=['six', 'picklable_itertools', 'toolz', 'pyyaml', 'h5py',
'tables'],
scripts=['bin/fuel-convert', 'bin/fuel-download']
)
...
|
1db5fefc1752b71bf11fbf63853f7c93bcc526f5
|
tests/macaroon_property_tests.py
|
tests/macaroon_property_tests.py
|
from __future__ import unicode_literals
from mock import *
from nose.tools import *
from hypothesis import *
from hypothesis.specifiers import *
from six import text_type, binary_type
from pymacaroons import Macaroon, Verifier
ascii_text_stategy = strategy(text_type).map(
lambda s: s.encode('ascii', 'ignore')
)
ascii_bin_strategy = strategy(binary_type).map(
lambda s: s.decode('ascii', 'ignore')
)
class TestMacaroon(object):
def setup(self):
pass
@given(
key_id=one_of((ascii_text_stategy, ascii_bin_strategy)),
loc=one_of((ascii_text_stategy, ascii_bin_strategy)),
key=one_of((ascii_text_stategy, ascii_bin_strategy))
)
def test_serializing_deserializing_macaroon(self, key_id, loc, key):
assume(key_id and loc and key)
macaroon = Macaroon(
location=loc,
identifier=key_id,
key=key
)
deserialized = Macaroon.deserialize(macaroon.serialize())
assert_equal(macaroon.identifier, deserialized.identifier)
assert_equal(macaroon.location, deserialized.location)
assert_equal(macaroon.signature, deserialized.signature)
|
from __future__ import unicode_literals
from mock import *
from nose.tools import *
from hypothesis import *
from hypothesis.specifiers import *
from six import text_type, binary_type
from pymacaroons import Macaroon, Verifier
from pymacaroons.utils import convert_to_bytes
ascii_text_strategy = strategy(
[sampled_from(map(chr, range(0, 128)))]
).map(lambda c: ''.join(c))
ascii_bin_strategy = strategy(ascii_text_strategy).map(
lambda s: convert_to_bytes(s)
)
class TestMacaroon(object):
def setup(self):
pass
@given(
key_id=one_of((ascii_text_strategy, ascii_bin_strategy)),
loc=one_of((ascii_text_strategy, ascii_bin_strategy)),
key=one_of((ascii_text_strategy, ascii_bin_strategy))
)
def test_serializing_deserializing_macaroon(self, key_id, loc, key):
assume(key_id and loc and key)
macaroon = Macaroon(
location=loc,
identifier=key_id,
key=key
)
deserialized = Macaroon.deserialize(macaroon.serialize())
assert_equal(macaroon.identifier, deserialized.identifier)
assert_equal(macaroon.location, deserialized.location)
assert_equal(macaroon.signature, deserialized.signature)
|
Improve strategies in property tests
|
Improve strategies in property tests
|
Python
|
mit
|
matrix-org/pymacaroons,matrix-org/pymacaroons,ecordell/pymacaroons,illicitonion/pymacaroons
|
python
|
## Code Before:
from __future__ import unicode_literals
from mock import *
from nose.tools import *
from hypothesis import *
from hypothesis.specifiers import *
from six import text_type, binary_type
from pymacaroons import Macaroon, Verifier
ascii_text_stategy = strategy(text_type).map(
lambda s: s.encode('ascii', 'ignore')
)
ascii_bin_strategy = strategy(binary_type).map(
lambda s: s.decode('ascii', 'ignore')
)
class TestMacaroon(object):
def setup(self):
pass
@given(
key_id=one_of((ascii_text_stategy, ascii_bin_strategy)),
loc=one_of((ascii_text_stategy, ascii_bin_strategy)),
key=one_of((ascii_text_stategy, ascii_bin_strategy))
)
def test_serializing_deserializing_macaroon(self, key_id, loc, key):
assume(key_id and loc and key)
macaroon = Macaroon(
location=loc,
identifier=key_id,
key=key
)
deserialized = Macaroon.deserialize(macaroon.serialize())
assert_equal(macaroon.identifier, deserialized.identifier)
assert_equal(macaroon.location, deserialized.location)
assert_equal(macaroon.signature, deserialized.signature)
## Instruction:
Improve strategies in property tests
## Code After:
from __future__ import unicode_literals
from mock import *
from nose.tools import *
from hypothesis import *
from hypothesis.specifiers import *
from six import text_type, binary_type
from pymacaroons import Macaroon, Verifier
from pymacaroons.utils import convert_to_bytes
ascii_text_strategy = strategy(
[sampled_from(map(chr, range(0, 128)))]
).map(lambda c: ''.join(c))
ascii_bin_strategy = strategy(ascii_text_strategy).map(
lambda s: convert_to_bytes(s)
)
class TestMacaroon(object):
def setup(self):
pass
@given(
key_id=one_of((ascii_text_strategy, ascii_bin_strategy)),
loc=one_of((ascii_text_strategy, ascii_bin_strategy)),
key=one_of((ascii_text_strategy, ascii_bin_strategy))
)
def test_serializing_deserializing_macaroon(self, key_id, loc, key):
assume(key_id and loc and key)
macaroon = Macaroon(
location=loc,
identifier=key_id,
key=key
)
deserialized = Macaroon.deserialize(macaroon.serialize())
assert_equal(macaroon.identifier, deserialized.identifier)
assert_equal(macaroon.location, deserialized.location)
assert_equal(macaroon.signature, deserialized.signature)
|
...
from six import text_type, binary_type
from pymacaroons import Macaroon, Verifier
from pymacaroons.utils import convert_to_bytes
ascii_text_strategy = strategy(
[sampled_from(map(chr, range(0, 128)))]
).map(lambda c: ''.join(c))
ascii_bin_strategy = strategy(ascii_text_strategy).map(
lambda s: convert_to_bytes(s)
)
...
pass
@given(
key_id=one_of((ascii_text_strategy, ascii_bin_strategy)),
loc=one_of((ascii_text_strategy, ascii_bin_strategy)),
key=one_of((ascii_text_strategy, ascii_bin_strategy))
)
def test_serializing_deserializing_macaroon(self, key_id, loc, key):
assume(key_id and loc and key)
...
|
3a3997b19966560b828efb1699ee29a58cacbfc8
|
spriteworld/configs/cobra/common.py
|
spriteworld/configs/cobra/common.py
|
"""Shared definitions and methods across all COBRA tasks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from spriteworld import action_spaces
from spriteworld import renderers as spriteworld_renderers
def action_space():
return action_spaces.SelectMove(scale=0.25, noise_scale=0.05)
def renderers():
return {
'image':
spriteworld_renderers.PILRenderer(
image_size=(64, 64),
anti_aliasing=5,
color_to_rgb=spriteworld_renderers.color_maps.hsv_to_rgb,
)
}
|
"""Shared definitions and methods across all COBRA tasks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from spriteworld import action_spaces
from spriteworld import renderers as spriteworld_renderers
def action_space():
return action_spaces.SelectMove(scale=0.25)
def renderers():
return {
'image':
spriteworld_renderers.PILRenderer(
image_size=(64, 64),
anti_aliasing=5,
color_to_rgb=spriteworld_renderers.color_maps.hsv_to_rgb,
)
}
|
Remove noise from default COBRA configs.
|
Remove noise from default COBRA configs.
PiperOrigin-RevId: 265733849
Change-Id: Ie0e7c0385497852fd85c769ee85c951542c14463
|
Python
|
apache-2.0
|
deepmind/spriteworld
|
python
|
## Code Before:
"""Shared definitions and methods across all COBRA tasks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from spriteworld import action_spaces
from spriteworld import renderers as spriteworld_renderers
def action_space():
return action_spaces.SelectMove(scale=0.25, noise_scale=0.05)
def renderers():
return {
'image':
spriteworld_renderers.PILRenderer(
image_size=(64, 64),
anti_aliasing=5,
color_to_rgb=spriteworld_renderers.color_maps.hsv_to_rgb,
)
}
## Instruction:
Remove noise from default COBRA configs.
PiperOrigin-RevId: 265733849
Change-Id: Ie0e7c0385497852fd85c769ee85c951542c14463
## Code After:
"""Shared definitions and methods across all COBRA tasks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from spriteworld import action_spaces
from spriteworld import renderers as spriteworld_renderers
def action_space():
return action_spaces.SelectMove(scale=0.25)
def renderers():
return {
'image':
spriteworld_renderers.PILRenderer(
image_size=(64, 64),
anti_aliasing=5,
color_to_rgb=spriteworld_renderers.color_maps.hsv_to_rgb,
)
}
|
# ... existing code ...
def action_space():
return action_spaces.SelectMove(scale=0.25)
def renderers():
# ... rest of the code ...
|
ed168c2f16e41cd3782e9b6951016f726392c0f8
|
02_code/web/eorder-ws/src/main/java/com/innovaee/eorder/dao/hibernate/HibernateUserDao.java
|
02_code/web/eorder-ws/src/main/java/com/innovaee/eorder/dao/hibernate/HibernateUserDao.java
|
/***********************************************
* Filename : HibernateUserDao.java
* Copyright : Copyright (c) 2014
* Company : Innovaee
* Created : 11/27/2014
************************************************/
package com.innovaee.eorder.dao.hibernate;
import com.innovaee.eorder.dao.UserDao;
import com.innovaee.eorder.entity.User;
import java.util.List;
/**
* @Title: HibernateUserDao
* @Description: 用户数据访问对象实现类
*
* @version V1.0
*/
public class HibernateUserDao extends HibernateBaseDao<User> implements UserDao {
/**
* 根据手机号码得到用户
*
* @param cellphone
* 手机号码
* @return 用户
*/
public User getUserByCellphone(String cellphone) {
String hql = "from User as user where user.cellphone = ?";
Object[] paras = { cellphone };
List<User> users = getPage(0, 1, hql, paras);
if (0 != users.size()) {
return users.get(0);
}
return null;
}
}
|
/***********************************************
* Filename : HibernateUserDao.java
* Copyright : Copyright (c) 2014
* Company : Innovaee
* Created : 11/27/2014
************************************************/
package com.innovaee.eorder.dao.hibernate;
import com.innovaee.eorder.dao.UserDao;
import com.innovaee.eorder.entity.User;
import org.hibernate.Criteria;
import org.hibernate.Session;
import org.hibernate.criterion.Restrictions;
import org.springframework.orm.hibernate3.HibernateCallback;
/**
* @Title: HibernateUserDao
* @Description: 用户数据访问对象实现类
*
* @version V1.0
*/
public class HibernateUserDao extends HibernateBaseDao<User> implements UserDao {
/**
* 根据手机号码得到用户
*
* @param cellphone
* 手机号码
* @return 用户
*/
public User getUserByCellphone(final String cellphone) {
// String hql = "from User as user where user.cellphone = ?";
// Object[] paras = { cellphone };
// List<User> users = getPage(0, 1, hql, paras);
// if (0 != users.size()) {
// return users.get(0);
// }
//
// return null;
return getHibernateTemplate().execute(new HibernateCallback<User>(){
public User doInHibernate(Session session) {
Criteria criteria = session.createCriteria(User.class);
criteria.add(Restrictions.eq("cellphone", cellphone));
return (User)(criteria.list().iterator().next());
}
});
}
}
|
Use Hibernate Criteria to query User by cellphone
|
Use Hibernate Criteria to query User by cellphone
|
Java
|
mit
|
aaronluo/cuoxiazi,aaronluo/cuoxiazi,aaronluo/cuoxiazi
|
java
|
## Code Before:
/***********************************************
* Filename : HibernateUserDao.java
* Copyright : Copyright (c) 2014
* Company : Innovaee
* Created : 11/27/2014
************************************************/
package com.innovaee.eorder.dao.hibernate;
import com.innovaee.eorder.dao.UserDao;
import com.innovaee.eorder.entity.User;
import java.util.List;
/**
* @Title: HibernateUserDao
* @Description: 用户数据访问对象实现类
*
* @version V1.0
*/
public class HibernateUserDao extends HibernateBaseDao<User> implements UserDao {
/**
* 根据手机号码得到用户
*
* @param cellphone
* 手机号码
* @return 用户
*/
public User getUserByCellphone(String cellphone) {
String hql = "from User as user where user.cellphone = ?";
Object[] paras = { cellphone };
List<User> users = getPage(0, 1, hql, paras);
if (0 != users.size()) {
return users.get(0);
}
return null;
}
}
## Instruction:
Use Hibernate Criteria to query User by cellphone
## Code After:
/***********************************************
* Filename : HibernateUserDao.java
* Copyright : Copyright (c) 2014
* Company : Innovaee
* Created : 11/27/2014
************************************************/
package com.innovaee.eorder.dao.hibernate;
import com.innovaee.eorder.dao.UserDao;
import com.innovaee.eorder.entity.User;
import org.hibernate.Criteria;
import org.hibernate.Session;
import org.hibernate.criterion.Restrictions;
import org.springframework.orm.hibernate3.HibernateCallback;
/**
* @Title: HibernateUserDao
* @Description: 用户数据访问对象实现类
*
* @version V1.0
*/
public class HibernateUserDao extends HibernateBaseDao<User> implements UserDao {
/**
* 根据手机号码得到用户
*
* @param cellphone
* 手机号码
* @return 用户
*/
public User getUserByCellphone(final String cellphone) {
// String hql = "from User as user where user.cellphone = ?";
// Object[] paras = { cellphone };
// List<User> users = getPage(0, 1, hql, paras);
// if (0 != users.size()) {
// return users.get(0);
// }
//
// return null;
return getHibernateTemplate().execute(new HibernateCallback<User>(){
public User doInHibernate(Session session) {
Criteria criteria = session.createCriteria(User.class);
criteria.add(Restrictions.eq("cellphone", cellphone));
return (User)(criteria.list().iterator().next());
}
});
}
}
|
...
import com.innovaee.eorder.dao.UserDao;
import com.innovaee.eorder.entity.User;
import org.hibernate.Criteria;
import org.hibernate.Session;
import org.hibernate.criterion.Restrictions;
import org.springframework.orm.hibernate3.HibernateCallback;
/**
* @Title: HibernateUserDao
...
* 手机号码
* @return 用户
*/
public User getUserByCellphone(final String cellphone) {
// String hql = "from User as user where user.cellphone = ?";
// Object[] paras = { cellphone };
// List<User> users = getPage(0, 1, hql, paras);
// if (0 != users.size()) {
// return users.get(0);
// }
//
// return null;
return getHibernateTemplate().execute(new HibernateCallback<User>(){
public User doInHibernate(Session session) {
Criteria criteria = session.createCriteria(User.class);
criteria.add(Restrictions.eq("cellphone", cellphone));
return (User)(criteria.list().iterator().next());
}
});
}
}
...
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.