commit
stringlengths 40
40
| old_file
stringlengths 4
234
| new_file
stringlengths 4
234
| old_contents
stringlengths 10
3.01k
| new_contents
stringlengths 19
3.38k
| subject
stringlengths 16
736
| message
stringlengths 17
2.63k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
82.6k
| config
stringclasses 4
values | content
stringlengths 134
4.41k
| fuzzy_diff
stringlengths 29
3.44k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
404e84224ed2e2acce9d1d67ae424a09fd89359e
|
src/vm/jvm/runtime/org/perl6/nqp/sixmodel/reprs/VMArrayInstanceBase.java
|
src/vm/jvm/runtime/org/perl6/nqp/sixmodel/reprs/VMArrayInstanceBase.java
|
package org.perl6.nqp.sixmodel.reprs;
import java.lang.System;
import org.perl6.nqp.runtime.ExceptionHandling;
import org.perl6.nqp.runtime.ThreadContext;
import org.perl6.nqp.sixmodel.SixModelObject;
public class VMArrayInstanceBase extends SixModelObject {
public long[] dimensions(ThreadContext tc) {
return new long[] { this.elems(tc) };
}
public void set_dimensions(ThreadContext tc, long[] dims) {
if (dims.length != 1)
throw ExceptionHandling.dieInternal(tc, "A dynamic array can only have a single dimension");
this.set_elems(tc, dims[0]);
}
}
|
package org.perl6.nqp.sixmodel.reprs;
import java.lang.System;
import org.perl6.nqp.runtime.ExceptionHandling;
import org.perl6.nqp.runtime.ThreadContext;
import org.perl6.nqp.sixmodel.SixModelObject;
public class VMArrayInstanceBase extends SixModelObject {
public long[] dimensions(ThreadContext tc) {
return new long[] { this.elems(tc) };
}
public void set_dimensions(ThreadContext tc, long[] dims) {
if (dims.length != 1)
throw ExceptionHandling.dieInternal(tc, "A dynamic array can only have a single dimension");
this.set_elems(tc, dims[0]);
}
public SixModelObject at_pos_multidim_boxed(ThreadContext tc, long[] indices) {
if (indices.length != 1)
throw ExceptionHandling.dieInternal(tc, "A dynamic array can only be indexed with a single dimension");
return this.at_pos_boxed(tc, indices[0]);
}
public void at_pos_multidim_native(ThreadContext tc, long[] indices) {
if (indices.length != 1)
throw ExceptionHandling.dieInternal(tc, "A dynamic array can only be indexed with a single dimension");
this.at_pos_native(tc, indices[0]);
}
public void bind_pos_multidim_boxed(ThreadContext tc, long[] indices, SixModelObject value) {
if (indices.length != 1)
throw ExceptionHandling.dieInternal(tc, "A dynamic array can only be indexed with a single dimension");
this.bind_pos_boxed(tc, indices[0], value);
}
public void bind_pos_multidim_native(ThreadContext tc, long[] indices) {
if (indices.length != 1)
throw ExceptionHandling.dieInternal(tc, "A dynamic array can only be indexed with a single dimension");
this.bind_pos_native(tc, indices[0]);
}
}
|
Implement multi-dim access funcs in VMArray.
|
Implement multi-dim access funcs in VMArray.
|
Java
|
artistic-2.0
|
cygx/nqp,cygx/nqp,cygx/nqp,cygx/nqp,cygx/nqp,cygx/nqp,cygx/nqp,cygx/nqp
|
java
|
## Code Before:
package org.perl6.nqp.sixmodel.reprs;
import java.lang.System;
import org.perl6.nqp.runtime.ExceptionHandling;
import org.perl6.nqp.runtime.ThreadContext;
import org.perl6.nqp.sixmodel.SixModelObject;
public class VMArrayInstanceBase extends SixModelObject {
public long[] dimensions(ThreadContext tc) {
return new long[] { this.elems(tc) };
}
public void set_dimensions(ThreadContext tc, long[] dims) {
if (dims.length != 1)
throw ExceptionHandling.dieInternal(tc, "A dynamic array can only have a single dimension");
this.set_elems(tc, dims[0]);
}
}
## Instruction:
Implement multi-dim access funcs in VMArray.
## Code After:
package org.perl6.nqp.sixmodel.reprs;
import java.lang.System;
import org.perl6.nqp.runtime.ExceptionHandling;
import org.perl6.nqp.runtime.ThreadContext;
import org.perl6.nqp.sixmodel.SixModelObject;
public class VMArrayInstanceBase extends SixModelObject {
public long[] dimensions(ThreadContext tc) {
return new long[] { this.elems(tc) };
}
public void set_dimensions(ThreadContext tc, long[] dims) {
if (dims.length != 1)
throw ExceptionHandling.dieInternal(tc, "A dynamic array can only have a single dimension");
this.set_elems(tc, dims[0]);
}
public SixModelObject at_pos_multidim_boxed(ThreadContext tc, long[] indices) {
if (indices.length != 1)
throw ExceptionHandling.dieInternal(tc, "A dynamic array can only be indexed with a single dimension");
return this.at_pos_boxed(tc, indices[0]);
}
public void at_pos_multidim_native(ThreadContext tc, long[] indices) {
if (indices.length != 1)
throw ExceptionHandling.dieInternal(tc, "A dynamic array can only be indexed with a single dimension");
this.at_pos_native(tc, indices[0]);
}
public void bind_pos_multidim_boxed(ThreadContext tc, long[] indices, SixModelObject value) {
if (indices.length != 1)
throw ExceptionHandling.dieInternal(tc, "A dynamic array can only be indexed with a single dimension");
this.bind_pos_boxed(tc, indices[0], value);
}
public void bind_pos_multidim_native(ThreadContext tc, long[] indices) {
if (indices.length != 1)
throw ExceptionHandling.dieInternal(tc, "A dynamic array can only be indexed with a single dimension");
this.bind_pos_native(tc, indices[0]);
}
}
|
// ... existing code ...
throw ExceptionHandling.dieInternal(tc, "A dynamic array can only have a single dimension");
this.set_elems(tc, dims[0]);
}
public SixModelObject at_pos_multidim_boxed(ThreadContext tc, long[] indices) {
if (indices.length != 1)
throw ExceptionHandling.dieInternal(tc, "A dynamic array can only be indexed with a single dimension");
return this.at_pos_boxed(tc, indices[0]);
}
public void at_pos_multidim_native(ThreadContext tc, long[] indices) {
if (indices.length != 1)
throw ExceptionHandling.dieInternal(tc, "A dynamic array can only be indexed with a single dimension");
this.at_pos_native(tc, indices[0]);
}
public void bind_pos_multidim_boxed(ThreadContext tc, long[] indices, SixModelObject value) {
if (indices.length != 1)
throw ExceptionHandling.dieInternal(tc, "A dynamic array can only be indexed with a single dimension");
this.bind_pos_boxed(tc, indices[0], value);
}
public void bind_pos_multidim_native(ThreadContext tc, long[] indices) {
if (indices.length != 1)
throw ExceptionHandling.dieInternal(tc, "A dynamic array can only be indexed with a single dimension");
this.bind_pos_native(tc, indices[0]);
}
}
// ... rest of the code ...
|
1188c6fc6040387f404b1f11a05d2344d01ff0d4
|
app/src/main/java/com/akexorcist/mvpsimple/module/feed/FeedActivity.java
|
app/src/main/java/com/akexorcist/mvpsimple/module/feed/FeedActivity.java
|
package com.akexorcist.mvpsimple.module.feed;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.RecyclerView;
import com.akexorcist.mvpsimple.R;
public class FeedActivity extends AppCompatActivity implements FeedContractor.View {
private FeedContractor.Presenter feedPresenter;
private RecyclerView rvPostList;
private FeedAdapter feedAdapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(com.akexorcist.mvpsimple.R.layout.activity_feed);
bindView();
setupView();
createPresenter();
}
private void bindView() {
rvPostList = (RecyclerView) findViewById(R.id.rv_post_list);
}
private void setupView() {
feedAdapter = new FeedAdapter();
rvPostList.setAdapter(feedAdapter);
}
private void createPresenter() {
FeedPresenter.createPresenter(this);
}
@Override
protected void onResume() {
super.onResume();
feedPresenter.start();
}
@Override
public void updatePostList() {
feedAdapter.setPostItemList(feedPresenter.getPostList().getItemList());
}
@Override
public void showLoading() {
}
@Override
public void hideLoading() {
}
@Override
public void setPresenter(FeedContractor.Presenter presenter) {
this.feedPresenter = presenter;
}
}
|
package com.akexorcist.mvpsimple.module.feed;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import com.akexorcist.mvpsimple.R;
public class FeedActivity extends AppCompatActivity implements FeedContractor.View {
private FeedContractor.Presenter feedPresenter;
private RecyclerView rvPostList;
private FeedAdapter feedAdapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(com.akexorcist.mvpsimple.R.layout.activity_feed);
bindView();
setupView();
createPresenter();
}
private void bindView() {
rvPostList = (RecyclerView) findViewById(R.id.rv_post_list);
}
private void setupView() {
feedAdapter = new FeedAdapter();
rvPostList.setAdapter(feedAdapter);
rvPostList.setLayoutManager(new LinearLayoutManager(this, LinearLayoutManager.VERTICAL, false));
}
private void createPresenter() {
FeedPresenter.createPresenter(this);
}
@Override
protected void onResume() {
super.onResume();
feedPresenter.start();
}
@Override
public void updatePostList() {
feedAdapter.setPostItemList(feedPresenter.getPostList().getItemList());
}
@Override
public void showLoading() {
}
@Override
public void hideLoading() {
}
@Override
public void setPresenter(FeedContractor.Presenter presenter) {
this.feedPresenter = presenter;
}
}
|
Add linear layout manager to recycler view
|
Add linear layout manager to recycler view
|
Java
|
apache-2.0
|
akexorcist/Simple-MVP
|
java
|
## Code Before:
package com.akexorcist.mvpsimple.module.feed;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.RecyclerView;
import com.akexorcist.mvpsimple.R;
public class FeedActivity extends AppCompatActivity implements FeedContractor.View {
private FeedContractor.Presenter feedPresenter;
private RecyclerView rvPostList;
private FeedAdapter feedAdapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(com.akexorcist.mvpsimple.R.layout.activity_feed);
bindView();
setupView();
createPresenter();
}
private void bindView() {
rvPostList = (RecyclerView) findViewById(R.id.rv_post_list);
}
private void setupView() {
feedAdapter = new FeedAdapter();
rvPostList.setAdapter(feedAdapter);
}
private void createPresenter() {
FeedPresenter.createPresenter(this);
}
@Override
protected void onResume() {
super.onResume();
feedPresenter.start();
}
@Override
public void updatePostList() {
feedAdapter.setPostItemList(feedPresenter.getPostList().getItemList());
}
@Override
public void showLoading() {
}
@Override
public void hideLoading() {
}
@Override
public void setPresenter(FeedContractor.Presenter presenter) {
this.feedPresenter = presenter;
}
}
## Instruction:
Add linear layout manager to recycler view
## Code After:
package com.akexorcist.mvpsimple.module.feed;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import com.akexorcist.mvpsimple.R;
public class FeedActivity extends AppCompatActivity implements FeedContractor.View {
private FeedContractor.Presenter feedPresenter;
private RecyclerView rvPostList;
private FeedAdapter feedAdapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(com.akexorcist.mvpsimple.R.layout.activity_feed);
bindView();
setupView();
createPresenter();
}
private void bindView() {
rvPostList = (RecyclerView) findViewById(R.id.rv_post_list);
}
private void setupView() {
feedAdapter = new FeedAdapter();
rvPostList.setAdapter(feedAdapter);
rvPostList.setLayoutManager(new LinearLayoutManager(this, LinearLayoutManager.VERTICAL, false));
}
private void createPresenter() {
FeedPresenter.createPresenter(this);
}
@Override
protected void onResume() {
super.onResume();
feedPresenter.start();
}
@Override
public void updatePostList() {
feedAdapter.setPostItemList(feedPresenter.getPostList().getItemList());
}
@Override
public void showLoading() {
}
@Override
public void hideLoading() {
}
@Override
public void setPresenter(FeedContractor.Presenter presenter) {
this.feedPresenter = presenter;
}
}
|
# ... existing code ...
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import com.akexorcist.mvpsimple.R;
# ... modified code ...
private void setupView() {
feedAdapter = new FeedAdapter();
rvPostList.setAdapter(feedAdapter);
rvPostList.setLayoutManager(new LinearLayoutManager(this, LinearLayoutManager.VERTICAL, false));
}
private void createPresenter() {
# ... rest of the code ...
|
8b35032c6faac4a22fc63d933af3ce2b13deb32b
|
src/main/java/pl/touk/sputnik/processor/sonar/SonarRunnerBuilder.java
|
src/main/java/pl/touk/sputnik/processor/sonar/SonarRunnerBuilder.java
|
package pl.touk.sputnik.processor.sonar;
import java.util.List;
import org.sonar.runner.api.EmbeddedRunner;
import pl.touk.sputnik.review.Review;
import pl.touk.sputnik.review.ReviewFile;
import com.google.common.collect.Lists;
class SonarRunnerBuilder {
public SonarRunner prepareRunner(Review review) {
List<String> files = Lists.newArrayList();
for (ReviewFile file : review.getFiles()) {
files.add(file.getReviewFilename());
}
SonarRunner sonarRunner = new SonarRunner(files, EmbeddedRunner.create());
return sonarRunner;
}
}
|
package pl.touk.sputnik.processor.sonar;
import java.io.File;
import java.util.List;
import org.sonar.runner.api.EmbeddedRunner;
import pl.touk.sputnik.review.Review;
import pl.touk.sputnik.review.ReviewFile;
import com.google.common.collect.Lists;
class SonarRunnerBuilder {
public SonarRunner prepareRunner(Review review) {
List<String> files = Lists.newArrayList();
/*
* Some sonar plugin do no include git path as the source file path (for
* example, sonar-visual-studio plugin generates filenames that are relative
* to the module csproj file).
*
* Using the file basename and a recursive match allows Sonar to match
* modified files with their indexed names in Sonar.
*
* Although this can generate useless analysis (as some files that are not
* included in a review may be analysed), this has a limited additional
* cost considering that few files have the same basename inside a
* repository.
*/
for (ReviewFile file : review.getFiles()) {
files.add("**/" + new File(file.getReviewFilename()).getName());
}
SonarRunner sonarRunner = new SonarRunner(files, EmbeddedRunner.create());
return sonarRunner;
}
}
|
Use file basename instead of fullname in sonar
|
Use file basename instead of fullname in sonar
Some sonar plugin do no include git path as the source file path (for
example, sonar-visual-studio plugin generates filenames that are
relative to the module csproj file).
Using the file basename and a recursive match allows Sonar to match
modified files with their indexed names in Sonar.
Although this can generate useless analysis (as some files that are not
included in a review may be analysed), this has a limited additional
cost considering that few files have the same basename inside a
repository.
Change-Id: Ib74289e201098d15c618791f47b2dd657c3a1744
|
Java
|
apache-2.0
|
platan/sputnik,mirkosertic/sputnik,zdanek/sputnik,TouK/sputnik,zdanek/sputnik,zdanek/sputnik,TouK/sputnik,zdanek/sputnik,alien11689/sputnik,TouK/sputnik,mirkosertic/sputnik,julien-sobczak/maven-sonar-codereview-plugin,damianszczepanik/sputnik,platan/sputnik,damianszczepanik/sputnik,TouK/sputnik,TouK/sputnik,alien11689/sputnik,mihn/sputnik,mirkosertic/sputnik,mirkosertic/sputnik,alien11689/sputnik,julien-sobczak/maven-sonar-codereview-plugin,alien11689/sputnik,alien11689/sputnik,damianszczepanik/sputnik,julien-sobczak/maven-sonar-codereview-plugin,alien11689/sputnik,platan/sputnik,mihn/sputnik,platan/sputnik,platan/sputnik,zdanek/sputnik,mihn/sputnik
|
java
|
## Code Before:
package pl.touk.sputnik.processor.sonar;
import java.util.List;
import org.sonar.runner.api.EmbeddedRunner;
import pl.touk.sputnik.review.Review;
import pl.touk.sputnik.review.ReviewFile;
import com.google.common.collect.Lists;
class SonarRunnerBuilder {
public SonarRunner prepareRunner(Review review) {
List<String> files = Lists.newArrayList();
for (ReviewFile file : review.getFiles()) {
files.add(file.getReviewFilename());
}
SonarRunner sonarRunner = new SonarRunner(files, EmbeddedRunner.create());
return sonarRunner;
}
}
## Instruction:
Use file basename instead of fullname in sonar
Some sonar plugin do no include git path as the source file path (for
example, sonar-visual-studio plugin generates filenames that are
relative to the module csproj file).
Using the file basename and a recursive match allows Sonar to match
modified files with their indexed names in Sonar.
Although this can generate useless analysis (as some files that are not
included in a review may be analysed), this has a limited additional
cost considering that few files have the same basename inside a
repository.
Change-Id: Ib74289e201098d15c618791f47b2dd657c3a1744
## Code After:
package pl.touk.sputnik.processor.sonar;
import java.io.File;
import java.util.List;
import org.sonar.runner.api.EmbeddedRunner;
import pl.touk.sputnik.review.Review;
import pl.touk.sputnik.review.ReviewFile;
import com.google.common.collect.Lists;
class SonarRunnerBuilder {
public SonarRunner prepareRunner(Review review) {
List<String> files = Lists.newArrayList();
/*
* Some sonar plugin do no include git path as the source file path (for
* example, sonar-visual-studio plugin generates filenames that are relative
* to the module csproj file).
*
* Using the file basename and a recursive match allows Sonar to match
* modified files with their indexed names in Sonar.
*
* Although this can generate useless analysis (as some files that are not
* included in a review may be analysed), this has a limited additional
* cost considering that few files have the same basename inside a
* repository.
*/
for (ReviewFile file : review.getFiles()) {
files.add("**/" + new File(file.getReviewFilename()).getName());
}
SonarRunner sonarRunner = new SonarRunner(files, EmbeddedRunner.create());
return sonarRunner;
}
}
|
...
package pl.touk.sputnik.processor.sonar;
import java.io.File;
import java.util.List;
import org.sonar.runner.api.EmbeddedRunner;
...
class SonarRunnerBuilder {
public SonarRunner prepareRunner(Review review) {
List<String> files = Lists.newArrayList();
/*
* Some sonar plugin do no include git path as the source file path (for
* example, sonar-visual-studio plugin generates filenames that are relative
* to the module csproj file).
*
* Using the file basename and a recursive match allows Sonar to match
* modified files with their indexed names in Sonar.
*
* Although this can generate useless analysis (as some files that are not
* included in a review may be analysed), this has a limited additional
* cost considering that few files have the same basename inside a
* repository.
*/
for (ReviewFile file : review.getFiles()) {
files.add("**/" + new File(file.getReviewFilename()).getName());
}
SonarRunner sonarRunner = new SonarRunner(files, EmbeddedRunner.create());
return sonarRunner;
...
|
28e9cd3e9d047883668263e595978392cd208ac5
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import os
def readfile(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name="imagesort",
version="0.2.0",
packages=find_packages(),
install_requires=[
'ExifRead>=1.4.0',
],
include_package_data=True,
author="Børge Lanes",
author_email="[email protected]",
description=('Organize image files by date taken'),
long_description=readfile("README.rst"),
license="MIT",
keywords="media",
url="https://github.com/leinz/imagesort",
entry_points={
'console_scripts': [
'imagesort = imagesort.imagesort:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Multimedia',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
)
|
from setuptools import setup, find_packages
import os
def readfile(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name="imagesort",
version="0.2.0",
packages=find_packages(),
install_requires=[
'ExifRead>=1.4.0',
],
include_package_data=True,
author="Børge Lanes",
author_email="[email protected]",
description=('Organize image files by date taken'),
long_description=readfile("README.rst"),
license="MIT",
keywords="media",
url="https://github.com/leinz/imagesort",
entry_points={
'console_scripts': [
'imagesort = imagesort.imagesort:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Multimedia',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
)
|
Mark package as OS independent
|
Mark package as OS independent
|
Python
|
mit
|
leinz/imagesort
|
python
|
## Code Before:
from setuptools import setup, find_packages
import os
def readfile(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name="imagesort",
version="0.2.0",
packages=find_packages(),
install_requires=[
'ExifRead>=1.4.0',
],
include_package_data=True,
author="Børge Lanes",
author_email="[email protected]",
description=('Organize image files by date taken'),
long_description=readfile("README.rst"),
license="MIT",
keywords="media",
url="https://github.com/leinz/imagesort",
entry_points={
'console_scripts': [
'imagesort = imagesort.imagesort:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Multimedia',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
)
## Instruction:
Mark package as OS independent
## Code After:
from setuptools import setup, find_packages
import os
def readfile(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name="imagesort",
version="0.2.0",
packages=find_packages(),
install_requires=[
'ExifRead>=1.4.0',
],
include_package_data=True,
author="Børge Lanes",
author_email="[email protected]",
description=('Organize image files by date taken'),
long_description=readfile("README.rst"),
license="MIT",
keywords="media",
url="https://github.com/leinz/imagesort",
entry_points={
'console_scripts': [
'imagesort = imagesort.imagesort:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Multimedia',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
)
|
# ... existing code ...
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Multimedia',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
# ... rest of the code ...
|
27674580f3246ebf5c53eca013337d3f685237bf
|
org.eclipse.winery.common/src/main/java/org/eclipse/winery/common/ids/Namespace.java
|
org.eclipse.winery.common/src/main/java/org/eclipse/winery/common/ids/Namespace.java
|
/*******************************************************************************
* Copyright (c) 2013 University of Stuttgart.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and the Apache License 2.0 which both accompany this distribution,
* and are available at http://www.eclipse.org/legal/epl-v10.html
* and http://www.apache.org/licenses/LICENSE-2.0
*
* Contributors:
* Oliver Kopp - initial API and implementation
*******************************************************************************/
package org.eclipse.winery.common.ids;
import org.eclipse.winery.common.StringEncodedAndDecoded;
public class Namespace extends StringEncodedAndDecoded {
public Namespace(String uri, boolean URLencoded) {
super(uri, URLencoded);
}
}
|
/*******************************************************************************
* Copyright (c) 2013,2015 University of Stuttgart.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and the Apache License 2.0 which both accompany this distribution,
* and are available at http://www.eclipse.org/legal/epl-v10.html
* and http://www.apache.org/licenses/LICENSE-2.0
*
* Contributors:
* Oliver Kopp - initial API and implementation
*******************************************************************************/
package org.eclipse.winery.common.ids;
import org.apache.commons.lang3.StringUtils;
import org.eclipse.winery.common.StringEncodedAndDecoded;
public class Namespace extends StringEncodedAndDecoded {
public Namespace(String uri, boolean URLencoded) {
super(uri, URLencoded);
if (StringUtils.isEmpty(uri)) {
throw new IllegalArgumentException("uri must not be empty or null.");
}
}
}
|
Add check for null parameter
|
Add check for null parameter
|
Java
|
apache-2.0
|
YannicSowoidnich/winery,YannicSowoidnich/winery,YannicSowoidnich/winery,YannicSowoidnich/winery,YannicSowoidnich/winery
|
java
|
## Code Before:
/*******************************************************************************
* Copyright (c) 2013 University of Stuttgart.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and the Apache License 2.0 which both accompany this distribution,
* and are available at http://www.eclipse.org/legal/epl-v10.html
* and http://www.apache.org/licenses/LICENSE-2.0
*
* Contributors:
* Oliver Kopp - initial API and implementation
*******************************************************************************/
package org.eclipse.winery.common.ids;
import org.eclipse.winery.common.StringEncodedAndDecoded;
public class Namespace extends StringEncodedAndDecoded {
public Namespace(String uri, boolean URLencoded) {
super(uri, URLencoded);
}
}
## Instruction:
Add check for null parameter
## Code After:
/*******************************************************************************
* Copyright (c) 2013,2015 University of Stuttgart.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and the Apache License 2.0 which both accompany this distribution,
* and are available at http://www.eclipse.org/legal/epl-v10.html
* and http://www.apache.org/licenses/LICENSE-2.0
*
* Contributors:
* Oliver Kopp - initial API and implementation
*******************************************************************************/
package org.eclipse.winery.common.ids;
import org.apache.commons.lang3.StringUtils;
import org.eclipse.winery.common.StringEncodedAndDecoded;
public class Namespace extends StringEncodedAndDecoded {
public Namespace(String uri, boolean URLencoded) {
super(uri, URLencoded);
if (StringUtils.isEmpty(uri)) {
throw new IllegalArgumentException("uri must not be empty or null.");
}
}
}
|
...
/*******************************************************************************
* Copyright (c) 2013,2015 University of Stuttgart.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and the Apache License 2.0 which both accompany this distribution,
...
*******************************************************************************/
package org.eclipse.winery.common.ids;
import org.apache.commons.lang3.StringUtils;
import org.eclipse.winery.common.StringEncodedAndDecoded;
public class Namespace extends StringEncodedAndDecoded {
...
public Namespace(String uri, boolean URLencoded) {
super(uri, URLencoded);
if (StringUtils.isEmpty(uri)) {
throw new IllegalArgumentException("uri must not be empty or null.");
}
}
}
...
|
6696451b7c7a9b2de5b624b47159efae8fcf06b7
|
opwen_email_server/api/lokole.py
|
opwen_email_server/api/lokole.py
|
def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id']
resource_id = upload_info['resource_id']
resource_type = upload_info['resource_type']
raise NotImplementedError
def download(client_id):
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
|
def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id'] # noqa: F841
resource_id = upload_info['resource_id'] # noqa: F841
resource_type = upload_info['resource_type'] # noqa: F841
raise NotImplementedError
def download(client_id): # noqa: F841
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
|
Disable linter in in-progress code
|
Disable linter in in-progress code
|
Python
|
apache-2.0
|
ascoderu/opwen-cloudserver,ascoderu/opwen-cloudserver
|
python
|
## Code Before:
def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id']
resource_id = upload_info['resource_id']
resource_type = upload_info['resource_type']
raise NotImplementedError
def download(client_id):
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
## Instruction:
Disable linter in in-progress code
## Code After:
def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id'] # noqa: F841
resource_id = upload_info['resource_id'] # noqa: F841
resource_type = upload_info['resource_type'] # noqa: F841
raise NotImplementedError
def download(client_id): # noqa: F841
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
|
// ... existing code ...
:type upload_info: dict
"""
client_id = upload_info['client_id'] # noqa: F841
resource_id = upload_info['resource_id'] # noqa: F841
resource_type = upload_info['resource_type'] # noqa: F841
raise NotImplementedError
def download(client_id): # noqa: F841
"""
:type client_id: str
:rtype dict
// ... rest of the code ...
|
0ac9f362906e6d55d10d4c6ee1e0ce1f288821ee
|
rst2pdf/sectnumlinks.py
|
rst2pdf/sectnumlinks.py
|
import docutils
class SectNumFolder(docutils.nodes.SparseNodeVisitor):
def __init__(self, document):
docutils.nodes.SparseNodeVisitor.__init__(self, document)
self.sectnums = {}
def visit_generated(self, node):
for i in node.parent.parent['ids']:
self.sectnums[i]=node.parent.astext().replace(u'\xa0\xa0\xa0',' ')
class SectRefExpander(docutils.nodes.SparseNodeVisitor):
def __init__(self, document, sectnums):
docutils.nodes.SparseNodeVisitor.__init__(self, document)
self.sectnums = sectnums
def visit_reference(self, node):
if node.get('refid', None) in self.sectnums:
node.children=[docutils.nodes.Text('%s '%self.sectnums[node.get('refid')])]
|
import docutils
class SectNumFolder(docutils.nodes.SparseNodeVisitor):
def __init__(self, document):
docutils.nodes.SparseNodeVisitor.__init__(self, document)
self.sectnums = {}
def visit_generated(self, node):
for i in node.parent.parent['ids']:
self.sectnums[i]=node.parent.astext().replace(u'\xa0\xa0\xa0',' ')
def unknown_visit(self, node):
pass
class SectRefExpander(docutils.nodes.SparseNodeVisitor):
def __init__(self, document, sectnums):
docutils.nodes.SparseNodeVisitor.__init__(self, document)
self.sectnums = sectnums
def visit_reference(self, node):
if node.get('refid', None) in self.sectnums:
node.children=[docutils.nodes.Text('%s '%self.sectnums[node.get('refid')])]
def unknown_visit(self, node):
pass
|
Support visiting unknown nodes in SectNumFolder and SectRefExpander
|
Support visiting unknown nodes in SectNumFolder and SectRefExpander
|
Python
|
mit
|
rst2pdf/rst2pdf,rst2pdf/rst2pdf
|
python
|
## Code Before:
import docutils
class SectNumFolder(docutils.nodes.SparseNodeVisitor):
def __init__(self, document):
docutils.nodes.SparseNodeVisitor.__init__(self, document)
self.sectnums = {}
def visit_generated(self, node):
for i in node.parent.parent['ids']:
self.sectnums[i]=node.parent.astext().replace(u'\xa0\xa0\xa0',' ')
class SectRefExpander(docutils.nodes.SparseNodeVisitor):
def __init__(self, document, sectnums):
docutils.nodes.SparseNodeVisitor.__init__(self, document)
self.sectnums = sectnums
def visit_reference(self, node):
if node.get('refid', None) in self.sectnums:
node.children=[docutils.nodes.Text('%s '%self.sectnums[node.get('refid')])]
## Instruction:
Support visiting unknown nodes in SectNumFolder and SectRefExpander
## Code After:
import docutils
class SectNumFolder(docutils.nodes.SparseNodeVisitor):
def __init__(self, document):
docutils.nodes.SparseNodeVisitor.__init__(self, document)
self.sectnums = {}
def visit_generated(self, node):
for i in node.parent.parent['ids']:
self.sectnums[i]=node.parent.astext().replace(u'\xa0\xa0\xa0',' ')
def unknown_visit(self, node):
pass
class SectRefExpander(docutils.nodes.SparseNodeVisitor):
def __init__(self, document, sectnums):
docutils.nodes.SparseNodeVisitor.__init__(self, document)
self.sectnums = sectnums
def visit_reference(self, node):
if node.get('refid', None) in self.sectnums:
node.children=[docutils.nodes.Text('%s '%self.sectnums[node.get('refid')])]
def unknown_visit(self, node):
pass
|
// ... existing code ...
for i in node.parent.parent['ids']:
self.sectnums[i]=node.parent.astext().replace(u'\xa0\xa0\xa0',' ')
def unknown_visit(self, node):
pass
class SectRefExpander(docutils.nodes.SparseNodeVisitor):
def __init__(self, document, sectnums):
docutils.nodes.SparseNodeVisitor.__init__(self, document)
// ... modified code ...
def visit_reference(self, node):
if node.get('refid', None) in self.sectnums:
node.children=[docutils.nodes.Text('%s '%self.sectnums[node.get('refid')])]
def unknown_visit(self, node):
pass
// ... rest of the code ...
|
5392af2781d6a6a7c71c15ab7699feb3e3b8f2f2
|
setup.py
|
setup.py
|
__author__ = 'katharine'
import os
import sys
from setuptools import setup, find_packages
requirements_path = os.path.join(os.path.dirname(__file__), 'requirements.txt')
with open(requirements_path) as requirements_file:
requirements = [line.strip() for line in requirements_file.readlines()]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
install_requires=requirements,
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=False)
|
__author__ = 'katharine'
import os
import sys
from setuptools import setup, find_packages
requirements_path = os.path.join(os.path.dirname(__file__), 'requirements.txt')
with open(requirements_path) as requirements_file:
requirements = [line.strip() for line in requirements_file.readlines()]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
install_requires=requirements,
package_data={
'javascript.navigator': 'GeoLiteCity.dat'
},
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=False)
|
Add package data for GeoIP DB
|
Add package data for GeoIP DB
|
Python
|
mit
|
pebble/pypkjs
|
python
|
## Code Before:
__author__ = 'katharine'
import os
import sys
from setuptools import setup, find_packages
requirements_path = os.path.join(os.path.dirname(__file__), 'requirements.txt')
with open(requirements_path) as requirements_file:
requirements = [line.strip() for line in requirements_file.readlines()]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
install_requires=requirements,
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=False)
## Instruction:
Add package data for GeoIP DB
## Code After:
__author__ = 'katharine'
import os
import sys
from setuptools import setup, find_packages
requirements_path = os.path.join(os.path.dirname(__file__), 'requirements.txt')
with open(requirements_path) as requirements_file:
requirements = [line.strip() for line in requirements_file.readlines()]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
install_requires=requirements,
package_data={
'javascript.navigator': 'GeoLiteCity.dat'
},
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=False)
|
// ... existing code ...
license='MIT',
packages=find_packages(),
install_requires=requirements,
package_data={
'javascript.navigator': 'GeoLiteCity.dat'
},
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
// ... rest of the code ...
|
aad9653d4626ebd295b76c1fa79217cf8f139c25
|
src/main/java/at/ac/tuwien/inso/repository/LecturerRepository.java
|
src/main/java/at/ac/tuwien/inso/repository/LecturerRepository.java
|
package at.ac.tuwien.inso.repository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import java.util.List;
import at.ac.tuwien.inso.entity.Lecturer;
public interface LecturerRepository extends CrudRepository<Lecturer, Long> {
@Query("select l from Lecturer l where ACCOUNT_ID = ?1")
Lecturer findLecturerByAccountId(Long id);
public List<Lecturer> findAll();
public Lecturer findById(Long id);
}
|
package at.ac.tuwien.inso.repository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import java.util.List;
import at.ac.tuwien.inso.entity.Lecturer;
public interface LecturerRepository extends CrudRepository<Lecturer, Long> {
@Query("select l from Lecturer l where ACCOUNT_ID = ?1")
Lecturer findLecturerByAccountId(Long id);
public List<Lecturer> findAllByIdentificationNumberLikeIgnoreCaseOrNameLikeIgnoreCase(
String identificationNumber,
String name
);
public Lecturer findById(Long id);
}
|
Replace findAll with complicated search operation
|
Replace findAll with complicated search operation
|
Java
|
mit
|
university-information-system/uis,university-information-system/uis,university-information-system/uis,university-information-system/uis
|
java
|
## Code Before:
package at.ac.tuwien.inso.repository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import java.util.List;
import at.ac.tuwien.inso.entity.Lecturer;
public interface LecturerRepository extends CrudRepository<Lecturer, Long> {
@Query("select l from Lecturer l where ACCOUNT_ID = ?1")
Lecturer findLecturerByAccountId(Long id);
public List<Lecturer> findAll();
public Lecturer findById(Long id);
}
## Instruction:
Replace findAll with complicated search operation
## Code After:
package at.ac.tuwien.inso.repository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import java.util.List;
import at.ac.tuwien.inso.entity.Lecturer;
public interface LecturerRepository extends CrudRepository<Lecturer, Long> {
@Query("select l from Lecturer l where ACCOUNT_ID = ?1")
Lecturer findLecturerByAccountId(Long id);
public List<Lecturer> findAllByIdentificationNumberLikeIgnoreCaseOrNameLikeIgnoreCase(
String identificationNumber,
String name
);
public Lecturer findById(Long id);
}
|
// ... existing code ...
@Query("select l from Lecturer l where ACCOUNT_ID = ?1")
Lecturer findLecturerByAccountId(Long id);
public List<Lecturer> findAllByIdentificationNumberLikeIgnoreCaseOrNameLikeIgnoreCase(
String identificationNumber,
String name
);
public Lecturer findById(Long id);
}
// ... rest of the code ...
|
4ded2f46d5c5b97df68d23a771fd289be7976bd8
|
src/test/java/tests/BaseTest.java
|
src/test/java/tests/BaseTest.java
|
package tests;
import org.openqa.selenium.WebDriver;
import org.testng.Assert;
import org.testng.annotations.*;
import pages.HomePage;
import selenium.WebDriverFactory;
import utils.Log4Test;
import utils.PropertyLoader;
public class BaseTest {
public static WebDriver driver;
@BeforeSuite
public void intEnv() {
driver = WebDriverFactory.initDriver(PropertyLoader.loadProperty("browser.name"));
driver.manage().window().maximize();
}
@BeforeTest
public void beforeTest() {
Log4Test.info("*#*#*#*#*#* Start of the test suite. *#*#*#*#*#*");
}
@BeforeMethod
public void beforeMethod() {
driver.manage().deleteAllCookies();
Log4Test.info("-_-_-_-_- Start of the test. -_-_-_-_-");
HomePage homePage = new HomePage(driver);
homePage.open();
Assert.assertTrue(homePage.isOpened(), Log4Test.error("Home page is not open."));
}
@AfterMethod
public void afterMethod() {
Log4Test.info("-_-_-_-_- End of the test. -_-_-_-_-");
}
@AfterTest
public void afterTest() {
Log4Test.info("*#*#*#*#*#* End of the test suite. *#*#*#*#*#*");
}
@AfterSuite
public void shutEnv() {
if (driver != null) {
driver.quit();
}
}
}
|
package tests;
import org.openqa.selenium.WebDriver;
import org.testng.Assert;
import org.testng.annotations.*;
import pages.HomePage;
import selenium.WebDriverFactory;
import utils.Log4Test;
import utils.PropertyLoader;
public class BaseTest {
public static WebDriver driver;
@BeforeSuite
public void intEnv() {
driver = WebDriverFactory.initDriver(PropertyLoader.loadProperty("browser.name"));
driver.manage().window().maximize();
}
@BeforeTest
public void beforeTest() {
Log4Test.info("*#*#*#*#*#* Start of the test suite. *#*#*#*#*#*");
}
@BeforeMethod
public void beforeMethod() {
Log4Test.info("-_-_-_-_- Start of the test. -_-_-_-_-");
HomePage homePage = new HomePage(driver);
homePage.open();
Assert.assertTrue(homePage.isOpened(), Log4Test.error("Home page is not open."));
}
@AfterMethod
public void afterMethod() {
Log4Test.info("-_-_-_-_- End of the test. -_-_-_-_-");
}
@AfterTest
public void afterTest() {
Log4Test.info("*#*#*#*#*#* End of the test suite. *#*#*#*#*#*");
}
@AfterSuite
public void shutEnv() {
if (driver != null) {
driver.quit();
}
}
}
|
Revert "Added cookies deletion step to @BeforeMethod."
|
Revert "Added cookies deletion step to @BeforeMethod."
This reverts commit bc44573ae5011b2155badbc77c530189e0b356ce.
|
Java
|
mit
|
meeroslaph/bionic-qa-selenium
|
java
|
## Code Before:
package tests;
import org.openqa.selenium.WebDriver;
import org.testng.Assert;
import org.testng.annotations.*;
import pages.HomePage;
import selenium.WebDriverFactory;
import utils.Log4Test;
import utils.PropertyLoader;
public class BaseTest {
public static WebDriver driver;
@BeforeSuite
public void intEnv() {
driver = WebDriverFactory.initDriver(PropertyLoader.loadProperty("browser.name"));
driver.manage().window().maximize();
}
@BeforeTest
public void beforeTest() {
Log4Test.info("*#*#*#*#*#* Start of the test suite. *#*#*#*#*#*");
}
@BeforeMethod
public void beforeMethod() {
driver.manage().deleteAllCookies();
Log4Test.info("-_-_-_-_- Start of the test. -_-_-_-_-");
HomePage homePage = new HomePage(driver);
homePage.open();
Assert.assertTrue(homePage.isOpened(), Log4Test.error("Home page is not open."));
}
@AfterMethod
public void afterMethod() {
Log4Test.info("-_-_-_-_- End of the test. -_-_-_-_-");
}
@AfterTest
public void afterTest() {
Log4Test.info("*#*#*#*#*#* End of the test suite. *#*#*#*#*#*");
}
@AfterSuite
public void shutEnv() {
if (driver != null) {
driver.quit();
}
}
}
## Instruction:
Revert "Added cookies deletion step to @BeforeMethod."
This reverts commit bc44573ae5011b2155badbc77c530189e0b356ce.
## Code After:
package tests;
import org.openqa.selenium.WebDriver;
import org.testng.Assert;
import org.testng.annotations.*;
import pages.HomePage;
import selenium.WebDriverFactory;
import utils.Log4Test;
import utils.PropertyLoader;
public class BaseTest {
public static WebDriver driver;
@BeforeSuite
public void intEnv() {
driver = WebDriverFactory.initDriver(PropertyLoader.loadProperty("browser.name"));
driver.manage().window().maximize();
}
@BeforeTest
public void beforeTest() {
Log4Test.info("*#*#*#*#*#* Start of the test suite. *#*#*#*#*#*");
}
@BeforeMethod
public void beforeMethod() {
Log4Test.info("-_-_-_-_- Start of the test. -_-_-_-_-");
HomePage homePage = new HomePage(driver);
homePage.open();
Assert.assertTrue(homePage.isOpened(), Log4Test.error("Home page is not open."));
}
@AfterMethod
public void afterMethod() {
Log4Test.info("-_-_-_-_- End of the test. -_-_-_-_-");
}
@AfterTest
public void afterTest() {
Log4Test.info("*#*#*#*#*#* End of the test suite. *#*#*#*#*#*");
}
@AfterSuite
public void shutEnv() {
if (driver != null) {
driver.quit();
}
}
}
|
// ... existing code ...
@BeforeMethod
public void beforeMethod() {
Log4Test.info("-_-_-_-_- Start of the test. -_-_-_-_-");
HomePage homePage = new HomePage(driver);
homePage.open();
// ... rest of the code ...
|
14e000acafe7c374294a7de6ffe295c9d56df68f
|
tests/test_postgresql_specific.py
|
tests/test_postgresql_specific.py
|
import pytest
from tests.utils import is_postgresql_env_with_json_field
@pytest.mark.skipif(not is_postgresql_env_with_json_field(),
reason="requires postgresql and Django 1.9+")
@pytest.mark.django_db
def test_dirty_json_field():
from tests.models import TestModelWithJSONField
tm = TestModelWithJSONField.objects.create(json_field={'data': 'dummy_data'})
assert tm.get_dirty_fields() == {}
tm.json_field = {'data': 'foo'}
assert tm.get_dirty_fields() == {'json_field': {'data': 'dummy_data'}}
|
import pytest
from tests.utils import is_postgresql_env_with_json_field
@pytest.mark.skipif(not is_postgresql_env_with_json_field(),
reason="requires postgresql and Django 1.9+")
@pytest.mark.django_db
def test_dirty_json_field():
from tests.models import TestModelWithJSONField
tm = TestModelWithJSONField.objects.create(json_field={'data': [1, 2, 3]})
data = tm.json_field['data']
data.append(4)
assert tm.get_dirty_fields(verbose=True) == {
'json_field': {
'current': {'data': [1, 2, 3, 4]},
'saved': {'data': [1, 2, 3]}
}
}
|
Update postgresql json_field to reflect deepcopy fix
|
Update postgresql json_field to reflect deepcopy fix
|
Python
|
bsd-3-clause
|
jdotjdot/django-dirtyfields,romgar/django-dirtyfields,smn/django-dirtyfields
|
python
|
## Code Before:
import pytest
from tests.utils import is_postgresql_env_with_json_field
@pytest.mark.skipif(not is_postgresql_env_with_json_field(),
reason="requires postgresql and Django 1.9+")
@pytest.mark.django_db
def test_dirty_json_field():
from tests.models import TestModelWithJSONField
tm = TestModelWithJSONField.objects.create(json_field={'data': 'dummy_data'})
assert tm.get_dirty_fields() == {}
tm.json_field = {'data': 'foo'}
assert tm.get_dirty_fields() == {'json_field': {'data': 'dummy_data'}}
## Instruction:
Update postgresql json_field to reflect deepcopy fix
## Code After:
import pytest
from tests.utils import is_postgresql_env_with_json_field
@pytest.mark.skipif(not is_postgresql_env_with_json_field(),
reason="requires postgresql and Django 1.9+")
@pytest.mark.django_db
def test_dirty_json_field():
from tests.models import TestModelWithJSONField
tm = TestModelWithJSONField.objects.create(json_field={'data': [1, 2, 3]})
data = tm.json_field['data']
data.append(4)
assert tm.get_dirty_fields(verbose=True) == {
'json_field': {
'current': {'data': [1, 2, 3, 4]},
'saved': {'data': [1, 2, 3]}
}
}
|
# ... existing code ...
def test_dirty_json_field():
from tests.models import TestModelWithJSONField
tm = TestModelWithJSONField.objects.create(json_field={'data': [1, 2, 3]})
data = tm.json_field['data']
data.append(4)
assert tm.get_dirty_fields(verbose=True) == {
'json_field': {
'current': {'data': [1, 2, 3, 4]},
'saved': {'data': [1, 2, 3]}
}
}
# ... rest of the code ...
|
820c249122a76b13e1f54e2e082efb97e70d962c
|
rxproperty/src/main/java/com/github/kubode/rxproperty/ReadOnlyObservableProperty.java
|
rxproperty/src/main/java/com/github/kubode/rxproperty/ReadOnlyObservableProperty.java
|
package com.github.kubode.rxproperty;
import rx.Observable;
import rx.Observer;
import rx.Subscriber;
/**
* Read-only Observable property.
*
* @param <T> the type of this property.
*/
public class ReadOnlyObservableProperty<T> extends Observable<T> {
/**
* The state of {@link ReadOnlyObservableProperty}.
* Generally, {@link #getObservable()} should emit the most recent value when subscribed.
*
* @param <T> the type of value property and item expected to be observed by the {@link #getObservable()}.
*/
public interface State<T> {
/**
* Returns {@link Observable} of this state.
*
* @return an {@link Observable} that emits the most recent value it has observed
* and all subsequent observed items to each subscribed {@link Observer}.
*/
Observable<T> getObservable();
/**
* Returns value of this state.
*
* @return a value of this state.
*/
T getValue();
}
private final State<T> state;
/**
* Creates a ReadOnlyObservableProperty.
*
* @param state a state of this property.
*/
public ReadOnlyObservableProperty(final State<T> state) {
super(new OnSubscribe<T>() {
@Override
public void call(Subscriber<? super T> subscriber) {
subscriber.add(state.getObservable().subscribe(subscriber));
}
});
this.state = state;
}
/**
* Get value of this property.
*
* @return value of {@link #state#getValue()}.
*/
public T getValue() {
return state.getValue();
}
}
|
package com.github.kubode.rxproperty;
import rx.Observable;
import rx.Observer;
import rx.Subscriber;
/**
* Read-only Observable property.
*
* @param <T> the type of this property.
*/
public class ReadOnlyObservableProperty<T> extends Observable<T> {
/**
* The state of {@link ReadOnlyObservableProperty}.
* Generally, {@link #getObservable()} should emit the most recent value when subscribed.
*
* @param <T> the type of value property and item expected to be observed by the {@link #getObservable()}.
*/
public interface State<T> {
/**
* Returns {@link Observable} of this state.
*
* @return an {@link Observable} that emits the most recent value it has observed
* and all subsequent observed items to each subscribed {@link Observer}.
*/
Observable<T> getObservable();
/**
* Returns value of this state.
*
* @return a value of this state.
*/
T getValue();
}
private final State<T> state;
/**
* Creates a ReadOnlyObservableProperty.
*
* @param state a state of this property.
*/
public ReadOnlyObservableProperty(final State<T> state) {
super(new OnSubscribe<T>() {
@Override
public void call(Subscriber<? super T> subscriber) {
subscriber.add(state.getObservable().subscribe(subscriber));
}
});
this.state = state;
}
/**
* Get value of this property.
*
* @return value of this property.
*/
public T getValue() {
return state.getValue();
}
}
|
Fix fail on javadoc task.
|
Fix fail on javadoc task.
|
Java
|
apache-2.0
|
kubode/RxProperty,kubode/RxProperty,kubode/RxProperty
|
java
|
## Code Before:
package com.github.kubode.rxproperty;
import rx.Observable;
import rx.Observer;
import rx.Subscriber;
/**
* Read-only Observable property.
*
* @param <T> the type of this property.
*/
public class ReadOnlyObservableProperty<T> extends Observable<T> {
/**
* The state of {@link ReadOnlyObservableProperty}.
* Generally, {@link #getObservable()} should emit the most recent value when subscribed.
*
* @param <T> the type of value property and item expected to be observed by the {@link #getObservable()}.
*/
public interface State<T> {
/**
* Returns {@link Observable} of this state.
*
* @return an {@link Observable} that emits the most recent value it has observed
* and all subsequent observed items to each subscribed {@link Observer}.
*/
Observable<T> getObservable();
/**
* Returns value of this state.
*
* @return a value of this state.
*/
T getValue();
}
private final State<T> state;
/**
* Creates a ReadOnlyObservableProperty.
*
* @param state a state of this property.
*/
public ReadOnlyObservableProperty(final State<T> state) {
super(new OnSubscribe<T>() {
@Override
public void call(Subscriber<? super T> subscriber) {
subscriber.add(state.getObservable().subscribe(subscriber));
}
});
this.state = state;
}
/**
* Get value of this property.
*
* @return value of {@link #state#getValue()}.
*/
public T getValue() {
return state.getValue();
}
}
## Instruction:
Fix fail on javadoc task.
## Code After:
package com.github.kubode.rxproperty;
import rx.Observable;
import rx.Observer;
import rx.Subscriber;
/**
* Read-only Observable property.
*
* @param <T> the type of this property.
*/
public class ReadOnlyObservableProperty<T> extends Observable<T> {
/**
* The state of {@link ReadOnlyObservableProperty}.
* Generally, {@link #getObservable()} should emit the most recent value when subscribed.
*
* @param <T> the type of value property and item expected to be observed by the {@link #getObservable()}.
*/
public interface State<T> {
/**
* Returns {@link Observable} of this state.
*
* @return an {@link Observable} that emits the most recent value it has observed
* and all subsequent observed items to each subscribed {@link Observer}.
*/
Observable<T> getObservable();
/**
* Returns value of this state.
*
* @return a value of this state.
*/
T getValue();
}
private final State<T> state;
/**
* Creates a ReadOnlyObservableProperty.
*
* @param state a state of this property.
*/
public ReadOnlyObservableProperty(final State<T> state) {
super(new OnSubscribe<T>() {
@Override
public void call(Subscriber<? super T> subscriber) {
subscriber.add(state.getObservable().subscribe(subscriber));
}
});
this.state = state;
}
/**
* Get value of this property.
*
* @return value of this property.
*/
public T getValue() {
return state.getValue();
}
}
|
# ... existing code ...
/**
* Get value of this property.
*
* @return value of this property.
*/
public T getValue() {
return state.getValue();
# ... rest of the code ...
|
a9d3f47098bc7499d62d4866883fa45622f01b74
|
app/main/errors.py
|
app/main/errors.py
|
from flask import render_template, current_app, request
from . import main
from ..helpers.search_helpers import get_template_data
@main.app_errorhandler(404)
def page_not_found(e):
template_data = get_template_data(main, {})
return render_template("errors/404.html", **template_data), 404
@main.app_errorhandler(500)
def page_not_found(e):
template_data = get_template_data(main, {})
return render_template("errors/500.html", **template_data), 500
|
from flask import render_template, current_app, request
from . import main
from ..helpers.search_helpers import get_template_data
from dmutils.apiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def page_not_found(e):
return _render_error_page(500)
def _render_error_page(status_code):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
template_data = get_template_data(main, {})
return render_template(templates[status_code], **template_data), status_code
|
Add API error handling similar to supplier app
|
Add API error handling similar to supplier app
Currently 404s returned by the API are resulting in 500s on the buyer
app for invalid supplier requests. This change takes the model used in
the supplier frontend to automatically handle uncaught APIErrors. It is
not identical to the supplier app version because the default template
data is generated in a different way.
|
Python
|
mit
|
alphagov/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend
|
python
|
## Code Before:
from flask import render_template, current_app, request
from . import main
from ..helpers.search_helpers import get_template_data
@main.app_errorhandler(404)
def page_not_found(e):
template_data = get_template_data(main, {})
return render_template("errors/404.html", **template_data), 404
@main.app_errorhandler(500)
def page_not_found(e):
template_data = get_template_data(main, {})
return render_template("errors/500.html", **template_data), 500
## Instruction:
Add API error handling similar to supplier app
Currently 404s returned by the API are resulting in 500s on the buyer
app for invalid supplier requests. This change takes the model used in
the supplier frontend to automatically handle uncaught APIErrors. It is
not identical to the supplier app version because the default template
data is generated in a different way.
## Code After:
from flask import render_template, current_app, request
from . import main
from ..helpers.search_helpers import get_template_data
from dmutils.apiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def page_not_found(e):
return _render_error_page(500)
def _render_error_page(status_code):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
template_data = get_template_data(main, {})
return render_template(templates[status_code], **template_data), status_code
|
...
from flask import render_template, current_app, request
from . import main
from ..helpers.search_helpers import get_template_data
from dmutils.apiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def page_not_found(e):
return _render_error_page(500)
def _render_error_page(status_code):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
template_data = get_template_data(main, {})
return render_template(templates[status_code], **template_data), status_code
...
|
1e84e4f8cadd6f776bde4b64839a7e919cb95228
|
website/addons/s3/tests/factories.py
|
website/addons/s3/tests/factories.py
|
"""Factories for the S3 addon."""
from factory import SubFactory, Sequence
from tests.factories import ModularOdmFactory, UserFactory, ProjectFactory, ExternalAccountFactory
from website.addons.s3.model import (
S3UserSettings,
S3NodeSettings
)
class S3AccountFactory(ExternalAccountFactory):
provider = 's3'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
oauth_secret = Sequence(lambda n:'secret-{0}'.format(n))
display_name = 'S3 Fake User'
class S3UserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3UserSettings
owner = SubFactory(UserFactory)
class S3NodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3NodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(S3UserSettingsFactory)
bucket = 'mock_bucket'
|
"""Factories for the S3 addon."""
from factory import SubFactory, Sequence
from tests.factories import ModularOdmFactory, UserFactory, ProjectFactory, ExternalAccountFactory
from website.addons.s3.model import (
S3UserSettings,
S3NodeSettings
)
class S3AccountFactory(ExternalAccountFactory):
provider = 's3'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
oauth_secret = Sequence(lambda n:'secret-{0}'.format(n))
display_name = 'S3 Fake User'
class S3UserSettingsFactory(ModularOdmFactory):
class Meta:
model = S3UserSettings
owner = SubFactory(UserFactory)
class S3NodeSettingsFactory(ModularOdmFactory):
class Meta:
model = S3NodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(S3UserSettingsFactory)
bucket = 'mock_bucket'
|
Use newer factory-boy Meta syntax for s3
|
Use newer factory-boy Meta syntax for s3
|
Python
|
apache-2.0
|
mluke93/osf.io,mattclark/osf.io,abought/osf.io,adlius/osf.io,laurenrevere/osf.io,kch8qx/osf.io,RomanZWang/osf.io,chrisseto/osf.io,asanfilippo7/osf.io,hmoco/osf.io,caseyrollins/osf.io,rdhyee/osf.io,brianjgeiger/osf.io,zamattiac/osf.io,alexschiller/osf.io,kch8qx/osf.io,mfraezz/osf.io,alexschiller/osf.io,pattisdr/osf.io,SSJohns/osf.io,leb2dg/osf.io,acshi/osf.io,RomanZWang/osf.io,chennan47/osf.io,emetsger/osf.io,sloria/osf.io,hmoco/osf.io,zachjanicki/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,emetsger/osf.io,kch8qx/osf.io,icereval/osf.io,samchrisinger/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,RomanZWang/osf.io,cslzchen/osf.io,amyshi188/osf.io,mluo613/osf.io,cwisecarver/osf.io,samchrisinger/osf.io,acshi/osf.io,saradbowman/osf.io,DanielSBrown/osf.io,chennan47/osf.io,brianjgeiger/osf.io,monikagrabowska/osf.io,binoculars/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,chrisseto/osf.io,leb2dg/osf.io,emetsger/osf.io,rdhyee/osf.io,amyshi188/osf.io,samchrisinger/osf.io,acshi/osf.io,icereval/osf.io,rdhyee/osf.io,alexschiller/osf.io,hmoco/osf.io,Johnetordoff/osf.io,wearpants/osf.io,jnayak1/osf.io,kch8qx/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,mluke93/osf.io,kwierman/osf.io,brianjgeiger/osf.io,amyshi188/osf.io,zamattiac/osf.io,aaxelb/osf.io,binoculars/osf.io,mattclark/osf.io,mfraezz/osf.io,samchrisinger/osf.io,TomHeatwole/osf.io,RomanZWang/osf.io,cwisecarver/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,DanielSBrown/osf.io,laurenrevere/osf.io,TomBaxter/osf.io,Nesiehr/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,Nesiehr/osf.io,kch8qx/osf.io,cslzchen/osf.io,alexschiller/osf.io,SSJohns/osf.io,doublebits/osf.io,leb2dg/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,adlius/osf.io,sloria/osf.io,erinspace/osf.io,zachjanicki/osf.io,caneruguz/osf.io,TomHeatwole/osf.io,crcresearch/osf.io,amyshi188/osf.io,kwierman/osf.io,baylee-d/osf.io,wearpants/osf.io,abought/osf.io,sloria/osf.io,abought/osf.io,doublebits/osf.io,TomBaxter/osf.io,abought/osf.io,monikagrabowska/
osf.io,chrisseto/osf.io,baylee-d/osf.io,kwierman/osf.io,DanielSBrown/osf.io,SSJohns/osf.io,binoculars/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,zamattiac/osf.io,mattclark/osf.io,erinspace/osf.io,DanielSBrown/osf.io,felliott/osf.io,HalcyonChimera/osf.io,asanfilippo7/osf.io,jnayak1/osf.io,TomHeatwole/osf.io,emetsger/osf.io,mluke93/osf.io,acshi/osf.io,aaxelb/osf.io,crcresearch/osf.io,TomHeatwole/osf.io,mluo613/osf.io,hmoco/osf.io,Johnetordoff/osf.io,jnayak1/osf.io,caneruguz/osf.io,asanfilippo7/osf.io,zachjanicki/osf.io,mluo613/osf.io,zachjanicki/osf.io,icereval/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,baylee-d/osf.io,cwisecarver/osf.io,alexschiller/osf.io,zamattiac/osf.io,felliott/osf.io,mfraezz/osf.io,rdhyee/osf.io,acshi/osf.io,HalcyonChimera/osf.io,kwierman/osf.io,RomanZWang/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,adlius/osf.io,pattisdr/osf.io,wearpants/osf.io,adlius/osf.io,aaxelb/osf.io,mluo613/osf.io,felliott/osf.io,chennan47/osf.io,asanfilippo7/osf.io,chrisseto/osf.io,doublebits/osf.io,cwisecarver/osf.io,doublebits/osf.io,laurenrevere/osf.io,jnayak1/osf.io,SSJohns/osf.io,TomBaxter/osf.io,aaxelb/osf.io,mluke93/osf.io,caseyrollins/osf.io,saradbowman/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,doublebits/osf.io,mfraezz/osf.io,wearpants/osf.io,mluo613/osf.io
|
python
|
## Code Before:
"""Factories for the S3 addon."""
from factory import SubFactory, Sequence
from tests.factories import ModularOdmFactory, UserFactory, ProjectFactory, ExternalAccountFactory
from website.addons.s3.model import (
S3UserSettings,
S3NodeSettings
)
class S3AccountFactory(ExternalAccountFactory):
provider = 's3'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
oauth_secret = Sequence(lambda n:'secret-{0}'.format(n))
display_name = 'S3 Fake User'
class S3UserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3UserSettings
owner = SubFactory(UserFactory)
class S3NodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3NodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(S3UserSettingsFactory)
bucket = 'mock_bucket'
## Instruction:
Use newer factory-boy Meta syntax for s3
## Code After:
"""Factories for the S3 addon."""
from factory import SubFactory, Sequence
from tests.factories import ModularOdmFactory, UserFactory, ProjectFactory, ExternalAccountFactory
from website.addons.s3.model import (
S3UserSettings,
S3NodeSettings
)
class S3AccountFactory(ExternalAccountFactory):
provider = 's3'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
oauth_secret = Sequence(lambda n:'secret-{0}'.format(n))
display_name = 'S3 Fake User'
class S3UserSettingsFactory(ModularOdmFactory):
class Meta:
model = S3UserSettings
owner = SubFactory(UserFactory)
class S3NodeSettingsFactory(ModularOdmFactory):
class Meta:
model = S3NodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(S3UserSettingsFactory)
bucket = 'mock_bucket'
|
...
class S3UserSettingsFactory(ModularOdmFactory):
class Meta:
model = S3UserSettings
owner = SubFactory(UserFactory)
class S3NodeSettingsFactory(ModularOdmFactory):
class Meta:
model = S3NodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(S3UserSettingsFactory)
...
|
51c6688569598e8adc4705cea4278eabc164e28a
|
pm-server/src/main/java/com/pm/server/PmServerApplication.java
|
pm-server/src/main/java/com/pm/server/PmServerApplication.java
|
package com.pm.server;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.context.web.SpringBootServletInitializer;
@SpringBootApplication
public class PmServerApplication extends SpringBootServletInitializer {
public static void main(String[] args) {
SpringApplication.run(PmServerApplication.class, args);
}
@Override
protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
return application.sources(PmServerApplication.class);
}
}
|
package com.pm.server;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.context.web.SpringBootServletInitializer;
@SpringBootApplication
public class PmServerApplication extends SpringBootServletInitializer {
public static void main(String[] args) {
SpringApplication.run(PmServerApplication.class, args);
}
@Override
protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
return application.sources(PmServerApplication.class);
}
}
|
Correct indentation for application configuration
|
Correct indentation for application configuration
|
Java
|
mit
|
pacmacro/pm-server,pacmacro/pm-server,pacmacro/pm-server
|
java
|
## Code Before:
package com.pm.server;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.context.web.SpringBootServletInitializer;
@SpringBootApplication
public class PmServerApplication extends SpringBootServletInitializer {
public static void main(String[] args) {
SpringApplication.run(PmServerApplication.class, args);
}
@Override
protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
return application.sources(PmServerApplication.class);
}
}
## Instruction:
Correct indentation for application configuration
## Code After:
package com.pm.server;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.context.web.SpringBootServletInitializer;
@SpringBootApplication
public class PmServerApplication extends SpringBootServletInitializer {
public static void main(String[] args) {
SpringApplication.run(PmServerApplication.class, args);
}
@Override
protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
return application.sources(PmServerApplication.class);
}
}
|
// ... existing code ...
SpringApplication.run(PmServerApplication.class, args);
}
@Override
protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
return application.sources(PmServerApplication.class);
}
}
// ... rest of the code ...
|
1af2795907b3a686d9bce4bdc94b89f3678dd1af
|
corehq/apps/sms/migrations/0049_auto_enable_turnio_ff.py
|
corehq/apps/sms/migrations/0049_auto_enable_turnio_ff.py
|
from django.db import migrations
from corehq.messaging.smsbackends.turn.models import SQLTurnWhatsAppBackend
from corehq.toggles import TURN_IO_BACKEND, NAMESPACE_DOMAIN
def auto_enable_turnio_ff_for_certain_domains(apps, schema_editor):
for backend in SQLTurnWhatsAppBackend.active_objects.all():
domain = backend.domain
TURN_IO_BACKEND.set(item=domain, enabled=True, namespace=NAMESPACE_DOMAIN)
def noop(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('sms', '0048_delete_sqlicdsbackend'),
]
operations = [
migrations.RunPython(auto_enable_turnio_ff_for_certain_domains, reverse_code=noop),
]
|
from django.db import migrations
from corehq.toggles import TURN_IO_BACKEND, NAMESPACE_DOMAIN
def auto_enable_turnio_ff_for_certain_domains(apps, schema_editor):
SQLTurnWhatsAppBackend = apps.get_model('sms', 'SQLTurnWhatsAppBackend')
for backend in SQLTurnWhatsAppBackend.objects.all():
# Check for backend.deleted to account for active_objects
if not backend.deleted:
domain = backend.domain
TURN_IO_BACKEND.set(item=domain, enabled=True, namespace=NAMESPACE_DOMAIN)
def noop(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('sms', '0048_delete_sqlicdsbackend'),
]
operations = [
migrations.RunPython(auto_enable_turnio_ff_for_certain_domains, reverse_code=noop),
]
|
Use historical model in migration, not directly imported model
|
Use historical model in migration, not directly imported model
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
python
|
## Code Before:
from django.db import migrations
from corehq.messaging.smsbackends.turn.models import SQLTurnWhatsAppBackend
from corehq.toggles import TURN_IO_BACKEND, NAMESPACE_DOMAIN
def auto_enable_turnio_ff_for_certain_domains(apps, schema_editor):
for backend in SQLTurnWhatsAppBackend.active_objects.all():
domain = backend.domain
TURN_IO_BACKEND.set(item=domain, enabled=True, namespace=NAMESPACE_DOMAIN)
def noop(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('sms', '0048_delete_sqlicdsbackend'),
]
operations = [
migrations.RunPython(auto_enable_turnio_ff_for_certain_domains, reverse_code=noop),
]
## Instruction:
Use historical model in migration, not directly imported model
## Code After:
from django.db import migrations
from corehq.toggles import TURN_IO_BACKEND, NAMESPACE_DOMAIN
def auto_enable_turnio_ff_for_certain_domains(apps, schema_editor):
SQLTurnWhatsAppBackend = apps.get_model('sms', 'SQLTurnWhatsAppBackend')
for backend in SQLTurnWhatsAppBackend.objects.all():
# Check for backend.deleted to account for active_objects
if not backend.deleted:
domain = backend.domain
TURN_IO_BACKEND.set(item=domain, enabled=True, namespace=NAMESPACE_DOMAIN)
def noop(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('sms', '0048_delete_sqlicdsbackend'),
]
operations = [
migrations.RunPython(auto_enable_turnio_ff_for_certain_domains, reverse_code=noop),
]
|
# ... existing code ...
from django.db import migrations
from corehq.toggles import TURN_IO_BACKEND, NAMESPACE_DOMAIN
def auto_enable_turnio_ff_for_certain_domains(apps, schema_editor):
SQLTurnWhatsAppBackend = apps.get_model('sms', 'SQLTurnWhatsAppBackend')
for backend in SQLTurnWhatsAppBackend.objects.all():
# Check for backend.deleted to account for active_objects
if not backend.deleted:
domain = backend.domain
TURN_IO_BACKEND.set(item=domain, enabled=True, namespace=NAMESPACE_DOMAIN)
def noop(apps, schema_editor):
# ... rest of the code ...
|
97942010558a4b3e242b66607c1f62742a29e00f
|
opendaylight/netconf/config-netconf-connector/src/main/java/org/opendaylight/controller/netconf/confignetconfconnector/osgi/YangStoreServiceImpl.java
|
opendaylight/netconf/config-netconf-connector/src/main/java/org/opendaylight/controller/netconf/confignetconfconnector/osgi/YangStoreServiceImpl.java
|
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.netconf.confignetconfconnector.osgi;
import org.opendaylight.yangtools.yang.model.api.SchemaContextProvider;
import javax.annotation.concurrent.GuardedBy;
public class YangStoreServiceImpl implements YangStoreService {
private final SchemaContextProvider service;
@GuardedBy("this")
private YangStoreSnapshotImpl cache = null;
public YangStoreServiceImpl(SchemaContextProvider service) {
this.service = service;
}
@Override
public synchronized YangStoreSnapshotImpl getYangStoreSnapshot() throws YangStoreException {
if (cache == null) {
cache = new YangStoreSnapshotImpl(service.getSchemaContext());
}
return cache;
}
/**
* Called when schema context changes, invalidates cache.
*/
public synchronized void refresh() {
cache = null;
}
}
|
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.netconf.confignetconfconnector.osgi;
import java.lang.ref.SoftReference;
import org.opendaylight.yangtools.yang.model.api.SchemaContextProvider;
import javax.annotation.concurrent.GuardedBy;
public class YangStoreServiceImpl implements YangStoreService {
private final SchemaContextProvider service;
@GuardedBy("this")
private SoftReference<YangStoreSnapshotImpl> cache = new SoftReference<>(null);
public YangStoreServiceImpl(SchemaContextProvider service) {
this.service = service;
}
@Override
public synchronized YangStoreSnapshotImpl getYangStoreSnapshot() throws YangStoreException {
YangStoreSnapshotImpl yangStoreSnapshot = cache.get();
if (yangStoreSnapshot == null) {
yangStoreSnapshot = new YangStoreSnapshotImpl(service.getSchemaContext());
cache = new SoftReference<>(yangStoreSnapshot);
}
return yangStoreSnapshot;
}
/**
* Called when schema context changes, invalidates cache.
*/
public synchronized void refresh() {
cache.clear();
}
}
|
Store yang store snapshot cache using soft reference.
|
Store yang store snapshot cache using soft reference.
Change-Id: I9b159db83ba204b4a636f2314fd4fc2e7b6f654c
Signed-off-by: Tomas Olvecky <[email protected]>
|
Java
|
epl-1.0
|
Sushma7785/OpenDayLight-Load-Balancer,Sushma7785/OpenDayLight-Load-Balancer,my76128/controller,inocybe/odl-controller,my76128/controller,522986491/controller,tx1103mark/controller,Johnson-Chou/test,tx1103mark/controller,my76128/controller,inocybe/odl-controller,mandeepdhami/controller,aryantaheri/controller,tx1103mark/controller,aryantaheri/monitoring-controller,opendaylight/controller,aryantaheri/controller,my76128/controller,aryantaheri/monitoring-controller,aryantaheri/monitoring-controller,aryantaheri/monitoring-controller,522986491/controller,tx1103mark/controller,mandeepdhami/controller,aryantaheri/controller,Johnson-Chou/test,mandeepdhami/controller,mandeepdhami/controller
|
java
|
## Code Before:
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.netconf.confignetconfconnector.osgi;
import org.opendaylight.yangtools.yang.model.api.SchemaContextProvider;
import javax.annotation.concurrent.GuardedBy;
public class YangStoreServiceImpl implements YangStoreService {
private final SchemaContextProvider service;
@GuardedBy("this")
private YangStoreSnapshotImpl cache = null;
public YangStoreServiceImpl(SchemaContextProvider service) {
this.service = service;
}
@Override
public synchronized YangStoreSnapshotImpl getYangStoreSnapshot() throws YangStoreException {
if (cache == null) {
cache = new YangStoreSnapshotImpl(service.getSchemaContext());
}
return cache;
}
/**
* Called when schema context changes, invalidates cache.
*/
public synchronized void refresh() {
cache = null;
}
}
## Instruction:
Store yang store snapshot cache using soft reference.
Change-Id: I9b159db83ba204b4a636f2314fd4fc2e7b6f654c
Signed-off-by: Tomas Olvecky <[email protected]>
## Code After:
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.netconf.confignetconfconnector.osgi;
import java.lang.ref.SoftReference;
import org.opendaylight.yangtools.yang.model.api.SchemaContextProvider;
import javax.annotation.concurrent.GuardedBy;
public class YangStoreServiceImpl implements YangStoreService {
private final SchemaContextProvider service;
@GuardedBy("this")
private SoftReference<YangStoreSnapshotImpl> cache = new SoftReference<>(null);
public YangStoreServiceImpl(SchemaContextProvider service) {
this.service = service;
}
@Override
public synchronized YangStoreSnapshotImpl getYangStoreSnapshot() throws YangStoreException {
YangStoreSnapshotImpl yangStoreSnapshot = cache.get();
if (yangStoreSnapshot == null) {
yangStoreSnapshot = new YangStoreSnapshotImpl(service.getSchemaContext());
cache = new SoftReference<>(yangStoreSnapshot);
}
return yangStoreSnapshot;
}
/**
* Called when schema context changes, invalidates cache.
*/
public synchronized void refresh() {
cache.clear();
}
}
|
// ... existing code ...
package org.opendaylight.controller.netconf.confignetconfconnector.osgi;
import java.lang.ref.SoftReference;
import org.opendaylight.yangtools.yang.model.api.SchemaContextProvider;
import javax.annotation.concurrent.GuardedBy;
// ... modified code ...
public class YangStoreServiceImpl implements YangStoreService {
private final SchemaContextProvider service;
@GuardedBy("this")
private SoftReference<YangStoreSnapshotImpl> cache = new SoftReference<>(null);
public YangStoreServiceImpl(SchemaContextProvider service) {
this.service = service;
...
@Override
public synchronized YangStoreSnapshotImpl getYangStoreSnapshot() throws YangStoreException {
YangStoreSnapshotImpl yangStoreSnapshot = cache.get();
if (yangStoreSnapshot == null) {
yangStoreSnapshot = new YangStoreSnapshotImpl(service.getSchemaContext());
cache = new SoftReference<>(yangStoreSnapshot);
}
return yangStoreSnapshot;
}
/**
...
* Called when schema context changes, invalidates cache.
*/
public synchronized void refresh() {
cache.clear();
}
}
// ... rest of the code ...
|
214fc4a52f1ff774705ebc408bc144d0cf6c1c7c
|
src/main/java/seedu/address/logic/parser/CliSyntax.java
|
src/main/java/seedu/address/logic/parser/CliSyntax.java
|
package seedu.address.logic.parser;
import java.util.regex.Pattern;
import seedu.address.logic.parser.ArgumentTokenizer.Prefix;
/**
* Contains Command Line Interface (CLI) syntax definitions common to multiple commands
*/
public class CliSyntax {
/* Prefix definitions */
public static final Prefix PREFIX_NAME = new Prefix("n/");
public static final Prefix PREFIX_DATE = new Prefix("due/");
public static final Prefix PREFIX_TIME = new Prefix("t/");
public static final Prefix PREFIX_TAG = new Prefix("#");
public static final Prefix PREFIX_DESCRIPTION = new Prefix("d/");
public static final Prefix PREFIX_VENUE = new Prefix("@");
public static final Prefix PREFIX_PRIORITY = new Prefix("p/");
/* Patterns definitions */
public static final Pattern KEYWORDS_ARGS_FORMAT =
Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)"); // one or more keywords separated by whitespace
}
|
package seedu.address.logic.parser;
import java.util.regex.Pattern;
import seedu.address.logic.parser.ArgumentTokenizer.Prefix;
/**
* Contains Command Line Interface (CLI) syntax definitions common to multiple commands
*/
public class CliSyntax {
/* Prefix definitions */
public static final Prefix PREFIX_NAME = new Prefix("n/");
public static final Prefix PREFIX_DATE = new Prefix("due/");
public static final Prefix PREFIX_TIME = new Prefix("t/");
public static final Prefix PREFIX_TAG = new Prefix("#");
public static final Prefix PREFIX_DESCRIPTION = new Prefix("d/");
public static final Prefix PREFIX_VENUE = new Prefix("@");
public static final Prefix PREFIX_PRIORITY = new Prefix("p/");
/* Patterns definitions */
public static final Pattern KEYWORDS_ARGS_FORMAT =
Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)"); // one or more keywords separated by whitespace
/* Patterns definitions */
public static final Pattern KEYWORDS_ARGS_FORMAT_LIST =
Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)"); // one or more keywords separated by whitespace
}
|
Add more patterns for future use
|
[Parser] Add more patterns for future use
|
Java
|
mit
|
CS2103JAN2017-W10-B1/main,CS2103JAN2017-W10-B1/main
|
java
|
## Code Before:
package seedu.address.logic.parser;
import java.util.regex.Pattern;
import seedu.address.logic.parser.ArgumentTokenizer.Prefix;
/**
* Contains Command Line Interface (CLI) syntax definitions common to multiple commands
*/
public class CliSyntax {
/* Prefix definitions */
public static final Prefix PREFIX_NAME = new Prefix("n/");
public static final Prefix PREFIX_DATE = new Prefix("due/");
public static final Prefix PREFIX_TIME = new Prefix("t/");
public static final Prefix PREFIX_TAG = new Prefix("#");
public static final Prefix PREFIX_DESCRIPTION = new Prefix("d/");
public static final Prefix PREFIX_VENUE = new Prefix("@");
public static final Prefix PREFIX_PRIORITY = new Prefix("p/");
/* Patterns definitions */
public static final Pattern KEYWORDS_ARGS_FORMAT =
Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)"); // one or more keywords separated by whitespace
}
## Instruction:
[Parser] Add more patterns for future use
## Code After:
package seedu.address.logic.parser;
import java.util.regex.Pattern;
import seedu.address.logic.parser.ArgumentTokenizer.Prefix;
/**
* Contains Command Line Interface (CLI) syntax definitions common to multiple commands
*/
public class CliSyntax {
/* Prefix definitions */
public static final Prefix PREFIX_NAME = new Prefix("n/");
public static final Prefix PREFIX_DATE = new Prefix("due/");
public static final Prefix PREFIX_TIME = new Prefix("t/");
public static final Prefix PREFIX_TAG = new Prefix("#");
public static final Prefix PREFIX_DESCRIPTION = new Prefix("d/");
public static final Prefix PREFIX_VENUE = new Prefix("@");
public static final Prefix PREFIX_PRIORITY = new Prefix("p/");
/* Patterns definitions */
public static final Pattern KEYWORDS_ARGS_FORMAT =
Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)"); // one or more keywords separated by whitespace
/* Patterns definitions */
public static final Pattern KEYWORDS_ARGS_FORMAT_LIST =
Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)"); // one or more keywords separated by whitespace
}
|
// ... existing code ...
/* Patterns definitions */
public static final Pattern KEYWORDS_ARGS_FORMAT =
Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)"); // one or more keywords separated by whitespace
/* Patterns definitions */
public static final Pattern KEYWORDS_ARGS_FORMAT_LIST =
Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)"); // one or more keywords separated by whitespace
}
// ... rest of the code ...
|
0eabc95105fecfd4b960b1c135f589f0eea9de2a
|
flaskrst/modules/staticpages/__init__.py
|
flaskrst/modules/staticpages/__init__.py
|
import os
from flask import current_app, render_template
from flaskrst.parsers import rstDocument
from flaskrst.modules import Blueprint
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_name': 'index'})
@static_pages.route('/<file_name>')
def show(file_name):
rst_file = os.path.join(current_app.config['SOURCE'], file_name + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)
|
import os
from flask import current_app, render_template
from flaskrst.parsers import rstDocument
from flaskrst.modules import Blueprint
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_path': 'index'})
@static_pages.route('/<path:file_path>')
def show(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = os.path.join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)
|
Support of static pages inside of a directory
|
Support of static pages inside of a directory
|
Python
|
bsd-3-clause
|
jarus/flask-rst
|
python
|
## Code Before:
import os
from flask import current_app, render_template
from flaskrst.parsers import rstDocument
from flaskrst.modules import Blueprint
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_name': 'index'})
@static_pages.route('/<file_name>')
def show(file_name):
rst_file = os.path.join(current_app.config['SOURCE'], file_name + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)
## Instruction:
Support of static pages inside of a directory
## Code After:
import os
from flask import current_app, render_template
from flaskrst.parsers import rstDocument
from flaskrst.modules import Blueprint
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_path': 'index'})
@static_pages.route('/<path:file_path>')
def show(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = os.path.join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)
|
...
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_path': 'index'})
@static_pages.route('/<path:file_path>')
def show(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = os.path.join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
...
|
b68576d307474eaf6bd8a8853bee767c391d28b9
|
conjure/connection.py
|
conjure/connection.py
|
from .exceptions import ConnectionError
from pymongo import MongoClient
from pymongo.uri_parser import parse_uri
_connections = {}
try:
import gevent
except ImportError:
gevent = None
def _get_connection(uri):
global _connections
parsed_uri = parse_uri(uri)
hosts = parsed_uri['nodelist']
hosts = ['%s:%d' % host for host in hosts]
key = ','.join(hosts)
connection = _connections.get(key)
if connection is None:
try:
connection = _connections[key] = MongoClient(uri)
except Exception as e:
raise ConnectionError(e.message)
return connection
def connect(uri):
parsed_uri = parse_uri(uri)
username = parsed_uri['username']
password = parsed_uri['password']
database = parsed_uri['database']
db = _get_connection(uri)[database]
if username and password:
db.authenticate(username, password)
return db
|
from .exceptions import ConnectionError
from pymongo import MongoClient
from pymongo.uri_parser import parse_uri
_connections = {}
try:
import gevent
except ImportError:
gevent = None
def _get_connection(uri):
global _connections
parsed_uri = parse_uri(uri)
hosts = parsed_uri['nodelist']
hosts = ['%s:%d' % host for host in hosts]
key = ','.join(hosts)
connection = _connections.get(key)
if connection is None:
try:
connection = _connections[key] = MongoClient(uri)
except Exception as e:
raise ConnectionError(e.message)
return connection
def connect(uri):
parsed_uri = parse_uri(uri)
username = parsed_uri['username']
password = parsed_uri['password']
database = parsed_uri['database']
db = _get_connection(uri)[database]
return db
|
Remove authenticate call to fix issues with pymongo 3.7
|
Remove authenticate call to fix issues with pymongo 3.7
|
Python
|
mit
|
GGOutfitters/conjure
|
python
|
## Code Before:
from .exceptions import ConnectionError
from pymongo import MongoClient
from pymongo.uri_parser import parse_uri
_connections = {}
try:
import gevent
except ImportError:
gevent = None
def _get_connection(uri):
global _connections
parsed_uri = parse_uri(uri)
hosts = parsed_uri['nodelist']
hosts = ['%s:%d' % host for host in hosts]
key = ','.join(hosts)
connection = _connections.get(key)
if connection is None:
try:
connection = _connections[key] = MongoClient(uri)
except Exception as e:
raise ConnectionError(e.message)
return connection
def connect(uri):
parsed_uri = parse_uri(uri)
username = parsed_uri['username']
password = parsed_uri['password']
database = parsed_uri['database']
db = _get_connection(uri)[database]
if username and password:
db.authenticate(username, password)
return db
## Instruction:
Remove authenticate call to fix issues with pymongo 3.7
## Code After:
from .exceptions import ConnectionError
from pymongo import MongoClient
from pymongo.uri_parser import parse_uri
_connections = {}
try:
import gevent
except ImportError:
gevent = None
def _get_connection(uri):
global _connections
parsed_uri = parse_uri(uri)
hosts = parsed_uri['nodelist']
hosts = ['%s:%d' % host for host in hosts]
key = ','.join(hosts)
connection = _connections.get(key)
if connection is None:
try:
connection = _connections[key] = MongoClient(uri)
except Exception as e:
raise ConnectionError(e.message)
return connection
def connect(uri):
parsed_uri = parse_uri(uri)
username = parsed_uri['username']
password = parsed_uri['password']
database = parsed_uri['database']
db = _get_connection(uri)[database]
return db
|
# ... existing code ...
db = _get_connection(uri)[database]
return db
# ... rest of the code ...
|
d6500b3d9af37fb2cd0fa14c82f78b165f9d221b
|
test_framework/test_settings.py
|
test_framework/test_settings.py
|
from .settings import * # NOQA
# Django 1.8 still has INSTALLED_APPS as a tuple
INSTALLED_APPS = list(INSTALLED_APPS)
INSTALLED_APPS.append('djoyapp')
|
from .settings import * # NOQA
INSTALLED_APPS.append('djoyapp')
|
Remove handling of apps tuple, it is always list now
|
Remove handling of apps tuple, it is always list now
Since Django 1.11, app settings are lists by default
|
Python
|
mit
|
jamescooke/factory_djoy
|
python
|
## Code Before:
from .settings import * # NOQA
# Django 1.8 still has INSTALLED_APPS as a tuple
INSTALLED_APPS = list(INSTALLED_APPS)
INSTALLED_APPS.append('djoyapp')
## Instruction:
Remove handling of apps tuple, it is always list now
Since Django 1.11, app settings are lists by default
## Code After:
from .settings import * # NOQA
INSTALLED_APPS.append('djoyapp')
|
# ... existing code ...
from .settings import * # NOQA
INSTALLED_APPS.append('djoyapp')
# ... rest of the code ...
|
e0c3fe2b1ecb4caf33b9ba3dafabe4eedae97c5e
|
spiralgalaxygame/tests/test_sentinel.py
|
spiralgalaxygame/tests/test_sentinel.py
|
import unittest
from spiralgalaxygame.sentinel import Sentinel, Enum
class SentinelTests (unittest.TestCase):
def setUp(self):
self.s = Sentinel('thingy')
def test_name(self):
self.assertIs(self.s.name, 'thingy')
def test_repr(self):
self.assertEqual(repr(self.s), '<Sentinel thingy>')
def test_identity_by_name(self):
s2 = Sentinel('th' + 'ingy') # `+` to avoid string interning.
self.assertIs(s2, self.s)
class EnumTests (unittest.TestCase):
def setUp(self):
self.e = Enum('red', 'green', 'blue')
def test_iter_and_members_are_sentinels(self):
for member in self.e:
self.assertIsInstance(member, Sentinel)
def test_member_as_attr_and_in_operator(self):
self.assertIn(self.e.green, self.e)
|
import unittest
from spiralgalaxygame.sentinel import Sentinel, Enum
class SentinelTests (unittest.TestCase):
def setUp(self):
self.s = Sentinel('thingy')
def test_name(self):
self.assertIs(self.s.name, 'thingy')
def test_repr(self):
self.assertEqual(repr(self.s), '<Sentinel thingy>')
def test_identity_by_name(self):
s2 = Sentinel('th' + 'ingy') # `+` to avoid string interning.
self.assertIs(s2, self.s)
class EnumTests (unittest.TestCase):
def setUp(self):
self.e = Enum('red', 'green', 'blue')
def test_repr(self):
self.assertEqual(repr(self.e), '<Enum blue, green, red>')
def test_iter_and_members_are_sentinels(self):
for member in self.e:
self.assertIsInstance(member, Sentinel)
def test_member_as_attr_and_in_operator(self):
self.assertIn(self.e.green, self.e)
|
Add a test for ``Enum.__repr__``; ``spiralgalaxygame.sentinel`` now has full coverage.
|
Add a test for ``Enum.__repr__``; ``spiralgalaxygame.sentinel`` now has full coverage.
|
Python
|
agpl-3.0
|
nejucomo/sgg,nejucomo/sgg,nejucomo/sgg
|
python
|
## Code Before:
import unittest
from spiralgalaxygame.sentinel import Sentinel, Enum
class SentinelTests (unittest.TestCase):
def setUp(self):
self.s = Sentinel('thingy')
def test_name(self):
self.assertIs(self.s.name, 'thingy')
def test_repr(self):
self.assertEqual(repr(self.s), '<Sentinel thingy>')
def test_identity_by_name(self):
s2 = Sentinel('th' + 'ingy') # `+` to avoid string interning.
self.assertIs(s2, self.s)
class EnumTests (unittest.TestCase):
def setUp(self):
self.e = Enum('red', 'green', 'blue')
def test_iter_and_members_are_sentinels(self):
for member in self.e:
self.assertIsInstance(member, Sentinel)
def test_member_as_attr_and_in_operator(self):
self.assertIn(self.e.green, self.e)
## Instruction:
Add a test for ``Enum.__repr__``; ``spiralgalaxygame.sentinel`` now has full coverage.
## Code After:
import unittest
from spiralgalaxygame.sentinel import Sentinel, Enum
class SentinelTests (unittest.TestCase):
def setUp(self):
self.s = Sentinel('thingy')
def test_name(self):
self.assertIs(self.s.name, 'thingy')
def test_repr(self):
self.assertEqual(repr(self.s), '<Sentinel thingy>')
def test_identity_by_name(self):
s2 = Sentinel('th' + 'ingy') # `+` to avoid string interning.
self.assertIs(s2, self.s)
class EnumTests (unittest.TestCase):
def setUp(self):
self.e = Enum('red', 'green', 'blue')
def test_repr(self):
self.assertEqual(repr(self.e), '<Enum blue, green, red>')
def test_iter_and_members_are_sentinels(self):
for member in self.e:
self.assertIsInstance(member, Sentinel)
def test_member_as_attr_and_in_operator(self):
self.assertIn(self.e.green, self.e)
|
...
def setUp(self):
self.e = Enum('red', 'green', 'blue')
def test_repr(self):
self.assertEqual(repr(self.e), '<Enum blue, green, red>')
def test_iter_and_members_are_sentinels(self):
for member in self.e:
self.assertIsInstance(member, Sentinel)
...
|
5c40cbfcb89649738945eda02c1bfb804e2ecdae
|
us_ignite/mailinglist/views.py
|
us_ignite/mailinglist/views.py
|
import hashlib
import mailchimp
from django.contrib import messages
from django.conf import settings
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from us_ignite.mailinglist.forms import EmailForm
def subscribe_email(email):
master = mailchimp.Mailchimp(settings.MAILCHIMP_API_KEY)
mailing_list = mailchimp.Lists(master)
uid = hashlib.md5(email).hexdigest()
email_data = {
'email': email,
'euid': uid,
'leid': uid,
}
return mailing_list.subscribe(
settings.MAILCHIMP_LIST, email_data)
def mailing_subscribe(request):
"""Handles MailChimp email registration."""
if request.method == 'POST':
form = EmailForm(request.POST)
if form.is_valid():
try:
subscribe_email(form.cleaned_data['email'])
messages.success(request, 'Successfully subscribed.')
redirect_to = 'home'
except mailchimp.ListAlreadySubscribedError:
messages.error(request, 'Already subscribed.')
redirect_to = 'mailing_subscribe'
return redirect(redirect_to)
else:
form = EmailForm()
context = {
'form': form,
}
return TemplateResponse(request, 'mailinglist/form.html', context)
|
import hashlib
import logging
import mailchimp
from django.contrib import messages
from django.conf import settings
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from us_ignite.mailinglist.forms import EmailForm
logger = logging.getLogger('us_ignite.mailinglist.views')
def subscribe_email(email):
master = mailchimp.Mailchimp(settings.MAILCHIMP_API_KEY)
mailing_list = mailchimp.Lists(master)
uid = hashlib.md5(email).hexdigest()
email_data = {
'email': email,
'euid': uid,
'leid': uid,
}
return mailing_list.subscribe(
settings.MAILCHIMP_LIST, email_data)
def mailing_subscribe(request):
"""Handles MailChimp email registration."""
if request.method == 'POST':
form = EmailForm(request.POST)
if form.is_valid():
try:
subscribe_email(form.cleaned_data['email'])
messages.success(request, 'Successfully subscribed.')
redirect_to = 'home'
except mailchimp.ListAlreadySubscribedError:
messages.error(request, 'Already subscribed.')
redirect_to = 'mailing_subscribe'
except Exception, e:
logger.exception(e)
msg = (u'There is a problem with the maling list. '
'Please try again later.')
messages.error(request, msg)
redirect_to = 'mailing_subscribe'
return redirect(redirect_to)
else:
form = EmailForm()
context = {
'form': form,
}
return TemplateResponse(request, 'mailinglist/form.html', context)
|
Improve handling of errors during mailing list subscription.
|
Improve handling of errors during mailing list subscription.
https://github.com/madewithbytes/us_ignite/issues/209
Any exception thrown by the mailchimp component will
be handled gracefully and logged.
|
Python
|
bsd-3-clause
|
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
|
python
|
## Code Before:
import hashlib
import mailchimp
from django.contrib import messages
from django.conf import settings
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from us_ignite.mailinglist.forms import EmailForm
def subscribe_email(email):
master = mailchimp.Mailchimp(settings.MAILCHIMP_API_KEY)
mailing_list = mailchimp.Lists(master)
uid = hashlib.md5(email).hexdigest()
email_data = {
'email': email,
'euid': uid,
'leid': uid,
}
return mailing_list.subscribe(
settings.MAILCHIMP_LIST, email_data)
def mailing_subscribe(request):
"""Handles MailChimp email registration."""
if request.method == 'POST':
form = EmailForm(request.POST)
if form.is_valid():
try:
subscribe_email(form.cleaned_data['email'])
messages.success(request, 'Successfully subscribed.')
redirect_to = 'home'
except mailchimp.ListAlreadySubscribedError:
messages.error(request, 'Already subscribed.')
redirect_to = 'mailing_subscribe'
return redirect(redirect_to)
else:
form = EmailForm()
context = {
'form': form,
}
return TemplateResponse(request, 'mailinglist/form.html', context)
## Instruction:
Improve handling of errors during mailing list subscription.
https://github.com/madewithbytes/us_ignite/issues/209
Any exception thrown by the mailchimp component will
be handled gracefully and logged.
## Code After:
import hashlib
import logging
import mailchimp
from django.contrib import messages
from django.conf import settings
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from us_ignite.mailinglist.forms import EmailForm
logger = logging.getLogger('us_ignite.mailinglist.views')
def subscribe_email(email):
master = mailchimp.Mailchimp(settings.MAILCHIMP_API_KEY)
mailing_list = mailchimp.Lists(master)
uid = hashlib.md5(email).hexdigest()
email_data = {
'email': email,
'euid': uid,
'leid': uid,
}
return mailing_list.subscribe(
settings.MAILCHIMP_LIST, email_data)
def mailing_subscribe(request):
"""Handles MailChimp email registration."""
if request.method == 'POST':
form = EmailForm(request.POST)
if form.is_valid():
try:
subscribe_email(form.cleaned_data['email'])
messages.success(request, 'Successfully subscribed.')
redirect_to = 'home'
except mailchimp.ListAlreadySubscribedError:
messages.error(request, 'Already subscribed.')
redirect_to = 'mailing_subscribe'
except Exception, e:
logger.exception(e)
msg = (u'There is a problem with the maling list. '
'Please try again later.')
messages.error(request, msg)
redirect_to = 'mailing_subscribe'
return redirect(redirect_to)
else:
form = EmailForm()
context = {
'form': form,
}
return TemplateResponse(request, 'mailinglist/form.html', context)
|
// ... existing code ...
import hashlib
import logging
import mailchimp
from django.contrib import messages
// ... modified code ...
from django.template.response import TemplateResponse
from us_ignite.mailinglist.forms import EmailForm
logger = logging.getLogger('us_ignite.mailinglist.views')
def subscribe_email(email):
...
except mailchimp.ListAlreadySubscribedError:
messages.error(request, 'Already subscribed.')
redirect_to = 'mailing_subscribe'
except Exception, e:
logger.exception(e)
msg = (u'There is a problem with the maling list. '
'Please try again later.')
messages.error(request, msg)
redirect_to = 'mailing_subscribe'
return redirect(redirect_to)
else:
form = EmailForm()
// ... rest of the code ...
|
e7d42847284ae73befad8bdf2fa035a6f95a82bb
|
tests/test_dow.py
|
tests/test_dow.py
|
from datetime import datetime
import pycron
def test_parser():
now = datetime(2015, 6, 18, 16, 7)
assert pycron.is_now('* * * * *', now)
assert pycron.is_now('* * * * 4', now)
assert pycron.is_now('* * * * */4', now)
assert pycron.is_now('* * * * 0,3,4', now)
assert pycron.is_now('* * * * 3', now) is False
assert pycron.is_now('* * * * */3', now) is False
assert pycron.is_now('* * * * 0,3,6', now) is False
assert pycron.DOW_CHOICES[now.isoweekday()][1] == 'Thursday'
assert pycron.DOW_CHOICES[0][1] == 'Sunday'
now = datetime(2015, 6, 21, 16, 7)
assert pycron.is_now('* * * * 0', now)
|
from datetime import datetime, timedelta
import pycron
def test_parser():
now = datetime(2015, 6, 18, 16, 7)
assert pycron.is_now('* * * * *', now)
assert pycron.is_now('* * * * 4', now)
assert pycron.is_now('* * * * */4', now)
assert pycron.is_now('* * * * 0,3,4', now)
assert pycron.is_now('* * * * 3', now) is False
assert pycron.is_now('* * * * */3', now) is False
assert pycron.is_now('* * * * 0,3,6', now) is False
assert pycron.DOW_CHOICES[now.isoweekday()][1] == 'Thursday'
assert pycron.DOW_CHOICES[0][1] == 'Sunday'
now = datetime(2015, 6, 20, 16, 7)
for i in range(0, 7):
# Test day matching from Sunday onwards...
now += timedelta(days=1)
assert pycron.is_now('* * * * %i' % (i), now)
# Test weekdays
assert pycron.is_now('* * * * 1,2,3,4,5', now) is (True if i not in [0, 6] else False)
# Test weekends
assert pycron.is_now('* * * * 0,6', now) is (True if i in [0, 6] else False)
|
Add more thorough testing of day of week.
|
Add more thorough testing of day of week.
|
Python
|
mit
|
kipe/pycron
|
python
|
## Code Before:
from datetime import datetime
import pycron
def test_parser():
now = datetime(2015, 6, 18, 16, 7)
assert pycron.is_now('* * * * *', now)
assert pycron.is_now('* * * * 4', now)
assert pycron.is_now('* * * * */4', now)
assert pycron.is_now('* * * * 0,3,4', now)
assert pycron.is_now('* * * * 3', now) is False
assert pycron.is_now('* * * * */3', now) is False
assert pycron.is_now('* * * * 0,3,6', now) is False
assert pycron.DOW_CHOICES[now.isoweekday()][1] == 'Thursday'
assert pycron.DOW_CHOICES[0][1] == 'Sunday'
now = datetime(2015, 6, 21, 16, 7)
assert pycron.is_now('* * * * 0', now)
## Instruction:
Add more thorough testing of day of week.
## Code After:
from datetime import datetime, timedelta
import pycron
def test_parser():
now = datetime(2015, 6, 18, 16, 7)
assert pycron.is_now('* * * * *', now)
assert pycron.is_now('* * * * 4', now)
assert pycron.is_now('* * * * */4', now)
assert pycron.is_now('* * * * 0,3,4', now)
assert pycron.is_now('* * * * 3', now) is False
assert pycron.is_now('* * * * */3', now) is False
assert pycron.is_now('* * * * 0,3,6', now) is False
assert pycron.DOW_CHOICES[now.isoweekday()][1] == 'Thursday'
assert pycron.DOW_CHOICES[0][1] == 'Sunday'
now = datetime(2015, 6, 20, 16, 7)
for i in range(0, 7):
# Test day matching from Sunday onwards...
now += timedelta(days=1)
assert pycron.is_now('* * * * %i' % (i), now)
# Test weekdays
assert pycron.is_now('* * * * 1,2,3,4,5', now) is (True if i not in [0, 6] else False)
# Test weekends
assert pycron.is_now('* * * * 0,6', now) is (True if i in [0, 6] else False)
|
# ... existing code ...
from datetime import datetime, timedelta
import pycron
# ... modified code ...
assert pycron.DOW_CHOICES[now.isoweekday()][1] == 'Thursday'
assert pycron.DOW_CHOICES[0][1] == 'Sunday'
now = datetime(2015, 6, 20, 16, 7)
for i in range(0, 7):
# Test day matching from Sunday onwards...
now += timedelta(days=1)
assert pycron.is_now('* * * * %i' % (i), now)
# Test weekdays
assert pycron.is_now('* * * * 1,2,3,4,5', now) is (True if i not in [0, 6] else False)
# Test weekends
assert pycron.is_now('* * * * 0,6', now) is (True if i in [0, 6] else False)
# ... rest of the code ...
|
3befcbaf3a78a46edc31cc1910fcd8e0a9381102
|
money_conversion/money.py
|
money_conversion/money.py
|
class Money(object):
def __init__(self, amount, currency):
self.amount = amount
self.currency = currency.upper()
def __repr__(self):
return "%.2f %s" % (self.amount, self.currency)
|
from currency_rates import rates
class Money(object):
def __init__(self, amount, currency):
self.amount = amount
self.currency = currency.upper()
def __repr__(self):
return "%.2f %s" % (self.amount, self.currency)
def to_currency(self, new_currency):
new_currency = new_currency.split('_')[1].upper()
amount = self.amount
base_currency_rates = rates.get(self.currency)
new_amount = amount * base_currency_rates.get(new_currency)
return Money(new_amount, new_currency)
|
Add to_currency method in order to be able to convert to a new currency
|
Add to_currency method in order to be able to convert to a new currency
|
Python
|
mit
|
mdsrosa/money-conversion-py
|
python
|
## Code Before:
class Money(object):
def __init__(self, amount, currency):
self.amount = amount
self.currency = currency.upper()
def __repr__(self):
return "%.2f %s" % (self.amount, self.currency)
## Instruction:
Add to_currency method in order to be able to convert to a new currency
## Code After:
from currency_rates import rates
class Money(object):
def __init__(self, amount, currency):
self.amount = amount
self.currency = currency.upper()
def __repr__(self):
return "%.2f %s" % (self.amount, self.currency)
def to_currency(self, new_currency):
new_currency = new_currency.split('_')[1].upper()
amount = self.amount
base_currency_rates = rates.get(self.currency)
new_amount = amount * base_currency_rates.get(new_currency)
return Money(new_amount, new_currency)
|
...
from currency_rates import rates
class Money(object):
...
def __repr__(self):
return "%.2f %s" % (self.amount, self.currency)
def to_currency(self, new_currency):
new_currency = new_currency.split('_')[1].upper()
amount = self.amount
base_currency_rates = rates.get(self.currency)
new_amount = amount * base_currency_rates.get(new_currency)
return Money(new_amount, new_currency)
...
|
f3c9550ee6719278cba7f4edda26274220e954f3
|
setup.py
|
setup.py
|
import re
from setuptools import setup, find_packages
data_files = {
"share/osbs": [
"inputs/prod.json",
"inputs/prod_inner.json",
"inputs/simple.json",
"inputs/simple_inner.json",
],
}
def _get_requirements(path):
try:
with open(path) as f:
packages = f.read().splitlines()
except (IOError, OSError) as ex:
raise RuntimeError("Can't open file with requirements: %s", repr(ex))
return [p.strip() for p in packages if not re.match(r"^\s*#", p)]
def _install_requirements():
requirements = _get_requirements('requirements.txt')
return requirements
setup(
name="osbs-client",
description='Python module and command line client for OpenShift Build Service',
version="0.14",
author='Tomas Tomecek',
author_email='[email protected]',
url='https://github.com/projectatomic/osbs-client',
license="BSD",
packages=find_packages(exclude=["tests"]),
entry_points={
'console_scripts': ['osbs=osbs.cli.main:main'],
},
install_requires=_install_requirements(),
data_files=data_files.items(),
)
|
import re
from setuptools import setup, find_packages
data_files = {
"share/osbs": [
"inputs/prod.json",
"inputs/prod_inner.json",
"inputs/simple.json",
"inputs/simple_inner.json",
],
}
def _get_requirements(path):
try:
with open(path) as f:
packages = f.read().splitlines()
except (IOError, OSError) as ex:
raise RuntimeError("Can't open file with requirements: %s", repr(ex))
return [p.strip() for p in packages if not re.match(r"^\s*#", p)]
def _install_requirements():
requirements = _get_requirements('requirements.txt')
return requirements
setup(
name="osbs-client",
description='Python module and command line client for OpenShift Build Service',
version="0.14",
author='Red Hat, Inc.',
author_email='[email protected]',
url='https://github.com/projectatomic/osbs-client',
license="BSD",
packages=find_packages(exclude=["tests"]),
entry_points={
'console_scripts': ['osbs=osbs.cli.main:main'],
},
install_requires=_install_requirements(),
data_files=data_files.items(),
)
|
Change author and email to the same as atomic-reactor has
|
Change author and email to the same as atomic-reactor has
|
Python
|
bsd-3-clause
|
bfontecc007/osbs-client,vrutkovs/osbs-client,twaugh/osbs-client,vrutkovs/osbs-client,jpopelka/osbs-client,DBuildService/osbs-client,projectatomic/osbs-client,bfontecc007/osbs-client,twaugh/osbs-client,pombredanne/osbs-client,jpopelka/osbs-client,DBuildService/osbs-client,projectatomic/osbs-client,pombredanne/osbs-client
|
python
|
## Code Before:
import re
from setuptools import setup, find_packages
data_files = {
"share/osbs": [
"inputs/prod.json",
"inputs/prod_inner.json",
"inputs/simple.json",
"inputs/simple_inner.json",
],
}
def _get_requirements(path):
try:
with open(path) as f:
packages = f.read().splitlines()
except (IOError, OSError) as ex:
raise RuntimeError("Can't open file with requirements: %s", repr(ex))
return [p.strip() for p in packages if not re.match(r"^\s*#", p)]
def _install_requirements():
requirements = _get_requirements('requirements.txt')
return requirements
setup(
name="osbs-client",
description='Python module and command line client for OpenShift Build Service',
version="0.14",
author='Tomas Tomecek',
author_email='[email protected]',
url='https://github.com/projectatomic/osbs-client',
license="BSD",
packages=find_packages(exclude=["tests"]),
entry_points={
'console_scripts': ['osbs=osbs.cli.main:main'],
},
install_requires=_install_requirements(),
data_files=data_files.items(),
)
## Instruction:
Change author and email to the same as atomic-reactor has
## Code After:
import re
from setuptools import setup, find_packages
data_files = {
"share/osbs": [
"inputs/prod.json",
"inputs/prod_inner.json",
"inputs/simple.json",
"inputs/simple_inner.json",
],
}
def _get_requirements(path):
try:
with open(path) as f:
packages = f.read().splitlines()
except (IOError, OSError) as ex:
raise RuntimeError("Can't open file with requirements: %s", repr(ex))
return [p.strip() for p in packages if not re.match(r"^\s*#", p)]
def _install_requirements():
requirements = _get_requirements('requirements.txt')
return requirements
setup(
name="osbs-client",
description='Python module and command line client for OpenShift Build Service',
version="0.14",
author='Red Hat, Inc.',
author_email='[email protected]',
url='https://github.com/projectatomic/osbs-client',
license="BSD",
packages=find_packages(exclude=["tests"]),
entry_points={
'console_scripts': ['osbs=osbs.cli.main:main'],
},
install_requires=_install_requirements(),
data_files=data_files.items(),
)
|
# ... existing code ...
name="osbs-client",
description='Python module and command line client for OpenShift Build Service',
version="0.14",
author='Red Hat, Inc.',
author_email='[email protected]',
url='https://github.com/projectatomic/osbs-client',
license="BSD",
packages=find_packages(exclude=["tests"]),
# ... rest of the code ...
|
c9ca9ae51ebc976bc60b982b9e98f68325301aea
|
corehq/util/es/interface.py
|
corehq/util/es/interface.py
|
class ElasticsearchInterface(object):
def __init__(self, es):
self.es = es
def update_index_settings(self, index, settings_dict):
return self.es.indices.put_settings(settings_dict, index=index)
|
import abc
from django.conf import settings
class AbstractElasticsearchInterface(metaclass=abc.ABCMeta):
def __init__(self, es):
self.es = es
def update_index_settings(self, index, settings_dict):
return self.es.indices.put_settings(settings_dict, index=index)
class ElasticsearchInterface1(AbstractElasticsearchInterface):
pass
class ElasticsearchInterface2(AbstractElasticsearchInterface):
_deprecated_index_settings = (
'merge.policy.merge_factor',
)
def update_index_settings(self, index, settings_dict):
assert set(settings_dict.keys()) == {'index'}, settings_dict.keys()
settings_dict = {
"index": {
key: value for key, value in settings_dict['index'].items()
if key not in self._deprecated_index_settings
}
}
super(ElasticsearchInterface2, self).update_index_settings(index, settings_dict)
ElasticsearchInterface = {
1: ElasticsearchInterface1,
2: ElasticsearchInterface2,
}[settings.ELASTICSEARCH_MAJOR_VERSION]
|
Split ElasticsearchInterface into ElasticsearchInterface1 and ElasticsearchInterface2
|
Split ElasticsearchInterface into ElasticsearchInterface1 and ElasticsearchInterface2
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
python
|
## Code Before:
class ElasticsearchInterface(object):
def __init__(self, es):
self.es = es
def update_index_settings(self, index, settings_dict):
return self.es.indices.put_settings(settings_dict, index=index)
## Instruction:
Split ElasticsearchInterface into ElasticsearchInterface1 and ElasticsearchInterface2
## Code After:
import abc
from django.conf import settings
class AbstractElasticsearchInterface(metaclass=abc.ABCMeta):
def __init__(self, es):
self.es = es
def update_index_settings(self, index, settings_dict):
return self.es.indices.put_settings(settings_dict, index=index)
class ElasticsearchInterface1(AbstractElasticsearchInterface):
pass
class ElasticsearchInterface2(AbstractElasticsearchInterface):
_deprecated_index_settings = (
'merge.policy.merge_factor',
)
def update_index_settings(self, index, settings_dict):
assert set(settings_dict.keys()) == {'index'}, settings_dict.keys()
settings_dict = {
"index": {
key: value for key, value in settings_dict['index'].items()
if key not in self._deprecated_index_settings
}
}
super(ElasticsearchInterface2, self).update_index_settings(index, settings_dict)
ElasticsearchInterface = {
1: ElasticsearchInterface1,
2: ElasticsearchInterface2,
}[settings.ELASTICSEARCH_MAJOR_VERSION]
|
// ... existing code ...
import abc
from django.conf import settings
class AbstractElasticsearchInterface(metaclass=abc.ABCMeta):
def __init__(self, es):
self.es = es
def update_index_settings(self, index, settings_dict):
return self.es.indices.put_settings(settings_dict, index=index)
class ElasticsearchInterface1(AbstractElasticsearchInterface):
pass
class ElasticsearchInterface2(AbstractElasticsearchInterface):
_deprecated_index_settings = (
'merge.policy.merge_factor',
)
def update_index_settings(self, index, settings_dict):
assert set(settings_dict.keys()) == {'index'}, settings_dict.keys()
settings_dict = {
"index": {
key: value for key, value in settings_dict['index'].items()
if key not in self._deprecated_index_settings
}
}
super(ElasticsearchInterface2, self).update_index_settings(index, settings_dict)
ElasticsearchInterface = {
1: ElasticsearchInterface1,
2: ElasticsearchInterface2,
}[settings.ELASTICSEARCH_MAJOR_VERSION]
// ... rest of the code ...
|
1896776f9811f953fd50f3e24f782cb26430af70
|
lang-impl/src/com/intellij/find/findUsages/CommonFindUsagesDialog.java
|
lang-impl/src/com/intellij/find/findUsages/CommonFindUsagesDialog.java
|
package com.intellij.find.findUsages;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.usageView.UsageViewUtil;
import javax.swing.*;
/**
* Created by IntelliJ IDEA.
* User: max
* Date: Feb 14, 2005
* Time: 5:40:05 PM
* To change this template use File | Settings | File Templates.
*/
public class CommonFindUsagesDialog extends AbstractFindUsagesDialog {
private PsiElement myPsiElement;
public CommonFindUsagesDialog(PsiElement element,
Project project,
FindUsagesOptions findUsagesOptions,
boolean toShowInNewTab,
boolean mustOpenInNewTab,
boolean isSingleFile) {
super(project, findUsagesOptions, toShowInNewTab, mustOpenInNewTab, isSingleFile,
FindUsagesUtil.isSearchForTextOccurencesAvailable(element, isSingleFile), !isSingleFile && !element.getManager().isInProject(element));
myPsiElement = element;
}
protected JPanel createFindWhatPanel() {
return null;
}
protected JComponent getPreferredFocusedControl() {
return null;
}
public String getLabelText() {
return StringUtil.capitalize(UsageViewUtil.getType(myPsiElement)) + " " + UsageViewUtil.getDescriptiveName(myPsiElement);
}
}
|
package com.intellij.find.findUsages;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.usageView.UsageViewUtil;
import javax.swing.*;
/**
* Created by IntelliJ IDEA.
* User: max
* Date: Feb 14, 2005
* Time: 5:40:05 PM
* To change this template use File | Settings | File Templates.
*/
public class CommonFindUsagesDialog extends AbstractFindUsagesDialog {
private PsiElement myPsiElement;
public CommonFindUsagesDialog(PsiElement element,
Project project,
FindUsagesOptions findUsagesOptions,
boolean toShowInNewTab,
boolean mustOpenInNewTab,
boolean isSingleFile) {
super(project, findUsagesOptions, toShowInNewTab, mustOpenInNewTab, isSingleFile,
FindUsagesUtil.isSearchForTextOccurencesAvailable(element, isSingleFile), !isSingleFile && !element.getManager().isInProject(element));
myPsiElement = element;
init();
}
protected JPanel createFindWhatPanel() {
return null;
}
protected JComponent getPreferredFocusedControl() {
return null;
}
public String getLabelText() {
return StringUtil.capitalize(UsageViewUtil.getType(myPsiElement)) + " " + UsageViewUtil.getDescriptiveName(myPsiElement);
}
}
|
Fix non-java find usages dialogs.
|
Fix non-java find usages dialogs.
|
Java
|
apache-2.0
|
Distrotech/intellij-community,ahb0327/intellij-community,amith01994/intellij-community,izonder/intellij-community,apixandru/intellij-community,asedunov/intellij-community,semonte/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,vladmm/intellij-community,supersven/intellij-community,clumsy/intellij-community,signed/intellij-community,ivan-fedorov/intellij-community,muntasirsyed/intellij-community,vladmm/intellij-community,jexp/idea2,petteyg/intellij-community,jexp/idea2,orekyuu/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,TangHao1987/intellij-community,supersven/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,alphafoobar/intellij-community,ivan-fedorov/intellij-community,joewalnes/idea-community,muntasirsyed/intellij-community,consulo/consulo,samthor/intellij-community,lucafavatella/intellij-community,salguarnieri/intellij-community,clumsy/intellij-community,fnouama/intellij-community,apixandru/intellij-community,ivan-fedorov/intellij-community,fengbaicanhe/intellij-community,fengbaicanhe/intellij-community,ryano144/intellij-community,SerCeMan/intellij-community,pwoodworth/intellij-community,asedunov/intellij-community,fnouama/intellij-community,pwoodworth/intellij-community,asedunov/intellij-community,FHannes/intellij-community,FHannes/intellij-community,samthor/intellij-community,diorcety/intellij-community,ibinti/intellij-community,tmpgit/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,ryano144/intellij-community,hurricup/intellij-community,ryano144/intellij-community,tmpgit/intellij-community,caot/intellij-community,slisson/intellij-community,allotria/intellij-community,Lekanich/intellij-community,allotria/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,caot/intellij-community,idea4bsd/idea4bsd,robovm/robovm-studio,caot/intellij-community,alphafoo
bar/intellij-community,diorcety/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,pwoodworth/intellij-community,fnouama/intellij-community,xfournet/intellij-community,joewalnes/idea-community,kool79/intellij-community,wreckJ/intellij-community,signed/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,fengbaicanhe/intellij-community,semonte/intellij-community,signed/intellij-community,semonte/intellij-community,kdwink/intellij-community,izonder/intellij-community,pwoodworth/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,da1z/intellij-community,pwoodworth/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,asedunov/intellij-community,dslomov/intellij-community,gnuhub/intellij-community,tmpgit/intellij-community,kdwink/intellij-community,slisson/intellij-community,ThiagoGarciaAlves/intellij-community,holmes/intellij-community,adedayo/intellij-community,da1z/intellij-community,jagguli/intellij-community,ThiagoGarciaAlves/intellij-community,slisson/intellij-community,slisson/intellij-community,orekyuu/intellij-community,blademainer/intellij-community,hurricup/intellij-community,nicolargo/intellij-community,allotria/intellij-community,supersven/intellij-community,alphafoobar/intellij-community,orekyuu/intellij-community,wreckJ/intellij-community,retomerz/intellij-community,wreckJ/intellij-community,fnouama/intellij-community,supersven/intellij-community,jexp/idea2,michaelgallacher/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,amith01994/intellij-community,apixandru/intellij-community,muntasirsyed/intellij-community,vladmm/intellij-community,petteyg/intellij-community,clumsy/intellij-community,MichaelNedzelsky/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,xfournet/intellij-community
,kool79/intellij-community,xfournet/intellij-community,FHannes/intellij-community,petteyg/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,clumsy/intellij-community,izonder/intellij-community,jagguli/intellij-community,semonte/intellij-community,wreckJ/intellij-community,orekyuu/intellij-community,ftomassetti/intellij-community,gnuhub/intellij-community,amith01994/intellij-community,nicolargo/intellij-community,salguarnieri/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,retomerz/intellij-community,TangHao1987/intellij-community,pwoodworth/intellij-community,nicolargo/intellij-community,holmes/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,orekyuu/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,jexp/idea2,SerCeMan/intellij-community,akosyakov/intellij-community,adedayo/intellij-community,Distrotech/intellij-community,ol-loginov/intellij-community,robovm/robovm-studio,clumsy/intellij-community,alphafoobar/intellij-community,apixandru/intellij-community,dslomov/intellij-community,hurricup/intellij-community,supersven/intellij-community,robovm/robovm-studio,nicolargo/intellij-community,gnuhub/intellij-community,allotria/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,da1z/intellij-community,jagguli/intellij-community,apixandru/intellij-community,fnouama/intellij-community,ernestp/consulo,gnuhub/intellij-community,allotria/intellij-community,consulo/consulo,vvv1559/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,ibinti/intellij-community,clumsy/intellij-community,caot/intellij-community,lucafavatella/intellij-community,ivan-fedorov/intellij-community,FHannes/intellij-community,ahb0327/intellij-community,vvv1559/intellij-community,kool79/int
ellij-community,idea4bsd/idea4bsd,clumsy/intellij-community,da1z/intellij-community,holmes/intellij-community,ryano144/intellij-community,ryano144/intellij-community,da1z/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,TangHao1987/intellij-community,petteyg/intellij-community,lucafavatella/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,petteyg/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,orekyuu/intellij-community,slisson/intellij-community,izonder/intellij-community,hurricup/intellij-community,kdwink/intellij-community,signed/intellij-community,Distrotech/intellij-community,da1z/intellij-community,hurricup/intellij-community,jagguli/intellij-community,diorcety/intellij-community,retomerz/intellij-community,FHannes/intellij-community,ahb0327/intellij-community,youdonghai/intellij-community,MER-GROUP/intellij-community,SerCeMan/intellij-community,joewalnes/idea-community,allotria/intellij-community,izonder/intellij-community,blademainer/intellij-community,akosyakov/intellij-community,nicolargo/intellij-community,fnouama/intellij-community,gnuhub/intellij-community,ivan-fedorov/intellij-community,Lekanich/intellij-community,da1z/intellij-community,fitermay/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,nicolargo/intellij-community,semonte/intellij-community,retomerz/intellij-community,adedayo/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,da1z/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,petteyg/intellij-community,diorcety/intellij-community,ahb0327/intellij-community,ahb0327/intellij-community,joewalnes/idea-community,ftomassetti/intellij-community,ahb0327/intellij-community,consulo/consulo,idea4bsd/idea4bsd,diorcety/intellij-community,ftomassetti/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,wre
ckJ/intellij-community,mglukhikh/intellij-community,adedayo/intellij-community,kool79/intellij-community,apixandru/intellij-community,diorcety/intellij-community,michaelgallacher/intellij-community,ol-loginov/intellij-community,blademainer/intellij-community,da1z/intellij-community,nicolargo/intellij-community,michaelgallacher/intellij-community,kool79/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,MER-GROUP/intellij-community,idea4bsd/idea4bsd,retomerz/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,jexp/idea2,jexp/idea2,consulo/consulo,michaelgallacher/intellij-community,supersven/intellij-community,suncycheng/intellij-community,holmes/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,diorcety/intellij-community,petteyg/intellij-community,ThiagoGarciaAlves/intellij-community,Distrotech/intellij-community,akosyakov/intellij-community,kdwink/intellij-community,vladmm/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,holmes/intellij-community,joewalnes/idea-community,orekyuu/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,dslomov/intellij-community,vladmm/intellij-community,fnouama/intellij-community,robovm/robovm-studio,kool79/intellij-community,ol-loginov/intellij-community,gnuhub/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,amith01994/intellij-community,dslomov/intellij-community,muntasirsyed/intellij-community,wreckJ/intellij-community,Lekanich/intellij-community,fnouama/intellij-community,samthor/intellij-community,izonder/intellij-community,Distrotech/intellij-community,supersven/inte
llij-community,vvv1559/intellij-community,akosyakov/intellij-community,fengbaicanhe/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,Distrotech/intellij-community,adedayo/intellij-community,idea4bsd/idea4bsd,ernestp/consulo,youdonghai/intellij-community,gnuhub/intellij-community,vladmm/intellij-community,ernestp/consulo,retomerz/intellij-community,supersven/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,caot/intellij-community,vladmm/intellij-community,jexp/idea2,mglukhikh/intellij-community,orekyuu/intellij-community,FHannes/intellij-community,signed/intellij-community,caot/intellij-community,TangHao1987/intellij-community,FHannes/intellij-community,adedayo/intellij-community,ryano144/intellij-community,supersven/intellij-community,signed/intellij-community,youdonghai/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,orekyuu/intellij-community,joewalnes/idea-community,amith01994/intellij-community,holmes/intellij-community,kool79/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,consulo/consulo,jagguli/intellij-community,fitermay/intellij-community,petteyg/intellij-community,dslomov/intellij-community,kdwink/intellij-community,da1z/intellij-community,MER-GROUP/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,fnouama/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,ftomassetti/intellij-community,ibinti/intellij-community,apixandru/intellij-community,adedayo/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,dslomov/intellij-community,ftomassetti/intellij-community,SerCeMan/intellij-community,da1z/intellij-community,ftomassetti/intellij-community,kdwink/intellij-community,robovm/robovm-studio,vvv1559/intellij-co
mmunity,lucafavatella/intellij-community,semonte/intellij-community,apixandru/intellij-community,fengbaicanhe/intellij-community,samthor/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,blademainer/intellij-community,tmpgit/intellij-community,fnouama/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,ThiagoGarciaAlves/intellij-community,SerCeMan/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,muntasirsyed/intellij-community,FHannes/intellij-community,signed/intellij-community,semonte/intellij-community,robovm/robovm-studio,ol-loginov/intellij-community,holmes/intellij-community,SerCeMan/intellij-community,ryano144/intellij-community,ernestp/consulo,clumsy/intellij-community,retomerz/intellij-community,ernestp/consulo,supersven/intellij-community,vvv1559/intellij-community,ahb0327/intellij-community,ibinti/intellij-community,petteyg/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,izonder/intellij-community,ivan-fedorov/intellij-community,amith01994/intellij-community,samthor/intellij-community,ibinti/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-community,alphafoobar/intellij-community,retomerz/intellij-community,allotria/intellij-community,kdwink/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,diorcety/intellij-community,michaelgallacher/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,dslomov/intellij-community,fitermay/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,semonte/intellij-community,blademainer/intellij-community,mglukhikh/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,semonte/intellij-commu
nity,fitermay/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,Lekanich/intellij-community,slisson/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,holmes/intellij-community,ol-loginov/intellij-community,kool79/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,joewalnes/idea-community,wreckJ/intellij-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,ibinti/intellij-community,wreckJ/intellij-community,alphafoobar/intellij-community,ibinti/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,caot/intellij-community,semonte/intellij-community,TangHao1987/intellij-community,pwoodworth/intellij-community,samthor/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,blademainer/intellij-community,samthor/intellij-community,idea4bsd/idea4bsd,alphafoobar/intellij-community,clumsy/intellij-community,ol-loginov/intellij-community,fengbaicanhe/intellij-community,ryano144/intellij-community,slisson/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,holmes/intellij-community,ol-loginov/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,wreckJ/intellij-community,akosyakov/intellij-community,slisson/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,alphafoobar/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,fengbaicanhe/intellij-community,ivan-fedorov/intellij-community,izonder/intellij-community,kool79/intellij-community,dslomov/intellij-community,robovm/robovm-studio,MER-GROUP/intellij-community,caot/intellij-community,SerCeMan/intellij-community,muntasirsyed/intellij-community,muntasirsyed/intellij-community,asedunov/intellij
-community,dslomov/intellij-community,ftomassetti/intellij-community,allotria/intellij-community,akosyakov/intellij-community,ivan-fedorov/intellij-community,blademainer/intellij-community,Lekanich/intellij-community,da1z/intellij-community,xfournet/intellij-community,akosyakov/intellij-community,nicolargo/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,youdonghai/intellij-community,supersven/intellij-community,slisson/intellij-community,allotria/intellij-community,MER-GROUP/intellij-community,signed/intellij-community,fnouama/intellij-community,TangHao1987/intellij-community,jagguli/intellij-community,FHannes/intellij-community,ryano144/intellij-community,lucafavatella/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,petteyg/intellij-community,blademainer/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,joewalnes/idea-community,adedayo/intellij-community,hurricup/intellij-community,robovm/robovm-studio,alphafoobar/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,Lekanich/intellij-community,kool79/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,apixandru/intellij-community,xfournet/intellij-community,slisson/intellij-community,jagguli/intellij-community,xfournet/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,fengbaicanhe/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,Distrotech/intellij-community,ftomassetti/intellij-community,izonder/intellij-community,MichaelNedzelsky/intellij-community,ThiagoGarciaAlves/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,slisson/intellij-community,apixandru/intellij-community,caot/intellij-community,retomerz/intellij-community,FHannes/intellij-community,signed/intellij-co
mmunity,retomerz/intellij-community,amith01994/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,ahb0327/intellij-community,alphafoobar/intellij-community,consulo/consulo,muntasirsyed/intellij-community,youdonghai/intellij-community,MER-GROUP/intellij-community,Distrotech/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,vladmm/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,apixandru/intellij-community,MichaelNedzelsky/intellij-community,akosyakov/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,hurricup/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,TangHao1987/intellij-community,holmes/intellij-community,semonte/intellij-community,diorcety/intellij-community,ftomassetti/intellij-community,fengbaicanhe/intellij-community,MichaelNedzelsky/intellij-community,michaelgallacher/intellij-community,SerCeMan/intellij-community,ThiagoGarciaAlves/intellij-community,Lekanich/intellij-community,ernestp/consulo,joewalnes/idea-community,suncycheng/intellij-community,holmes/intellij-community,suncycheng/intellij-community,amith01994/intellij-community,jagguli/intellij-community,vladmm/intellij-community,dslomov/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,izonder/intellij-community,gnuhub/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,signed/intellij-community,amith01994/intellij-community,salguarnieri/intellij-community,ryano144/intellij-community,dslomov/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,ryano144/intellij-community,kool79/intellij-community,ahb0327/intellij-community,caot/intellij-community,tmpgit/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,kdwink/intellij-community,suncycheng/intellij-community,bladema
iner/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,caot/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,suncycheng/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,akosyakov/intellij-community,ibinti/intellij-community,jagguli/intellij-community,ahb0327/intellij-community,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,retomerz/intellij-community,pwoodworth/intellij-community,vladmm/intellij-community,jexp/idea2,samthor/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,holmes/intellij-community,blademainer/intellij-community,fitermay/intellij-community,robovm/robovm-studio,adedayo/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,MER-GROUP/intellij-community,Lekanich/intellij-community,retomerz/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,jagguli/intellij-community,petteyg/intellij-community,robovm/robovm-studio,SerCeMan/intellij-community,diorcety/intellij-community,Distrotech/intellij-community,adedayo/intellij-community,youdonghai/intellij-community,blademainer/intellij-community,dslomov/intellij-community,clumsy/intellij-community,fitermay/intellij-community,samthor/intellij-community
|
java
|
## Code Before:
package com.intellij.find.findUsages;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.usageView.UsageViewUtil;
import javax.swing.*;
/**
* Created by IntelliJ IDEA.
* User: max
* Date: Feb 14, 2005
* Time: 5:40:05 PM
* To change this template use File | Settings | File Templates.
*/
public class CommonFindUsagesDialog extends AbstractFindUsagesDialog {
private PsiElement myPsiElement;
public CommonFindUsagesDialog(PsiElement element,
Project project,
FindUsagesOptions findUsagesOptions,
boolean toShowInNewTab,
boolean mustOpenInNewTab,
boolean isSingleFile) {
super(project, findUsagesOptions, toShowInNewTab, mustOpenInNewTab, isSingleFile,
FindUsagesUtil.isSearchForTextOccurencesAvailable(element, isSingleFile), !isSingleFile && !element.getManager().isInProject(element));
myPsiElement = element;
}
protected JPanel createFindWhatPanel() {
return null;
}
protected JComponent getPreferredFocusedControl() {
return null;
}
public String getLabelText() {
return StringUtil.capitalize(UsageViewUtil.getType(myPsiElement)) + " " + UsageViewUtil.getDescriptiveName(myPsiElement);
}
}
## Instruction:
Fix non-java find usages dialogs.
## Code After:
package com.intellij.find.findUsages;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.usageView.UsageViewUtil;
import javax.swing.*;
/**
* Created by IntelliJ IDEA.
* User: max
* Date: Feb 14, 2005
* Time: 5:40:05 PM
* To change this template use File | Settings | File Templates.
*/
public class CommonFindUsagesDialog extends AbstractFindUsagesDialog {
private PsiElement myPsiElement;
public CommonFindUsagesDialog(PsiElement element,
Project project,
FindUsagesOptions findUsagesOptions,
boolean toShowInNewTab,
boolean mustOpenInNewTab,
boolean isSingleFile) {
super(project, findUsagesOptions, toShowInNewTab, mustOpenInNewTab, isSingleFile,
FindUsagesUtil.isSearchForTextOccurencesAvailable(element, isSingleFile), !isSingleFile && !element.getManager().isInProject(element));
myPsiElement = element;
init();
}
protected JPanel createFindWhatPanel() {
return null;
}
protected JComponent getPreferredFocusedControl() {
return null;
}
public String getLabelText() {
return StringUtil.capitalize(UsageViewUtil.getType(myPsiElement)) + " " + UsageViewUtil.getDescriptiveName(myPsiElement);
}
}
|
// ... existing code ...
super(project, findUsagesOptions, toShowInNewTab, mustOpenInNewTab, isSingleFile,
FindUsagesUtil.isSearchForTextOccurencesAvailable(element, isSingleFile), !isSingleFile && !element.getManager().isInProject(element));
myPsiElement = element;
init();
}
protected JPanel createFindWhatPanel() {
// ... rest of the code ...
|
77c245240fcccf1c7c6f3251168801de45182b8d
|
klaxer/__init__.py
|
klaxer/__init__.py
|
"""Entry point for all things Klaxer"""
__author__ = 'Aru Sahni, Kevin Dwyer, Justin Shelton, Dylan Bernard, Will Schneider, et al'
__version__ = '0.0.1'
__license__ = 'MIT'
APP_NAME = 'Klaxer'
|
"""Entry point for all things Klaxer"""
__author__ = 'Aru Sahni, Kevin Dwyer, Justin Shelton, Dylan Bernard, et al'
__version__ = '0.0.1'
__license__ = 'MIT'
APP_NAME = 'Klaxer'
|
Adjust author list to only include contributors.
|
Adjust author list to only include contributors.
|
Python
|
mit
|
klaxer/klaxer
|
python
|
## Code Before:
"""Entry point for all things Klaxer"""
__author__ = 'Aru Sahni, Kevin Dwyer, Justin Shelton, Dylan Bernard, Will Schneider, et al'
__version__ = '0.0.1'
__license__ = 'MIT'
APP_NAME = 'Klaxer'
## Instruction:
Adjust author list to only include contributors.
## Code After:
"""Entry point for all things Klaxer"""
__author__ = 'Aru Sahni, Kevin Dwyer, Justin Shelton, Dylan Bernard, et al'
__version__ = '0.0.1'
__license__ = 'MIT'
APP_NAME = 'Klaxer'
|
...
"""Entry point for all things Klaxer"""
__author__ = 'Aru Sahni, Kevin Dwyer, Justin Shelton, Dylan Bernard, et al'
__version__ = '0.0.1'
__license__ = 'MIT'
...
|
82c57b4fad49b171cd0833b38867474d6578220c
|
client/examples/followbot.py
|
client/examples/followbot.py
|
from botchallenge import *
USERNAME = "" # Put your minecraft username here
SERVER = "" # Put the address of the minecraft server here
robot = Robot(USERNAME, SERVER)
while True:
me = robot.get_location()
owner = robot.get_owner_location()
print(me.distance(owner))
if me.distance(owner) > 4:
d = robot.find_path(owner)
robot.turn(d)
robot.move(d)
|
from botchallenge import *
import time
USERNAME = "" # Put your minecraft username here
SERVER = "" # Put the address of the minecraft server here
robot = Robot(USERNAME, SERVER)
while True:
me = robot.get_location()
owner = robot.get_owner_location()
print(me.distance(owner))
if me.distance(owner) > 4:
d = robot.find_path(owner)
robot.turn(d)
robot.move(d)
else:
time.sleep(2)
|
Add timer to follow bot
|
Add timer to follow bot
|
Python
|
mit
|
Rafiot/botchallenge,Rafiot/botchallenge,Rafiot/botchallenge,Rafiot/botchallenge
|
python
|
## Code Before:
from botchallenge import *
USERNAME = "" # Put your minecraft username here
SERVER = "" # Put the address of the minecraft server here
robot = Robot(USERNAME, SERVER)
while True:
me = robot.get_location()
owner = robot.get_owner_location()
print(me.distance(owner))
if me.distance(owner) > 4:
d = robot.find_path(owner)
robot.turn(d)
robot.move(d)
## Instruction:
Add timer to follow bot
## Code After:
from botchallenge import *
import time
USERNAME = "" # Put your minecraft username here
SERVER = "" # Put the address of the minecraft server here
robot = Robot(USERNAME, SERVER)
while True:
me = robot.get_location()
owner = robot.get_owner_location()
print(me.distance(owner))
if me.distance(owner) > 4:
d = robot.find_path(owner)
robot.turn(d)
robot.move(d)
else:
time.sleep(2)
|
...
from botchallenge import *
import time
USERNAME = "" # Put your minecraft username here
SERVER = "" # Put the address of the minecraft server here
...
d = robot.find_path(owner)
robot.turn(d)
robot.move(d)
else:
time.sleep(2)
...
|
01658167ee891257737b787d58b815dbbf1292f0
|
src/main/java/net/omniscimus/fireworks/commands/StopallCommand.java
|
src/main/java/net/omniscimus/fireworks/commands/StopallCommand.java
|
package net.omniscimus.fireworks.commands;
import java.io.UnsupportedEncodingException;
import org.bukkit.ChatColor;
import org.bukkit.command.CommandSender;
import net.omniscimus.fireworks.ShowHandler;
import net.omniscimus.fireworks.commands.exceptions.WrongArgumentsNumberException;
/**
* This command stops all currently running fireworks shows.
*
* @author Omniscimus
*/
public class StopallCommand extends FireworksCommand {
private final ShowHandler showHandler;
public StopallCommand(ShowHandler showHandler) {
this.showHandler = showHandler;
}
@Override
public void run(CommandSender sender, String[] args)
throws WrongArgumentsNumberException, UnsupportedEncodingException {
if (args.length == 0) {
showHandler.stopAllShows();
sender.sendMessage(ChatColor.GOLD + "All " + showHandler.getNumberOfRunningShows() + " fireworks shows have stopped.");
} else {
throw new WrongArgumentsNumberException();
}
}
}
|
package net.omniscimus.fireworks.commands;
import java.io.UnsupportedEncodingException;
import org.bukkit.ChatColor;
import org.bukkit.command.CommandSender;
import net.omniscimus.fireworks.ShowHandler;
import net.omniscimus.fireworks.commands.exceptions.WrongArgumentsNumberException;
/**
* This command stops all currently running fireworks shows.
*
* @author Omniscimus
*/
public class StopallCommand extends FireworksCommand {
private final ShowHandler showHandler;
public StopallCommand(ShowHandler showHandler) {
this.showHandler = showHandler;
}
@Override
public void run(CommandSender sender, String[] args)
throws WrongArgumentsNumberException, UnsupportedEncodingException {
if (args.length == 0) {
int number = showHandler.getNumberOfRunningShows();
showHandler.stopAllShows();
sender.sendMessage(ChatColor.GOLD + "All " + number + " fireworks shows have stopped.");
} else {
throw new WrongArgumentsNumberException();
}
}
}
|
Fix wrong number of stopped shows in stopall cmd
|
Fix wrong number of stopped shows in stopall cmd
This occurred because the number of running shows was checked after
stopping them all. Therefore it was always 0.
|
Java
|
mit
|
Omniscimus/Fireworks
|
java
|
## Code Before:
package net.omniscimus.fireworks.commands;
import java.io.UnsupportedEncodingException;
import org.bukkit.ChatColor;
import org.bukkit.command.CommandSender;
import net.omniscimus.fireworks.ShowHandler;
import net.omniscimus.fireworks.commands.exceptions.WrongArgumentsNumberException;
/**
* This command stops all currently running fireworks shows.
*
* @author Omniscimus
*/
public class StopallCommand extends FireworksCommand {
private final ShowHandler showHandler;
public StopallCommand(ShowHandler showHandler) {
this.showHandler = showHandler;
}
@Override
public void run(CommandSender sender, String[] args)
throws WrongArgumentsNumberException, UnsupportedEncodingException {
if (args.length == 0) {
showHandler.stopAllShows();
sender.sendMessage(ChatColor.GOLD + "All " + showHandler.getNumberOfRunningShows() + " fireworks shows have stopped.");
} else {
throw new WrongArgumentsNumberException();
}
}
}
## Instruction:
Fix wrong number of stopped shows in stopall cmd
This occurred because the number of running shows was checked after
stopping them all. Therefore it was always 0.
## Code After:
package net.omniscimus.fireworks.commands;
import java.io.UnsupportedEncodingException;
import org.bukkit.ChatColor;
import org.bukkit.command.CommandSender;
import net.omniscimus.fireworks.ShowHandler;
import net.omniscimus.fireworks.commands.exceptions.WrongArgumentsNumberException;
/**
* This command stops all currently running fireworks shows.
*
* @author Omniscimus
*/
public class StopallCommand extends FireworksCommand {
private final ShowHandler showHandler;
public StopallCommand(ShowHandler showHandler) {
this.showHandler = showHandler;
}
@Override
public void run(CommandSender sender, String[] args)
throws WrongArgumentsNumberException, UnsupportedEncodingException {
if (args.length == 0) {
int number = showHandler.getNumberOfRunningShows();
showHandler.stopAllShows();
sender.sendMessage(ChatColor.GOLD + "All " + number + " fireworks shows have stopped.");
} else {
throw new WrongArgumentsNumberException();
}
}
}
|
...
throws WrongArgumentsNumberException, UnsupportedEncodingException {
if (args.length == 0) {
int number = showHandler.getNumberOfRunningShows();
showHandler.stopAllShows();
sender.sendMessage(ChatColor.GOLD + "All " + number + " fireworks shows have stopped.");
} else {
throw new WrongArgumentsNumberException();
}
...
|
dac3cedaee583db4cc3c05a9cb2c4f15a707123e
|
pylib/mapit/middleware.py
|
pylib/mapit/middleware.py
|
import re
class JSONPMiddleware(object):
def process_response(self, request, response):
if request.GET.get('callback') and re.match('[a-zA-Z0-9_]+$', request.GET.get('callback')):
response.content = request.GET.get('callback') + '(' + response.content + ')'
return response
|
import re
class JSONPMiddleware(object):
def process_response(self, request, response):
if request.GET.get('callback') and re.match('[a-zA-Z0-9_]+$', request.GET.get('callback')):
response.content = request.GET.get('callback') + '(' + response.content + ')'
response.status_code = 200 # Must return OK for JSONP to be processed
return response
|
Set up JSONP requests to always return 200.
|
Set up JSONP requests to always return 200.
|
Python
|
agpl-3.0
|
Sinar/mapit,Code4SA/mapit,New-Bamboo/mapit,opencorato/mapit,Sinar/mapit,opencorato/mapit,opencorato/mapit,chris48s/mapit,chris48s/mapit,Code4SA/mapit,chris48s/mapit,New-Bamboo/mapit,Code4SA/mapit
|
python
|
## Code Before:
import re
class JSONPMiddleware(object):
def process_response(self, request, response):
if request.GET.get('callback') and re.match('[a-zA-Z0-9_]+$', request.GET.get('callback')):
response.content = request.GET.get('callback') + '(' + response.content + ')'
return response
## Instruction:
Set up JSONP requests to always return 200.
## Code After:
import re
class JSONPMiddleware(object):
def process_response(self, request, response):
if request.GET.get('callback') and re.match('[a-zA-Z0-9_]+$', request.GET.get('callback')):
response.content = request.GET.get('callback') + '(' + response.content + ')'
response.status_code = 200 # Must return OK for JSONP to be processed
return response
|
...
def process_response(self, request, response):
if request.GET.get('callback') and re.match('[a-zA-Z0-9_]+$', request.GET.get('callback')):
response.content = request.GET.get('callback') + '(' + response.content + ')'
response.status_code = 200 # Must return OK for JSONP to be processed
return response
...
|
1310c9b735555ded81f2cf088d047fe8e62db294
|
test.c
|
test.c
|
int main(int argc, char ** argv){
printf("Test %d\n", argc);
int i;
for(i = 0; i < argc; i++){
printf("%d: %s\n", i, argv[i]);
}
struct timeval tv;
if(gettimeofday(&tv, NULL) == 0){
printf("Gettimeofday %d %d\n", tv.tv_sec, tv.tv_usec);
void* test = malloc(1024*1024);
if(test > 0){
printf("Malloc %x\n", test);
free(test);
printf("Free\n");
DIR * dir = opendir("file:///");
if (dir != NULL) {
struct dirent * ent;
while ((ent = readdir(dir)) != NULL) {
printf("%s\n", ent->d_name);
}
closedir(dir);
pid_t pid = fork();
if(pid == 0){
printf("Fork Parent\n");
}else if(pid > 0){
printf("Fork Child %d\n", pid);
} else {
printf("Fork Failed\n");
}
}else{
printf("Opendir Failed\n");
}
} else {
printf("Malloc Failed\n");
}
} else {
printf("Gettimeofday Failed\n");
}
return 0;
}
|
int main(int argc, char ** argv){
printf("Test %d\n", argc);
int i;
for(i = 0; i < argc; i++){
printf("%d: %s\n", i, argv[i]);
}
struct timespec tp;
if(clock_gettime(CLOCK_REALTIME, &tp) == 0){
printf("clock_gettime %d %d\n", tp.tv_sec, tp.tv_nsec);
void* test = malloc(1024*1024);
if(test > 0){
printf("Malloc %x\n", test);
free(test);
printf("Free\n");
DIR * dir = opendir("file:///");
if (dir != NULL) {
struct dirent * ent;
while ((ent = readdir(dir)) != NULL) {
printf("%s\n", ent->d_name);
}
closedir(dir);
pid_t pid = fork();
if(pid > 0){
printf("Fork Parent %d = %d\n", getpid(), pid);
}else if(pid == 0){
printf("Fork Child %d = %d\n", getpid(), pid);
} else {
printf("Fork Failed %d = %d\n", getpid(), pid);
}
}else{
printf("Opendir Failed\n");
}
} else {
printf("Malloc Failed\n");
}
} else {
printf("clock_gettime Failed\n");
}
return 0;
}
|
Implement PID, fix forking in C
|
Implement PID, fix forking in C
|
C
|
mit
|
redox-os/libc,redox-os/libc,redox-os/libc
|
c
|
## Code Before:
int main(int argc, char ** argv){
printf("Test %d\n", argc);
int i;
for(i = 0; i < argc; i++){
printf("%d: %s\n", i, argv[i]);
}
struct timeval tv;
if(gettimeofday(&tv, NULL) == 0){
printf("Gettimeofday %d %d\n", tv.tv_sec, tv.tv_usec);
void* test = malloc(1024*1024);
if(test > 0){
printf("Malloc %x\n", test);
free(test);
printf("Free\n");
DIR * dir = opendir("file:///");
if (dir != NULL) {
struct dirent * ent;
while ((ent = readdir(dir)) != NULL) {
printf("%s\n", ent->d_name);
}
closedir(dir);
pid_t pid = fork();
if(pid == 0){
printf("Fork Parent\n");
}else if(pid > 0){
printf("Fork Child %d\n", pid);
} else {
printf("Fork Failed\n");
}
}else{
printf("Opendir Failed\n");
}
} else {
printf("Malloc Failed\n");
}
} else {
printf("Gettimeofday Failed\n");
}
return 0;
}
## Instruction:
Implement PID, fix forking in C
## Code After:
int main(int argc, char ** argv){
printf("Test %d\n", argc);
int i;
for(i = 0; i < argc; i++){
printf("%d: %s\n", i, argv[i]);
}
struct timespec tp;
if(clock_gettime(CLOCK_REALTIME, &tp) == 0){
printf("clock_gettime %d %d\n", tp.tv_sec, tp.tv_nsec);
void* test = malloc(1024*1024);
if(test > 0){
printf("Malloc %x\n", test);
free(test);
printf("Free\n");
DIR * dir = opendir("file:///");
if (dir != NULL) {
struct dirent * ent;
while ((ent = readdir(dir)) != NULL) {
printf("%s\n", ent->d_name);
}
closedir(dir);
pid_t pid = fork();
if(pid > 0){
printf("Fork Parent %d = %d\n", getpid(), pid);
}else if(pid == 0){
printf("Fork Child %d = %d\n", getpid(), pid);
} else {
printf("Fork Failed %d = %d\n", getpid(), pid);
}
}else{
printf("Opendir Failed\n");
}
} else {
printf("Malloc Failed\n");
}
} else {
printf("clock_gettime Failed\n");
}
return 0;
}
|
...
for(i = 0; i < argc; i++){
printf("%d: %s\n", i, argv[i]);
}
struct timespec tp;
if(clock_gettime(CLOCK_REALTIME, &tp) == 0){
printf("clock_gettime %d %d\n", tp.tv_sec, tp.tv_nsec);
void* test = malloc(1024*1024);
if(test > 0){
printf("Malloc %x\n", test);
...
closedir(dir);
pid_t pid = fork();
if(pid > 0){
printf("Fork Parent %d = %d\n", getpid(), pid);
}else if(pid == 0){
printf("Fork Child %d = %d\n", getpid(), pid);
} else {
printf("Fork Failed %d = %d\n", getpid(), pid);
}
}else{
printf("Opendir Failed\n");
...
printf("Malloc Failed\n");
}
} else {
printf("clock_gettime Failed\n");
}
return 0;
}
...
|
00ea750c0d8375733c4a5a9c0d1bc85bd5010913
|
Client/Core/src/main/java/me/footlights/core/data/store/MemoryStore.java
|
Client/Core/src/main/java/me/footlights/core/data/store/MemoryStore.java
|
package me.footlights.core.data.store;
import java.nio.ByteBuffer;
import java.util.*;
/** A block store in memory. */
public class MemoryStore extends LocalStore
{
public MemoryStore()
{
super(null);
blocks = new HashMap<String,ByteBuffer>();
}
@Override
public AbstractCollection<String> list()
{
return (AbstractSet<String>) blocks.keySet();
}
@Override
public void put(String name, ByteBuffer bytes)
{
blocks.put(name, bytes);
}
@Override
public ByteBuffer get(String name)
{
return blocks.get(name).asReadOnlyBuffer();
}
@Override
public void flush() { /* do nothing; this class always blocks */ }
/** That actual block store */
private Map<String,ByteBuffer> blocks;
}
|
package me.footlights.core.data.store;
import java.nio.ByteBuffer;
import java.util.*;
import me.footlights.core.data.NoSuchBlockException;
/** A block store in memory. */
public class MemoryStore extends LocalStore
{
public MemoryStore()
{
super(null);
blocks = new HashMap<String,ByteBuffer>();
}
@Override
public AbstractCollection<String> list()
{
return (AbstractSet<String>) blocks.keySet();
}
@Override
public void put(String name, ByteBuffer bytes)
{
if (name == null) throw new NullPointerException();
blocks.put(name, bytes);
}
@Override public ByteBuffer get(String name) throws NoSuchBlockException
{
ByteBuffer buffer = blocks.get(name);
if (buffer == null) throw new NoSuchBlockException(this, name);
else return buffer.asReadOnlyBuffer();
}
@Override
public void flush() { /* do nothing; this class always blocks */ }
/** That actual block store */
private Map<String,ByteBuffer> blocks;
}
|
Throw exception rather than return null.
|
Throw exception rather than return null.
In Store, we should check for badness (e.g. null keys) and, rather than return null if we can't find a block [or throw a NullPointerException], throw a NoSuchBlockException.
|
Java
|
apache-2.0
|
trombonehero/Footlights,trombonehero/Footlights,nasrallahmounir/Footlights,trombonehero/Footlights,nasrallahmounir/Footlights,trombonehero/Footlights,nasrallahmounir/Footlights,nasrallahmounir/Footlights
|
java
|
## Code Before:
package me.footlights.core.data.store;
import java.nio.ByteBuffer;
import java.util.*;
/** A block store in memory. */
public class MemoryStore extends LocalStore
{
public MemoryStore()
{
super(null);
blocks = new HashMap<String,ByteBuffer>();
}
@Override
public AbstractCollection<String> list()
{
return (AbstractSet<String>) blocks.keySet();
}
@Override
public void put(String name, ByteBuffer bytes)
{
blocks.put(name, bytes);
}
@Override
public ByteBuffer get(String name)
{
return blocks.get(name).asReadOnlyBuffer();
}
@Override
public void flush() { /* do nothing; this class always blocks */ }
/** That actual block store */
private Map<String,ByteBuffer> blocks;
}
## Instruction:
Throw exception rather than return null.
In Store, we should check for badness (e.g. null keys) and, rather than return null if we can't find a block [or throw a NullPointerException], throw a NoSuchBlockException.
## Code After:
package me.footlights.core.data.store;
import java.nio.ByteBuffer;
import java.util.*;
import me.footlights.core.data.NoSuchBlockException;
/** A block store in memory. */
public class MemoryStore extends LocalStore
{
public MemoryStore()
{
super(null);
blocks = new HashMap<String,ByteBuffer>();
}
@Override
public AbstractCollection<String> list()
{
return (AbstractSet<String>) blocks.keySet();
}
@Override
public void put(String name, ByteBuffer bytes)
{
if (name == null) throw new NullPointerException();
blocks.put(name, bytes);
}
@Override public ByteBuffer get(String name) throws NoSuchBlockException
{
ByteBuffer buffer = blocks.get(name);
if (buffer == null) throw new NoSuchBlockException(this, name);
else return buffer.asReadOnlyBuffer();
}
@Override
public void flush() { /* do nothing; this class always blocks */ }
/** That actual block store */
private Map<String,ByteBuffer> blocks;
}
|
...
import java.nio.ByteBuffer;
import java.util.*;
import me.footlights.core.data.NoSuchBlockException;
...
@Override
public void put(String name, ByteBuffer bytes)
{
if (name == null) throw new NullPointerException();
blocks.put(name, bytes);
}
@Override public ByteBuffer get(String name) throws NoSuchBlockException
{
ByteBuffer buffer = blocks.get(name);
if (buffer == null) throw new NoSuchBlockException(this, name);
else return buffer.asReadOnlyBuffer();
}
@Override
...
|
e513e41dd10df009a3db7641774db1acba60a301
|
tensormate/graph/__init__.py
|
tensormate/graph/__init__.py
|
from tensormate.graph.base import *
from tensormate.graph.data_pipeline import *
|
from tensormate.graph.base import *
from tensormate.graph.data_pipeline import *
from tensormate.graph.image_graph import *
|
Add an access from graph
|
Add an access from graph
|
Python
|
apache-2.0
|
songgc/tensormate
|
python
|
## Code Before:
from tensormate.graph.base import *
from tensormate.graph.data_pipeline import *
## Instruction:
Add an access from graph
## Code After:
from tensormate.graph.base import *
from tensormate.graph.data_pipeline import *
from tensormate.graph.image_graph import *
|
// ... existing code ...
from tensormate.graph.base import *
from tensormate.graph.data_pipeline import *
from tensormate.graph.image_graph import *
// ... rest of the code ...
|
8126ca21bcf8da551906eff348c92cb71fe79e6e
|
readthedocs/doc_builder/base.py
|
readthedocs/doc_builder/base.py
|
import os
def restoring_chdir(fn):
def decorator(*args, **kw):
try:
path = os.getcwd()
return fn(*args, **kw)
finally:
os.chdir(path)
return decorator
class BaseBuilder(object):
"""
The Base for all Builders. Defines the API for subclasses.
"""
@restoring_chdir
def force(self, version):
"""
An optional step to force a build even when nothing has changed.
"""
print "Forcing a build by touching files"
os.chdir(version.project.conf_dir(version.slug))
os.system('touch * && touch */*')
def clean(self, version):
"""
Clean up the version so it's ready for usage.
This is used to add RTD specific stuff to Sphinx, and to
implement whitelists on projects as well.
It is guaranteed to be called before your project is built.
"""
raise NotImplementedError
def build(self, version):
"""
Do the actual building of the documentation.
"""
raise NotImplementedError
def move(self, version):
"""
Move the documentation from it's generated place to its final home.
This needs to understand both a single server dev environment,
as well as a multi-server environment.
"""
raise NotImplementedError
|
import os
from functools import wraps
def restoring_chdir(fn):
@wraps(fn)
def decorator(*args, **kw):
try:
path = os.getcwd()
return fn(*args, **kw)
finally:
os.chdir(path)
return decorator
class BaseBuilder(object):
"""
The Base for all Builders. Defines the API for subclasses.
"""
@restoring_chdir
def force(self, version):
"""
An optional step to force a build even when nothing has changed.
"""
print "Forcing a build by touching files"
os.chdir(version.project.conf_dir(version.slug))
os.system('touch * && touch */*')
def clean(self, version):
"""
Clean up the version so it's ready for usage.
This is used to add RTD specific stuff to Sphinx, and to
implement whitelists on projects as well.
It is guaranteed to be called before your project is built.
"""
raise NotImplementedError
def build(self, version):
"""
Do the actual building of the documentation.
"""
raise NotImplementedError
def move(self, version):
"""
Move the documentation from it's generated place to its final home.
This needs to understand both a single server dev environment,
as well as a multi-server environment.
"""
raise NotImplementedError
|
Call wraps on the restoring_chdir decorator.
|
Call wraps on the restoring_chdir decorator.
|
Python
|
mit
|
alex/readthedocs.org,safwanrahman/readthedocs.org,royalwang/readthedocs.org,VishvajitP/readthedocs.org,safwanrahman/readthedocs.org,alex/readthedocs.org,tddv/readthedocs.org,dirn/readthedocs.org,takluyver/readthedocs.org,nikolas/readthedocs.org,LukasBoersma/readthedocs.org,mhils/readthedocs.org,royalwang/readthedocs.org,agjohnson/readthedocs.org,jerel/readthedocs.org,hach-que/readthedocs.org,wanghaven/readthedocs.org,atsuyim/readthedocs.org,kdkeyser/readthedocs.org,asampat3090/readthedocs.org,tddv/readthedocs.org,espdev/readthedocs.org,sunnyzwh/readthedocs.org,clarkperkins/readthedocs.org,gjtorikian/readthedocs.org,sunnyzwh/readthedocs.org,michaelmcandrew/readthedocs.org,gjtorikian/readthedocs.org,KamranMackey/readthedocs.org,atsuyim/readthedocs.org,gjtorikian/readthedocs.org,takluyver/readthedocs.org,Carreau/readthedocs.org,johncosta/private-readthedocs.org,Carreau/readthedocs.org,emawind84/readthedocs.org,soulshake/readthedocs.org,KamranMackey/readthedocs.org,wijerasa/readthedocs.org,Tazer/readthedocs.org,agjohnson/readthedocs.org,dirn/readthedocs.org,soulshake/readthedocs.org,sils1297/readthedocs.org,emawind84/readthedocs.org,hach-que/readthedocs.org,sid-kap/readthedocs.org,mhils/readthedocs.org,wanghaven/readthedocs.org,asampat3090/readthedocs.org,titiushko/readthedocs.org,SteveViss/readthedocs.org,michaelmcandrew/readthedocs.org,GovReady/readthedocs.org,KamranMackey/readthedocs.org,nikolas/readthedocs.org,SteveViss/readthedocs.org,davidfischer/readthedocs.org,agjohnson/readthedocs.org,d0ugal/readthedocs.org,kenwang76/readthedocs.org,wanghaven/readthedocs.org,espdev/readthedocs.org,singingwolfboy/readthedocs.org,LukasBoersma/readthedocs.org,sils1297/readthedocs.org,techtonik/readthedocs.org,emawind84/readthedocs.org,stevepiercy/readthedocs.org,Tazer/readthedocs.org,sid-kap/readthedocs.org,LukasBoersma/readthedocs.org,istresearch/readthedocs.org,singingwolfboy/readthedocs.org,CedarLogic/readthedocs.org,soulshake/readthedocs.org,hach-que/readthedocs.org,d0ugal/rea
dthedocs.org,davidfischer/readthedocs.org,dirn/readthedocs.org,attakei/readthedocs-oauth,fujita-shintaro/readthedocs.org,pombredanne/readthedocs.org,kenwang76/readthedocs.org,kenshinthebattosai/readthedocs.org,rtfd/readthedocs.org,kenshinthebattosai/readthedocs.org,raven47git/readthedocs.org,johncosta/private-readthedocs.org,laplaceliu/readthedocs.org,agjohnson/readthedocs.org,nyergler/pythonslides,michaelmcandrew/readthedocs.org,laplaceliu/readthedocs.org,Tazer/readthedocs.org,safwanrahman/readthedocs.org,mrshoki/readthedocs.org,mhils/readthedocs.org,singingwolfboy/readthedocs.org,kdkeyser/readthedocs.org,LukasBoersma/readthedocs.org,emawind84/readthedocs.org,raven47git/readthedocs.org,tddv/readthedocs.org,Tazer/readthedocs.org,cgourlay/readthedocs.org,wanghaven/readthedocs.org,gjtorikian/readthedocs.org,pombredanne/readthedocs.org,takluyver/readthedocs.org,kenshinthebattosai/readthedocs.org,raven47git/readthedocs.org,wijerasa/readthedocs.org,sils1297/readthedocs.org,kenshinthebattosai/readthedocs.org,d0ugal/readthedocs.org,SteveViss/readthedocs.org,espdev/readthedocs.org,mrshoki/readthedocs.org,cgourlay/readthedocs.org,rtfd/readthedocs.org,techtonik/readthedocs.org,ojii/readthedocs.org,fujita-shintaro/readthedocs.org,KamranMackey/readthedocs.org,titiushko/readthedocs.org,clarkperkins/readthedocs.org,VishvajitP/readthedocs.org,atsuyim/readthedocs.org,CedarLogic/readthedocs.org,CedarLogic/readthedocs.org,asampat3090/readthedocs.org,hach-que/readthedocs.org,michaelmcandrew/readthedocs.org,jerel/readthedocs.org,cgourlay/readthedocs.org,atsuyim/readthedocs.org,royalwang/readthedocs.org,johncosta/private-readthedocs.org,cgourlay/readthedocs.org,GovReady/readthedocs.org,wijerasa/readthedocs.org,Carreau/readthedocs.org,nikolas/readthedocs.org,takluyver/readthedocs.org,sid-kap/readthedocs.org,mrshoki/readthedocs.org,nikolas/readthedocs.org,CedarLogic/readthedocs.org,wijerasa/readthedocs.org,istresearch/readthedocs.org,Carreau/readthedocs.org,espdev/readthedocs.org,singingw
olfboy/readthedocs.org,fujita-shintaro/readthedocs.org,istresearch/readthedocs.org,stevepiercy/readthedocs.org,ojii/readthedocs.org,jerel/readthedocs.org,VishvajitP/readthedocs.org,laplaceliu/readthedocs.org,istresearch/readthedocs.org,pombredanne/readthedocs.org,mrshoki/readthedocs.org,mhils/readthedocs.org,kdkeyser/readthedocs.org,rtfd/readthedocs.org,safwanrahman/readthedocs.org,alex/readthedocs.org,VishvajitP/readthedocs.org,dirn/readthedocs.org,GovReady/readthedocs.org,sunnyzwh/readthedocs.org,jerel/readthedocs.org,rtfd/readthedocs.org,nyergler/pythonslides,sils1297/readthedocs.org,kdkeyser/readthedocs.org,attakei/readthedocs-oauth,ojii/readthedocs.org,alex/readthedocs.org,clarkperkins/readthedocs.org,fujita-shintaro/readthedocs.org,clarkperkins/readthedocs.org,nyergler/pythonslides,raven47git/readthedocs.org,sunnyzwh/readthedocs.org,davidfischer/readthedocs.org,GovReady/readthedocs.org,titiushko/readthedocs.org,asampat3090/readthedocs.org,techtonik/readthedocs.org,attakei/readthedocs-oauth,royalwang/readthedocs.org,stevepiercy/readthedocs.org,laplaceliu/readthedocs.org,titiushko/readthedocs.org,sid-kap/readthedocs.org,ojii/readthedocs.org,nyergler/pythonslides,kenwang76/readthedocs.org,davidfischer/readthedocs.org,attakei/readthedocs-oauth,espdev/readthedocs.org,stevepiercy/readthedocs.org,d0ugal/readthedocs.org,kenwang76/readthedocs.org,SteveViss/readthedocs.org,soulshake/readthedocs.org,techtonik/readthedocs.org
|
python
|
## Code Before:
import os
def restoring_chdir(fn):
def decorator(*args, **kw):
try:
path = os.getcwd()
return fn(*args, **kw)
finally:
os.chdir(path)
return decorator
class BaseBuilder(object):
"""
The Base for all Builders. Defines the API for subclasses.
"""
@restoring_chdir
def force(self, version):
"""
An optional step to force a build even when nothing has changed.
"""
print "Forcing a build by touching files"
os.chdir(version.project.conf_dir(version.slug))
os.system('touch * && touch */*')
def clean(self, version):
"""
Clean up the version so it's ready for usage.
This is used to add RTD specific stuff to Sphinx, and to
implement whitelists on projects as well.
It is guaranteed to be called before your project is built.
"""
raise NotImplementedError
def build(self, version):
"""
Do the actual building of the documentation.
"""
raise NotImplementedError
def move(self, version):
"""
Move the documentation from it's generated place to its final home.
This needs to understand both a single server dev environment,
as well as a multi-server environment.
"""
raise NotImplementedError
## Instruction:
Call wraps on the restoring_chdir decorator.
## Code After:
import os
from functools import wraps
def restoring_chdir(fn):
@wraps(fn)
def decorator(*args, **kw):
try:
path = os.getcwd()
return fn(*args, **kw)
finally:
os.chdir(path)
return decorator
class BaseBuilder(object):
"""
The Base for all Builders. Defines the API for subclasses.
"""
@restoring_chdir
def force(self, version):
"""
An optional step to force a build even when nothing has changed.
"""
print "Forcing a build by touching files"
os.chdir(version.project.conf_dir(version.slug))
os.system('touch * && touch */*')
def clean(self, version):
"""
Clean up the version so it's ready for usage.
This is used to add RTD specific stuff to Sphinx, and to
implement whitelists on projects as well.
It is guaranteed to be called before your project is built.
"""
raise NotImplementedError
def build(self, version):
"""
Do the actual building of the documentation.
"""
raise NotImplementedError
def move(self, version):
"""
Move the documentation from it's generated place to its final home.
This needs to understand both a single server dev environment,
as well as a multi-server environment.
"""
raise NotImplementedError
|
// ... existing code ...
import os
from functools import wraps
def restoring_chdir(fn):
@wraps(fn)
def decorator(*args, **kw):
try:
path = os.getcwd()
// ... rest of the code ...
|
70a260148ff33185ce8e70b3dbd46a15c0ff5aaf
|
cc2/arch/qbe/code.c
|
cc2/arch/qbe/code.c
|
void
defsym(Symbol *sym, int alloc)
{
}
void
data(Node *np)
{
}
void
writeout(void)
{
}
|
/*
* : is for user-defined Aggregate Types
* $ is for globals (represented by a pointer)
* % is for function-scope temporaries
* @ is for block labels
*/
static char
sigil(Symbol *sym)
{
switch (sym->kind) {
case EXTRN:
case GLOB:
case PRIVAT:
case LOCAL:
return '$';
case AUTO:
case REG:
return '%';
default:
abort();
}
}
static void
size2asm(Type *tp)
{
char *s;
if (tp->flags & STRF) {
abort();
} else {
switch (tp->size) {
case 1:
s = "b\t";
break;
case 2:
s = "h\t";
break;
case 4:
s = "w\t";
break;
case 8:
s = "q\t";
break;
default:
s = "z\t%llu\t";
break;
}
}
printf(s, (unsigned long long) tp->size);
}
void
defsym(Symbol *sym, int alloc)
{
if (!alloc)
return;
if (sym->kind == GLOB)
fputs("export ", stdout);
printf("data %c%s = {\n", sigil(sym), sym->name);
if (sym->type.flags & INITF)
return;
putchar('\t');
size2asm(&sym->type);
puts("0\n}");
}
void
data(Node *np)
{
}
void
writeout(void)
{
}
|
Add basic implementation of defsym()
|
[cc2] Add basic implementation of defsym()
This is a first implementation with a limited implementation of
sigil() and of size2asm() that, for instance, does not support
strings.
|
C
|
isc
|
k0gaMSX/scc,k0gaMSX/scc,k0gaMSX/scc
|
c
|
## Code Before:
void
defsym(Symbol *sym, int alloc)
{
}
void
data(Node *np)
{
}
void
writeout(void)
{
}
## Instruction:
[cc2] Add basic implementation of defsym()
This is a first implementation with a limited implementation of
sigil() and of size2asm() that, for instance, does not support
strings.
## Code After:
/*
* : is for user-defined Aggregate Types
* $ is for globals (represented by a pointer)
* % is for function-scope temporaries
* @ is for block labels
*/
static char
sigil(Symbol *sym)
{
switch (sym->kind) {
case EXTRN:
case GLOB:
case PRIVAT:
case LOCAL:
return '$';
case AUTO:
case REG:
return '%';
default:
abort();
}
}
static void
size2asm(Type *tp)
{
char *s;
if (tp->flags & STRF) {
abort();
} else {
switch (tp->size) {
case 1:
s = "b\t";
break;
case 2:
s = "h\t";
break;
case 4:
s = "w\t";
break;
case 8:
s = "q\t";
break;
default:
s = "z\t%llu\t";
break;
}
}
printf(s, (unsigned long long) tp->size);
}
void
defsym(Symbol *sym, int alloc)
{
if (!alloc)
return;
if (sym->kind == GLOB)
fputs("export ", stdout);
printf("data %c%s = {\n", sigil(sym), sym->name);
if (sym->type.flags & INITF)
return;
putchar('\t');
size2asm(&sym->type);
puts("0\n}");
}
void
data(Node *np)
{
}
void
writeout(void)
{
}
|
...
/*
* : is for user-defined Aggregate Types
* $ is for globals (represented by a pointer)
* % is for function-scope temporaries
* @ is for block labels
*/
static char
sigil(Symbol *sym)
{
switch (sym->kind) {
case EXTRN:
case GLOB:
case PRIVAT:
case LOCAL:
return '$';
case AUTO:
case REG:
return '%';
default:
abort();
}
}
static void
size2asm(Type *tp)
{
char *s;
if (tp->flags & STRF) {
abort();
} else {
switch (tp->size) {
case 1:
s = "b\t";
break;
case 2:
s = "h\t";
break;
case 4:
s = "w\t";
break;
case 8:
s = "q\t";
break;
default:
s = "z\t%llu\t";
break;
}
}
printf(s, (unsigned long long) tp->size);
}
void
defsym(Symbol *sym, int alloc)
{
if (!alloc)
return;
if (sym->kind == GLOB)
fputs("export ", stdout);
printf("data %c%s = {\n", sigil(sym), sym->name);
if (sym->type.flags & INITF)
return;
putchar('\t');
size2asm(&sym->type);
puts("0\n}");
}
void
...
|
b94bcd4c0aa74f4f963aec368f8417add005fafe
|
lilthumb.h
|
lilthumb.h
|
namespace lilthumb{
std::string timeString()
{
time_t rawtime;
struct tm * timeinfo;
char buffer[80];
time (&rawtime);
timeinfo = localtime(&rawtime);
strftime(buffer,80,"%d-%m-%Y %I:%M:%S",timeinfo);
std::string str(buffer);
return str;
}
}
#define logger(stream,message) stream << lilthumb::timeString() << " | "<< message << std::endl;
#endif
|
namespace lilthumb{
std::string timeString()
{
time_t rawtime;
struct tm * timeinfo;
char buffer[80];
time (&rawtime);
timeinfo = localtime(&rawtime);
strftime(buffer,80,"%d-%m-%Y %I:%M:%S",timeinfo);
std::string str(buffer);
return str;
}
void stone( std::ostream& stream, std::string message )
{
stream << timeString() << " | " << message << std::endl;
}
}
#endif
|
Change logger define to namespace stone function
|
Change logger define to namespace stone function
|
C
|
apache-2.0
|
jeromevelut/lilthumb
|
c
|
## Code Before:
namespace lilthumb{
std::string timeString()
{
time_t rawtime;
struct tm * timeinfo;
char buffer[80];
time (&rawtime);
timeinfo = localtime(&rawtime);
strftime(buffer,80,"%d-%m-%Y %I:%M:%S",timeinfo);
std::string str(buffer);
return str;
}
}
#define logger(stream,message) stream << lilthumb::timeString() << " | "<< message << std::endl;
#endif
## Instruction:
Change logger define to namespace stone function
## Code After:
namespace lilthumb{
std::string timeString()
{
time_t rawtime;
struct tm * timeinfo;
char buffer[80];
time (&rawtime);
timeinfo = localtime(&rawtime);
strftime(buffer,80,"%d-%m-%Y %I:%M:%S",timeinfo);
std::string str(buffer);
return str;
}
void stone( std::ostream& stream, std::string message )
{
stream << timeString() << " | " << message << std::endl;
}
}
#endif
|
# ... existing code ...
namespace lilthumb{
std::string timeString()
# ... modified code ...
return str;
}
void stone( std::ostream& stream, std::string message )
{
stream << timeString() << " | " << message << std::endl;
}
}
#endif
# ... rest of the code ...
|
8a8d4905c169b9a1060f1283d0286c433af24f43
|
word2gauss/words.py
|
word2gauss/words.py
|
from itertools import islice
from .embeddings import text_to_pairs
def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5):
'''
Convert a document stream to batches of pairs used for training embeddings.
iter_pairs is a generator that yields batches of pairs that can
be passed to GaussianEmbedding.train
fin = an iterator of documents / sentences (e.g. a file like object)
Each element is a string of raw text
vocab = something implementing the Vocabulary interface
batch_size = size of batches
window = Number of words to the left and right of center word to include
as positive pairs
nsamples = number of negative samples to drawn for each center word
'''
documents = iter(fin)
batch = list(islice(documents, batch_size))
while len(batch) > 0:
text = [
vocab.tokenize(doc, remove_oov=False, return_ids=True)
for doc in batch
]
pairs = text_to_pairs(text, vocab.random_ids,
nsamples_per_word=nsamples,
half_window_size=window)
yield pairs
batch = list(islice(documents, batch_size))
|
from itertools import islice
from .embeddings import text_to_pairs
def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5):
'''
Convert a document stream to batches of pairs used for training embeddings.
iter_pairs is a generator that yields batches of pairs that can
be passed to GaussianEmbedding.train
fin = an iterator of documents / sentences (e.g. a file like object)
Each element is a string of raw text
vocab = something implementing the Vocabulary interface
batch_size = size of batches
window = Number of words to the left and right of center word to include
as positive pairs
nsamples = number of negative samples to drawn for each center word
'''
documents = iter(fin)
batch = list(islice(documents, batch_size))
while len(batch) > 0:
text = [
vocab.tokenize_ids(doc, remove_oov=False)
for doc in batch
]
pairs = text_to_pairs(text, vocab.random_ids,
nsamples_per_word=nsamples,
half_window_size=window)
yield pairs
batch = list(islice(documents, batch_size))
|
Change the interface on tokenize in vocabulary
|
Change the interface on tokenize in vocabulary
|
Python
|
mit
|
seomoz/word2gauss,seomoz/word2gauss
|
python
|
## Code Before:
from itertools import islice
from .embeddings import text_to_pairs
def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5):
'''
Convert a document stream to batches of pairs used for training embeddings.
iter_pairs is a generator that yields batches of pairs that can
be passed to GaussianEmbedding.train
fin = an iterator of documents / sentences (e.g. a file like object)
Each element is a string of raw text
vocab = something implementing the Vocabulary interface
batch_size = size of batches
window = Number of words to the left and right of center word to include
as positive pairs
nsamples = number of negative samples to drawn for each center word
'''
documents = iter(fin)
batch = list(islice(documents, batch_size))
while len(batch) > 0:
text = [
vocab.tokenize(doc, remove_oov=False, return_ids=True)
for doc in batch
]
pairs = text_to_pairs(text, vocab.random_ids,
nsamples_per_word=nsamples,
half_window_size=window)
yield pairs
batch = list(islice(documents, batch_size))
## Instruction:
Change the interface on tokenize in vocabulary
## Code After:
from itertools import islice
from .embeddings import text_to_pairs
def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5):
'''
Convert a document stream to batches of pairs used for training embeddings.
iter_pairs is a generator that yields batches of pairs that can
be passed to GaussianEmbedding.train
fin = an iterator of documents / sentences (e.g. a file like object)
Each element is a string of raw text
vocab = something implementing the Vocabulary interface
batch_size = size of batches
window = Number of words to the left and right of center word to include
as positive pairs
nsamples = number of negative samples to drawn for each center word
'''
documents = iter(fin)
batch = list(islice(documents, batch_size))
while len(batch) > 0:
text = [
vocab.tokenize_ids(doc, remove_oov=False)
for doc in batch
]
pairs = text_to_pairs(text, vocab.random_ids,
nsamples_per_word=nsamples,
half_window_size=window)
yield pairs
batch = list(islice(documents, batch_size))
|
// ... existing code ...
batch = list(islice(documents, batch_size))
while len(batch) > 0:
text = [
vocab.tokenize_ids(doc, remove_oov=False)
for doc in batch
]
pairs = text_to_pairs(text, vocab.random_ids,
// ... rest of the code ...
|
cbd2af02e93e7ae902659717135488bc9add218e
|
src/main/java/org/purescript/parser/ParserContext.kt
|
src/main/java/org/purescript/parser/ParserContext.kt
|
@file:Suppress("unused")
package org.purescript.parser
import com.intellij.lang.PsiBuilder
import com.intellij.psi.tree.IElementType
class ParserContext(private val builder: PsiBuilder) {
private val recoverySet = HashMap<IElementType, Int?>()
var isInAttempt = false
private var inOptional = 0
fun eof() = builder.eof()
fun advance() = builder.advanceLexer()
fun addUntilToken(token: IElementType) {
var i = 0
if (recoverySet.containsKey(token)) {
i = recoverySet[token]!!
}
recoverySet[token] = i + 1
}
fun removeUntilToken(token: IElementType) {
val i = recoverySet[token]!!
if (i == 1) {
recoverySet.remove(token)
} else {
recoverySet[token] = i - 1
}
}
fun isUntilToken(token: IElementType) = recoverySet.containsKey(token)
fun enterOptional() = inOptional++
fun exitOptional() = inOptional--
fun isInOptional() = inOptional > 0
fun text() = builder.tokenText ?: ""
fun peek() = builder.tokenType ?: PSTokens.EOF
fun eat(type: IElementType): Boolean {
if (builder.tokenType === type) {
advance()
return true
}
return false
}
fun start(): PsiBuilder.Marker = builder.mark()
val position: Int
get() = builder.currentOffset
fun getText(start: Int, end: Int) =
builder.originalText.subSequence(start, end).toString()
}
|
@file:Suppress("unused")
package org.purescript.parser
import com.intellij.lang.PsiBuilder
import com.intellij.psi.tree.IElementType
class ParserContext(private val builder: PsiBuilder) {
var isInAttempt = false
private var inOptional = 0
fun eof() = builder.eof()
fun advance() = builder.advanceLexer()
fun enterOptional() = inOptional++
fun exitOptional() = inOptional--
fun isInOptional() = inOptional > 0
fun text() = builder.tokenText ?: ""
fun peek() = builder.tokenType ?: PSTokens.EOF
fun eat(type: IElementType): Boolean {
if (builder.tokenType === type) {
advance()
return true
}
return false
}
fun start(): PsiBuilder.Marker = builder.mark()
val position: Int
get() = builder.currentOffset
fun getText(start: Int, end: Int) =
builder.originalText.subSequence(start, end).toString()
}
|
Remove methods and a field that are no longer in use
|
Remove methods and a field that are no longer in use
|
Kotlin
|
bsd-3-clause
|
intellij-purescript/intellij-purescript,intellij-purescript/intellij-purescript
|
kotlin
|
## Code Before:
@file:Suppress("unused")
package org.purescript.parser
import com.intellij.lang.PsiBuilder
import com.intellij.psi.tree.IElementType
class ParserContext(private val builder: PsiBuilder) {
private val recoverySet = HashMap<IElementType, Int?>()
var isInAttempt = false
private var inOptional = 0
fun eof() = builder.eof()
fun advance() = builder.advanceLexer()
fun addUntilToken(token: IElementType) {
var i = 0
if (recoverySet.containsKey(token)) {
i = recoverySet[token]!!
}
recoverySet[token] = i + 1
}
fun removeUntilToken(token: IElementType) {
val i = recoverySet[token]!!
if (i == 1) {
recoverySet.remove(token)
} else {
recoverySet[token] = i - 1
}
}
fun isUntilToken(token: IElementType) = recoverySet.containsKey(token)
fun enterOptional() = inOptional++
fun exitOptional() = inOptional--
fun isInOptional() = inOptional > 0
fun text() = builder.tokenText ?: ""
fun peek() = builder.tokenType ?: PSTokens.EOF
fun eat(type: IElementType): Boolean {
if (builder.tokenType === type) {
advance()
return true
}
return false
}
fun start(): PsiBuilder.Marker = builder.mark()
val position: Int
get() = builder.currentOffset
fun getText(start: Int, end: Int) =
builder.originalText.subSequence(start, end).toString()
}
## Instruction:
Remove methods and a field that are no longer in use
## Code After:
@file:Suppress("unused")
package org.purescript.parser
import com.intellij.lang.PsiBuilder
import com.intellij.psi.tree.IElementType
class ParserContext(private val builder: PsiBuilder) {
var isInAttempt = false
private var inOptional = 0
fun eof() = builder.eof()
fun advance() = builder.advanceLexer()
fun enterOptional() = inOptional++
fun exitOptional() = inOptional--
fun isInOptional() = inOptional > 0
fun text() = builder.tokenText ?: ""
fun peek() = builder.tokenType ?: PSTokens.EOF
fun eat(type: IElementType): Boolean {
if (builder.tokenType === type) {
advance()
return true
}
return false
}
fun start(): PsiBuilder.Marker = builder.mark()
val position: Int
get() = builder.currentOffset
fun getText(start: Int, end: Int) =
builder.originalText.subSequence(start, end).toString()
}
|
...
import com.intellij.psi.tree.IElementType
class ParserContext(private val builder: PsiBuilder) {
var isInAttempt = false
private var inOptional = 0
fun eof() = builder.eof()
...
fun advance() = builder.advanceLexer()
fun enterOptional() = inOptional++
fun exitOptional() = inOptional--
fun isInOptional() = inOptional > 0
...
|
6340d0d14107b1e827108db52ce18ed6c6c630a7
|
18/E18_10/E18_10.java
|
18/E18_10/E18_10.java
|
/*
Write a recursive method that finds the number of occurrences of a specified
letter in a string using the following method header:
public static int count(String str, char a)
For example, count("Welcome", 'e') returns 2. Write a test program that
prompts the user to enter a string and a character, and displays the number
of occurrences for the character in the string.
*/
import java.util.Scanner;
public class E18_10 {
public static void main(String[] args) {
Scanner input = new Scanner(System.in);
System.out.print("Enter a string: ");
String s = input.nextLine();
System.out.print("Enter a character: ");
char c = input.nextLine().charAt(0);
System.out.println(count(s, c));
}
public static int count(String str, char a) {
if (str.length() > 0) {
int match =
str.toLowerCase().charAt(0) == Character.toLowerCase(a) ? 1 : 0;
return match + count(str.substring(1, str.length()), a);
} else {
return 0;
}
}
}
|
/*
Write a recursive method that finds the number of occurrences of a specified
letter in a string using the following method header:
public static int count(String str, char a)
For example, count("Welcome", 'e') returns 2. Write a test program that
prompts the user to enter a string and a character, and displays the number
of occurrences for the character in the string.
*/
import java.util.Scanner;
public class E18_10 {
public static void main(String[] args) {
Scanner input = new Scanner(System.in);
System.out.print("Enter a string: ");
String s = input.nextLine();
System.out.print("Enter a character: ");
char c = input.nextLine().charAt(0);
System.out.println(count(s, c));
}
public static int count(String str, char a) {
return count(str, a, 0);
}
private static int count(String str, char a, int index) {
if (index < str.length()) {
int match =
str.toLowerCase().charAt(index) == Character.toLowerCase(a) ? 1 : 0;
return match + count(str, a, index + 1);
} else {
return 0;
}
}
}
|
Use auxiliary recursive method to eliminate need to create new strings via substring method
|
Use auxiliary recursive method to eliminate need to create new strings via substring method
|
Java
|
mit
|
maxalthoff/intro-to-java-exercises
|
java
|
## Code Before:
/*
Write a recursive method that finds the number of occurrences of a specified
letter in a string using the following method header:
public static int count(String str, char a)
For example, count("Welcome", 'e') returns 2. Write a test program that
prompts the user to enter a string and a character, and displays the number
of occurrences for the character in the string.
*/
import java.util.Scanner;
public class E18_10 {
public static void main(String[] args) {
Scanner input = new Scanner(System.in);
System.out.print("Enter a string: ");
String s = input.nextLine();
System.out.print("Enter a character: ");
char c = input.nextLine().charAt(0);
System.out.println(count(s, c));
}
public static int count(String str, char a) {
if (str.length() > 0) {
int match =
str.toLowerCase().charAt(0) == Character.toLowerCase(a) ? 1 : 0;
return match + count(str.substring(1, str.length()), a);
} else {
return 0;
}
}
}
## Instruction:
Use auxiliary recursive method to eliminate need to create new strings via substring method
## Code After:
/*
Write a recursive method that finds the number of occurrences of a specified
letter in a string using the following method header:
public static int count(String str, char a)
For example, count("Welcome", 'e') returns 2. Write a test program that
prompts the user to enter a string and a character, and displays the number
of occurrences for the character in the string.
*/
import java.util.Scanner;
public class E18_10 {
public static void main(String[] args) {
Scanner input = new Scanner(System.in);
System.out.print("Enter a string: ");
String s = input.nextLine();
System.out.print("Enter a character: ");
char c = input.nextLine().charAt(0);
System.out.println(count(s, c));
}
public static int count(String str, char a) {
return count(str, a, 0);
}
private static int count(String str, char a, int index) {
if (index < str.length()) {
int match =
str.toLowerCase().charAt(index) == Character.toLowerCase(a) ? 1 : 0;
return match + count(str, a, index + 1);
} else {
return 0;
}
}
}
|
// ... existing code ...
}
public static int count(String str, char a) {
return count(str, a, 0);
}
private static int count(String str, char a, int index) {
if (index < str.length()) {
int match =
str.toLowerCase().charAt(index) == Character.toLowerCase(a) ? 1 : 0;
return match + count(str, a, index + 1);
} else {
return 0;
}
// ... rest of the code ...
|
3624f96f852e276f9fc7731b9c43a9de42dd8983
|
test/helpers-Server.c
|
test/helpers-Server.c
|
char char_of_uint8(uint8_t x) {
return (char) x;
}
uint32_t bufstrcpy(char *dst, const char *src) {
/* The F* precondition guarantees that src is zero-terminated */
return sprintf(dst, "%s", src);
}
uint32_t print_u32(char *dst, uint32_t i) {
return sprintf(dst, "%"PRIu32, i);
}
|
uint32_t bufstrcpy(char *dst, const char *src) {
/* The F* precondition guarantees that src is zero-terminated */
return sprintf(dst, "%s", src);
}
uint32_t print_u32(char *dst, uint32_t i) {
return sprintf(dst, "%"PRIu32, i);
}
|
Remove function now implemented in kremlib
|
Remove function now implemented in kremlib
|
C
|
apache-2.0
|
FStarLang/kremlin,FStarLang/kremlin,FStarLang/kremlin,FStarLang/kremlin
|
c
|
## Code Before:
char char_of_uint8(uint8_t x) {
return (char) x;
}
uint32_t bufstrcpy(char *dst, const char *src) {
/* The F* precondition guarantees that src is zero-terminated */
return sprintf(dst, "%s", src);
}
uint32_t print_u32(char *dst, uint32_t i) {
return sprintf(dst, "%"PRIu32, i);
}
## Instruction:
Remove function now implemented in kremlib
## Code After:
uint32_t bufstrcpy(char *dst, const char *src) {
/* The F* precondition guarantees that src is zero-terminated */
return sprintf(dst, "%s", src);
}
uint32_t print_u32(char *dst, uint32_t i) {
return sprintf(dst, "%"PRIu32, i);
}
|
# ... existing code ...
uint32_t bufstrcpy(char *dst, const char *src) {
/* The F* precondition guarantees that src is zero-terminated */
# ... rest of the code ...
|
3ef0c6adcfa74877245f586618c4592b308976cd
|
openapi_core/wrappers/flask.py
|
openapi_core/wrappers/flask.py
|
"""OpenAPI core wrappers module"""
from openapi_core.wrappers.base import BaseOpenAPIRequest, BaseOpenAPIResponse
class FlaskOpenAPIRequest(BaseOpenAPIRequest):
def __init__(self, request):
self.request = request
@property
def host_url(self):
return self.request.host_url
@property
def path(self):
return self.request.path
@property
def method(self):
return self.request.method.lower()
@property
def path_pattern(self):
if self.request.url_rule is None:
return self.path
return self.request.url_rule.rule
@property
def parameters(self):
return {
'path': self.request.view_args,
'query': self.request.args,
'header': self.request.headers,
'cookie': self.request.cookies,
}
@property
def body(self):
return self.request.data
@property
def mimetype(self):
return self.request.mimetype
class FlaskOpenAPIResponse(BaseOpenAPIResponse):
def __init__(self, response):
self.response = response
@property
def data(self):
return self.response.data
@property
def status_code(self):
return self.response._status_code
@property
def mimetype(self):
return self.response.mimetype
|
"""OpenAPI core wrappers module"""
import re
from openapi_core.wrappers.base import BaseOpenAPIRequest, BaseOpenAPIResponse
# http://flask.pocoo.org/docs/1.0/quickstart/#variable-rules
PATH_PARAMETER_PATTERN = r'<(?:(?:string|int|float|path|uuid):)?(\w+)>'
class FlaskOpenAPIRequest(BaseOpenAPIRequest):
path_regex = re.compile(PATH_PARAMETER_PATTERN)
def __init__(self, request):
self.request = request
@property
def host_url(self):
return self.request.host_url
@property
def path(self):
return self.request.path
@property
def method(self):
return self.request.method.lower()
@property
def path_pattern(self):
if self.request.url_rule is None:
return self.path
return self.path_regex.sub(r'{\1}', self.request.url_rule.rule)
@property
def parameters(self):
return {
'path': self.request.view_args,
'query': self.request.args,
'header': self.request.headers,
'cookie': self.request.cookies,
}
@property
def body(self):
return self.request.data
@property
def mimetype(self):
return self.request.mimetype
class FlaskOpenAPIResponse(BaseOpenAPIResponse):
def __init__(self, response):
self.response = response
@property
def data(self):
return self.response.data
@property
def status_code(self):
return self.response._status_code
@property
def mimetype(self):
return self.response.mimetype
|
Convert Flask path variables to OpenAPI path parameters
|
Convert Flask path variables to OpenAPI path parameters
|
Python
|
bsd-3-clause
|
p1c2u/openapi-core
|
python
|
## Code Before:
"""OpenAPI core wrappers module"""
from openapi_core.wrappers.base import BaseOpenAPIRequest, BaseOpenAPIResponse
class FlaskOpenAPIRequest(BaseOpenAPIRequest):
def __init__(self, request):
self.request = request
@property
def host_url(self):
return self.request.host_url
@property
def path(self):
return self.request.path
@property
def method(self):
return self.request.method.lower()
@property
def path_pattern(self):
if self.request.url_rule is None:
return self.path
return self.request.url_rule.rule
@property
def parameters(self):
return {
'path': self.request.view_args,
'query': self.request.args,
'header': self.request.headers,
'cookie': self.request.cookies,
}
@property
def body(self):
return self.request.data
@property
def mimetype(self):
return self.request.mimetype
class FlaskOpenAPIResponse(BaseOpenAPIResponse):
def __init__(self, response):
self.response = response
@property
def data(self):
return self.response.data
@property
def status_code(self):
return self.response._status_code
@property
def mimetype(self):
return self.response.mimetype
## Instruction:
Convert Flask path variables to OpenAPI path parameters
## Code After:
"""OpenAPI core wrappers module"""
import re
from openapi_core.wrappers.base import BaseOpenAPIRequest, BaseOpenAPIResponse
# http://flask.pocoo.org/docs/1.0/quickstart/#variable-rules
PATH_PARAMETER_PATTERN = r'<(?:(?:string|int|float|path|uuid):)?(\w+)>'
class FlaskOpenAPIRequest(BaseOpenAPIRequest):
path_regex = re.compile(PATH_PARAMETER_PATTERN)
def __init__(self, request):
self.request = request
@property
def host_url(self):
return self.request.host_url
@property
def path(self):
return self.request.path
@property
def method(self):
return self.request.method.lower()
@property
def path_pattern(self):
if self.request.url_rule is None:
return self.path
return self.path_regex.sub(r'{\1}', self.request.url_rule.rule)
@property
def parameters(self):
return {
'path': self.request.view_args,
'query': self.request.args,
'header': self.request.headers,
'cookie': self.request.cookies,
}
@property
def body(self):
return self.request.data
@property
def mimetype(self):
return self.request.mimetype
class FlaskOpenAPIResponse(BaseOpenAPIResponse):
def __init__(self, response):
self.response = response
@property
def data(self):
return self.response.data
@property
def status_code(self):
return self.response._status_code
@property
def mimetype(self):
return self.response.mimetype
|
# ... existing code ...
"""OpenAPI core wrappers module"""
import re
from openapi_core.wrappers.base import BaseOpenAPIRequest, BaseOpenAPIResponse
# http://flask.pocoo.org/docs/1.0/quickstart/#variable-rules
PATH_PARAMETER_PATTERN = r'<(?:(?:string|int|float|path|uuid):)?(\w+)>'
class FlaskOpenAPIRequest(BaseOpenAPIRequest):
path_regex = re.compile(PATH_PARAMETER_PATTERN)
def __init__(self, request):
self.request = request
# ... modified code ...
if self.request.url_rule is None:
return self.path
return self.path_regex.sub(r'{\1}', self.request.url_rule.rule)
@property
def parameters(self):
# ... rest of the code ...
|
8a40d0df910cf9e17db99155ba148c69737809dc
|
ConnorBrozic-CymonScriptA2P2.py
|
ConnorBrozic-CymonScriptA2P2.py
|
import time
from cymon import Cymon
#Personal Key Removed. Replace 'xxx' with your own key.
api = Cymon('xxx')
#Parsing Text file retrieved from:
#http://stackoverflow.com/questions/6277107/parsing-text-file-in-python
#Open malware domain file.
f = open('text.txt','r')
#Open output file to write to
cymondata = open('cymondata.txt','w')
while True:
try:
#Read the next domain in file.
text = f.readline()
print(text)
#Lookup the domain through Cymon and print results back to output file.
cymondata.write(repr(api.domain_lookup(text)))
cymondata.write("\n")
cymondata.write("\n")
#If 404 error encountered, skip domain and move to the next one.
except Exception:
pass
time.sleep(1)
#Once finished, close the connection.
cymondata.close()
|
import time
from cymon import Cymon
#Personal Key Removed. Replace 'xxx' with your own key.
api = Cymon('xxx')
#Parsing Text file retrieved from:
#http://stackoverflow.com/questions/6277107/parsing-text-file-in-python
#Open malware domain file.
f = open('TestedMalwareDomains.txt','r')
#Open output file to write to
cymondata = open('cymondata.txt','w')
while True:
try:
#Read the next domain in file.
text = f.readline()
print(text)
#Lookup the domain through Cymon and print results back to output file.
cymondata.write(repr(api.domain_lookup(text)))
cymondata.write("\n")
cymondata.write("\n")
#If 404 error encountered, skip domain and move to the next one.
except Exception:
pass
time.sleep(1)
#Once finished, close the connection.
cymondata.close()
|
Update to Script, Fixed malware domain file name
|
Update to Script, Fixed malware domain file name
Fixed malware domains file name to accurately represent the file opened. (Better than text.txt)
|
Python
|
mit
|
ConnorBrozic/SRT411-Assignment2
|
python
|
## Code Before:
import time
from cymon import Cymon
#Personal Key Removed. Replace 'xxx' with your own key.
api = Cymon('xxx')
#Parsing Text file retrieved from:
#http://stackoverflow.com/questions/6277107/parsing-text-file-in-python
#Open malware domain file.
f = open('text.txt','r')
#Open output file to write to
cymondata = open('cymondata.txt','w')
while True:
try:
#Read the next domain in file.
text = f.readline()
print(text)
#Lookup the domain through Cymon and print results back to output file.
cymondata.write(repr(api.domain_lookup(text)))
cymondata.write("\n")
cymondata.write("\n")
#If 404 error encountered, skip domain and move to the next one.
except Exception:
pass
time.sleep(1)
#Once finished, close the connection.
cymondata.close()
## Instruction:
Update to Script, Fixed malware domain file name
Fixed malware domains file name to accurately represent the file opened. (Better than text.txt)
## Code After:
import time
from cymon import Cymon
#Personal Key Removed. Replace 'xxx' with your own key.
api = Cymon('xxx')
#Parsing Text file retrieved from:
#http://stackoverflow.com/questions/6277107/parsing-text-file-in-python
#Open malware domain file.
f = open('TestedMalwareDomains.txt','r')
#Open output file to write to
cymondata = open('cymondata.txt','w')
while True:
try:
#Read the next domain in file.
text = f.readline()
print(text)
#Lookup the domain through Cymon and print results back to output file.
cymondata.write(repr(api.domain_lookup(text)))
cymondata.write("\n")
cymondata.write("\n")
#If 404 error encountered, skip domain and move to the next one.
except Exception:
pass
time.sleep(1)
#Once finished, close the connection.
cymondata.close()
|
// ... existing code ...
#http://stackoverflow.com/questions/6277107/parsing-text-file-in-python
#Open malware domain file.
f = open('TestedMalwareDomains.txt','r')
#Open output file to write to
cymondata = open('cymondata.txt','w')
while True:
// ... rest of the code ...
|
1b3ac2e1dfbc09c6b3fa73ded9c6fb59cd86cac4
|
src/main/java/model/transform/tasks/StoreOptions.java
|
src/main/java/model/transform/tasks/StoreOptions.java
|
package model.transform.tasks;
import model.transform.base.ImageTransformTask;
public class StoreOptions extends ImageTransformTask {
public StoreOptions() {
super("store");
}
public static class Builder {
private StoreOptions storeOptions;
public Builder() {
this.storeOptions = new StoreOptions();
}
public Builder filename(String filename) {
storeOptions.addOption("filename", filename);
return this;
}
public Builder location(String location) {
storeOptions.addOption("location", location);
return this;
}
public Builder path(String path) {
storeOptions.addOption("path", path);
return this;
}
public Builder container(String container) {
storeOptions.addOption("container", container);
return this;
}
public Builder region(String region) {
storeOptions.addOption("region", region);
return this;
}
public Builder access(String access) {
storeOptions.addOption("access", access);
return this;
}
public Builder base64Decode(boolean base64Decode) {
storeOptions.addOption("base64decode", base64Decode);
return this;
}
public StoreOptions build() {
return storeOptions;
}
}
}
|
package model.transform.tasks;
import model.transform.base.ImageTransformTask;
public class StoreOptions extends ImageTransformTask {
// Constructor left public because this task can be used with default options
public StoreOptions() {
super("store");
}
public static class Builder {
private StoreOptions storeOptions;
public Builder() {
this.storeOptions = new StoreOptions();
}
public Builder filename(String filename) {
storeOptions.addOption("filename", filename);
return this;
}
public Builder location(String location) {
storeOptions.addOption("location", location);
return this;
}
public Builder path(String path) {
storeOptions.addOption("path", path);
return this;
}
public Builder container(String container) {
storeOptions.addOption("container", container);
return this;
}
public Builder region(String region) {
storeOptions.addOption("region", region);
return this;
}
public Builder access(String access) {
storeOptions.addOption("access", access);
return this;
}
public Builder base64Decode(boolean base64Decode) {
storeOptions.addOption("base64decode", base64Decode);
return this;
}
public StoreOptions build() {
return storeOptions;
}
}
}
|
Add comment to clarify constructor access
|
Add comment to clarify constructor access
|
Java
|
apache-2.0
|
filestack/filestack-java,filestack/filestack-java
|
java
|
## Code Before:
package model.transform.tasks;
import model.transform.base.ImageTransformTask;
public class StoreOptions extends ImageTransformTask {
public StoreOptions() {
super("store");
}
public static class Builder {
private StoreOptions storeOptions;
public Builder() {
this.storeOptions = new StoreOptions();
}
public Builder filename(String filename) {
storeOptions.addOption("filename", filename);
return this;
}
public Builder location(String location) {
storeOptions.addOption("location", location);
return this;
}
public Builder path(String path) {
storeOptions.addOption("path", path);
return this;
}
public Builder container(String container) {
storeOptions.addOption("container", container);
return this;
}
public Builder region(String region) {
storeOptions.addOption("region", region);
return this;
}
public Builder access(String access) {
storeOptions.addOption("access", access);
return this;
}
public Builder base64Decode(boolean base64Decode) {
storeOptions.addOption("base64decode", base64Decode);
return this;
}
public StoreOptions build() {
return storeOptions;
}
}
}
## Instruction:
Add comment to clarify constructor access
## Code After:
package model.transform.tasks;
import model.transform.base.ImageTransformTask;
public class StoreOptions extends ImageTransformTask {
// Constructor left public because this task can be used with default options
public StoreOptions() {
super("store");
}
public static class Builder {
private StoreOptions storeOptions;
public Builder() {
this.storeOptions = new StoreOptions();
}
public Builder filename(String filename) {
storeOptions.addOption("filename", filename);
return this;
}
public Builder location(String location) {
storeOptions.addOption("location", location);
return this;
}
public Builder path(String path) {
storeOptions.addOption("path", path);
return this;
}
public Builder container(String container) {
storeOptions.addOption("container", container);
return this;
}
public Builder region(String region) {
storeOptions.addOption("region", region);
return this;
}
public Builder access(String access) {
storeOptions.addOption("access", access);
return this;
}
public Builder base64Decode(boolean base64Decode) {
storeOptions.addOption("base64decode", base64Decode);
return this;
}
public StoreOptions build() {
return storeOptions;
}
}
}
|
// ... existing code ...
public class StoreOptions extends ImageTransformTask {
// Constructor left public because this task can be used with default options
public StoreOptions() {
super("store");
}
// ... rest of the code ...
|
316dac037b8cef3086f5bdf6b9fd2afa0b2bfbd3
|
mama_cas/urls.py
|
mama_cas/urls.py
|
from django.conf.urls import patterns
from django.conf.urls import url
from mama_cas.views import LoginView
from mama_cas.views import LogoutView
from mama_cas.views import ValidateView
from mama_cas.views import ServiceValidateView
from mama_cas.views import ProxyValidateView
from mama_cas.views import ProxyView
from mama_cas.views import WarnView
from mama_cas.views import SamlValidateView
urlpatterns = patterns('',
url(r'^login/?$',
LoginView.as_view(),
name='cas_login'),
url(r'^logout/?$',
LogoutView.as_view(),
name='cas_logout'),
url(r'^validate/?$',
ValidateView.as_view(),
name='cas_validate'),
url(r'^serviceValidate/?$',
ServiceValidateView.as_view(),
name='cas_service_validate'),
url(r'^proxyValidate/?$',
ProxyValidateView.as_view(),
name='cas_proxy_validate'),
url(r'^proxy/?$',
ProxyView.as_view(),
name='cas_proxy'),
url(r'^warn/?$',
WarnView.as_view(),
name='cas_warn'),
url(r'^samlValidate/?$',
SamlValidateView.as_view(),
name='cas_saml_validate'),
)
|
from django.conf.urls import patterns
from django.conf.urls import url
from mama_cas.views import LoginView
from mama_cas.views import LogoutView
from mama_cas.views import ValidateView
from mama_cas.views import ServiceValidateView
from mama_cas.views import ProxyValidateView
from mama_cas.views import ProxyView
from mama_cas.views import WarnView
from mama_cas.views import SamlValidateView
urlpatterns = patterns('',
url(r'^login/?$',
LoginView.as_view(),
name='cas_login'),
url(r'^logout/?$',
LogoutView.as_view(),
name='cas_logout'),
url(r'^validate/?$',
ValidateView.as_view(),
name='cas_validate'),
url(r'^serviceValidate/?$',
ServiceValidateView.as_view(),
name='cas_service_validate'),
url(r'^proxyValidate/?$',
ProxyValidateView.as_view(),
name='cas_proxy_validate'),
url(r'^proxy/?$',
ProxyView.as_view(),
name='cas_proxy'),
url(r'^p3/serviceValidate/?$',
ServiceValidateView.as_view(),
name='cas_p3_service_validate'),
url(r'^p3/proxyValidate/?$',
ProxyValidateView.as_view(),
name='cas_p3_proxy_validate'),
url(r'^warn/?$',
WarnView.as_view(),
name='cas_warn'),
url(r'^samlValidate/?$',
SamlValidateView.as_view(),
name='cas_saml_validate'),
)
|
Add CAS 3.0 specific endpoints
|
Add CAS 3.0 specific endpoints
|
Python
|
bsd-3-clause
|
jbittel/django-mama-cas,jbittel/django-mama-cas,orbitvu/django-mama-cas,orbitvu/django-mama-cas
|
python
|
## Code Before:
from django.conf.urls import patterns
from django.conf.urls import url
from mama_cas.views import LoginView
from mama_cas.views import LogoutView
from mama_cas.views import ValidateView
from mama_cas.views import ServiceValidateView
from mama_cas.views import ProxyValidateView
from mama_cas.views import ProxyView
from mama_cas.views import WarnView
from mama_cas.views import SamlValidateView
urlpatterns = patterns('',
url(r'^login/?$',
LoginView.as_view(),
name='cas_login'),
url(r'^logout/?$',
LogoutView.as_view(),
name='cas_logout'),
url(r'^validate/?$',
ValidateView.as_view(),
name='cas_validate'),
url(r'^serviceValidate/?$',
ServiceValidateView.as_view(),
name='cas_service_validate'),
url(r'^proxyValidate/?$',
ProxyValidateView.as_view(),
name='cas_proxy_validate'),
url(r'^proxy/?$',
ProxyView.as_view(),
name='cas_proxy'),
url(r'^warn/?$',
WarnView.as_view(),
name='cas_warn'),
url(r'^samlValidate/?$',
SamlValidateView.as_view(),
name='cas_saml_validate'),
)
## Instruction:
Add CAS 3.0 specific endpoints
## Code After:
from django.conf.urls import patterns
from django.conf.urls import url
from mama_cas.views import LoginView
from mama_cas.views import LogoutView
from mama_cas.views import ValidateView
from mama_cas.views import ServiceValidateView
from mama_cas.views import ProxyValidateView
from mama_cas.views import ProxyView
from mama_cas.views import WarnView
from mama_cas.views import SamlValidateView
urlpatterns = patterns('',
url(r'^login/?$',
LoginView.as_view(),
name='cas_login'),
url(r'^logout/?$',
LogoutView.as_view(),
name='cas_logout'),
url(r'^validate/?$',
ValidateView.as_view(),
name='cas_validate'),
url(r'^serviceValidate/?$',
ServiceValidateView.as_view(),
name='cas_service_validate'),
url(r'^proxyValidate/?$',
ProxyValidateView.as_view(),
name='cas_proxy_validate'),
url(r'^proxy/?$',
ProxyView.as_view(),
name='cas_proxy'),
url(r'^p3/serviceValidate/?$',
ServiceValidateView.as_view(),
name='cas_p3_service_validate'),
url(r'^p3/proxyValidate/?$',
ProxyValidateView.as_view(),
name='cas_p3_proxy_validate'),
url(r'^warn/?$',
WarnView.as_view(),
name='cas_warn'),
url(r'^samlValidate/?$',
SamlValidateView.as_view(),
name='cas_saml_validate'),
)
|
// ... existing code ...
url(r'^proxy/?$',
ProxyView.as_view(),
name='cas_proxy'),
url(r'^p3/serviceValidate/?$',
ServiceValidateView.as_view(),
name='cas_p3_service_validate'),
url(r'^p3/proxyValidate/?$',
ProxyValidateView.as_view(),
name='cas_p3_proxy_validate'),
url(r'^warn/?$',
WarnView.as_view(),
name='cas_warn'),
// ... rest of the code ...
|
0c100e305b912eaa9d100e39b2f202a95043802f
|
test/competitive/programming/timemanagement/TimerTest.java
|
test/competitive/programming/timemanagement/TimerTest.java
|
package competitive.programming.timemanagement;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.junit.Test;
import competitive.programming.timemanagement.TimeoutException;
import competitive.programming.timemanagement.Timer;
public class TimerTest {
@Test
public void nonStartedTimerDoesNotTimeout() {
Timer timer = new Timer();
try {
timer.timeCheck();
sleep(1);
timer.timeCheck();
} catch (TimeoutException e) {
fail();
}
}
@Test
public void doesNotTimeOutBeforeTimeoutReached() {
Timer timer = new Timer();
try {
timer.startTimer(2);
sleep(1);
long currentTimeTaken = timer.currentTimeTakenInNanoSeconds();
assertTrue(currentTimeTaken>1*1000*1000);
assertTrue(currentTimeTaken<2*1000*1000);
timer.timeCheck();
} catch (TimeoutException e) {
fail();
}
}
@Test(expected = TimeoutException.class)
public void timeoutReached() throws TimeoutException {
Timer timer = new Timer();
timer.startTimer(1);
sleep(2);
timer.timeCheck();
}
private void sleep(long milliseconds){
//I had some difficulties to sleep precisely a number of milliseconds.
//Thread.sleep was not fine...
long start = System.nanoTime();
long timeout = start+milliseconds*1000*1000;
while (System.nanoTime()<timeout){
//do nothing
}
}
}
|
package competitive.programming.timemanagement;
import static org.junit.Assert.fail;
import org.junit.Test;
public class TimerTest {
@Test
public void nonStartedTimerDoesNotTimeout() {
Timer timer = new Timer();
try {
timer.timeCheck();
sleep(1);
timer.timeCheck();
} catch (TimeoutException e) {
fail();
}
}
@Test
public void doesNotTimeOutBeforeTimeoutReached() {
Timer timer = new Timer();
try {
// We have a random failing test here. Try to reproduce the issue
// more frequently.
for (int i = 0; i < 100; i++) {
timer.startTimer(20);
sleep(10);
timer.timeCheck();
}
} catch (TimeoutException e) {
fail();
}
}
@Test(expected = TimeoutException.class)
public void timeoutReached() throws TimeoutException {
Timer timer = new Timer();
timer.startTimer(1);
sleep(2);
timer.timeCheck();
}
private void sleep(long milliseconds) {
// I had some difficulties to sleep precisely a number of milliseconds.
// Thread.sleep was not fine...
long start = System.nanoTime();
long timeout = start + milliseconds * 1000 * 1000;
while (System.nanoTime() < timeout) {
// do nothing
}
}
}
|
Change timer test so that we can reproduce the random issue. Change timings.
|
Change timer test so that we can reproduce the random issue. Change
timings.
|
Java
|
apache-2.0
|
Manwe56/competitive-programming,Manwe56/competitive-programming
|
java
|
## Code Before:
package competitive.programming.timemanagement;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.junit.Test;
import competitive.programming.timemanagement.TimeoutException;
import competitive.programming.timemanagement.Timer;
public class TimerTest {
@Test
public void nonStartedTimerDoesNotTimeout() {
Timer timer = new Timer();
try {
timer.timeCheck();
sleep(1);
timer.timeCheck();
} catch (TimeoutException e) {
fail();
}
}
@Test
public void doesNotTimeOutBeforeTimeoutReached() {
Timer timer = new Timer();
try {
timer.startTimer(2);
sleep(1);
long currentTimeTaken = timer.currentTimeTakenInNanoSeconds();
assertTrue(currentTimeTaken>1*1000*1000);
assertTrue(currentTimeTaken<2*1000*1000);
timer.timeCheck();
} catch (TimeoutException e) {
fail();
}
}
@Test(expected = TimeoutException.class)
public void timeoutReached() throws TimeoutException {
Timer timer = new Timer();
timer.startTimer(1);
sleep(2);
timer.timeCheck();
}
private void sleep(long milliseconds){
//I had some difficulties to sleep precisely a number of milliseconds.
//Thread.sleep was not fine...
long start = System.nanoTime();
long timeout = start+milliseconds*1000*1000;
while (System.nanoTime()<timeout){
//do nothing
}
}
}
## Instruction:
Change timer test so that we can reproduce the random issue. Change
timings.
## Code After:
package competitive.programming.timemanagement;
import static org.junit.Assert.fail;
import org.junit.Test;
public class TimerTest {
@Test
public void nonStartedTimerDoesNotTimeout() {
Timer timer = new Timer();
try {
timer.timeCheck();
sleep(1);
timer.timeCheck();
} catch (TimeoutException e) {
fail();
}
}
@Test
public void doesNotTimeOutBeforeTimeoutReached() {
Timer timer = new Timer();
try {
// We have a random failing test here. Try to reproduce the issue
// more frequently.
for (int i = 0; i < 100; i++) {
timer.startTimer(20);
sleep(10);
timer.timeCheck();
}
} catch (TimeoutException e) {
fail();
}
}
@Test(expected = TimeoutException.class)
public void timeoutReached() throws TimeoutException {
Timer timer = new Timer();
timer.startTimer(1);
sleep(2);
timer.timeCheck();
}
private void sleep(long milliseconds) {
// I had some difficulties to sleep precisely a number of milliseconds.
// Thread.sleep was not fine...
long start = System.nanoTime();
long timeout = start + milliseconds * 1000 * 1000;
while (System.nanoTime() < timeout) {
// do nothing
}
}
}
|
# ... existing code ...
package competitive.programming.timemanagement;
import static org.junit.Assert.fail;
import org.junit.Test;
public class TimerTest {
@Test
public void nonStartedTimerDoesNotTimeout() {
Timer timer = new Timer();
try {
timer.timeCheck();
sleep(1);
timer.timeCheck();
} catch (TimeoutException e) {
fail();
}
}
@Test
public void doesNotTimeOutBeforeTimeoutReached() {
Timer timer = new Timer();
try {
// We have a random failing test here. Try to reproduce the issue
// more frequently.
for (int i = 0; i < 100; i++) {
timer.startTimer(20);
sleep(10);
timer.timeCheck();
}
} catch (TimeoutException e) {
fail();
}
}
@Test(expected = TimeoutException.class)
public void timeoutReached() throws TimeoutException {
Timer timer = new Timer();
timer.startTimer(1);
sleep(2);
timer.timeCheck();
}
private void sleep(long milliseconds) {
// I had some difficulties to sleep precisely a number of milliseconds.
// Thread.sleep was not fine...
long start = System.nanoTime();
long timeout = start + milliseconds * 1000 * 1000;
while (System.nanoTime() < timeout) {
// do nothing
}
}
}
# ... rest of the code ...
|
c1da98bc7e31b7b8b0bf922563eb023745663da5
|
src/main/java/org/zalando/problem/spring/web/advice/validation/ValidationAdviceTrait.java
|
src/main/java/org/zalando/problem/spring/web/advice/validation/ValidationAdviceTrait.java
|
package org.zalando.problem.spring.web.advice.validation;
import org.zalando.problem.spring.web.advice.AdviceTrait;
/**
* @see AdviceTrait
*/
public interface ValidationAdviceTrait extends
ConstraintViolationAdviceTrait,
MethodArgumentNotValidAdviceTrait {
}
|
package org.zalando.problem.spring.web.advice.validation;
import org.zalando.problem.spring.web.advice.AdviceTrait;
/**
* Advice trait to handle any validation exceptions.
* <p>
* Be careful if you use {@link org.springframework.validation.beanvalidation.MethodValidationPostProcessor}
* in order to validate method parameter field directly but {@code violations[].field} value looks like {@code arg0}
* instead of parameter name, you have to configure a
* {@link org.springframework.validation.beanvalidation.LocalValidatorFactoryBean} with your
* {@link org.springframework.validation.beanvalidation.MethodValidationPostProcessor} like following:
*
* <pre><code>
* @Bean
* public Validator validator() {
* return new LocalValidatorFactoryBean();
* }
*
* @Bean
* public MethodValidationPostProcessor methodValidationPostProcessor() {
* MethodValidationPostProcessor methodValidationPostProcessor = new MethodValidationPostProcessor();
* methodValidationPostProcessor.setValidator(validator());
* return methodValidationPostProcessor;
* }
* </code></pre>
*
* @see AdviceTrait
*/
public interface ValidationAdviceTrait extends
ConstraintViolationAdviceTrait,
MethodArgumentNotValidAdviceTrait {
}
|
Add some tips when using Spring method validation
|
Add some tips when using Spring method validation
related to #85 discussion
|
Java
|
mit
|
zalando/problem-spring-web,zalando/problem-spring-web
|
java
|
## Code Before:
package org.zalando.problem.spring.web.advice.validation;
import org.zalando.problem.spring.web.advice.AdviceTrait;
/**
* @see AdviceTrait
*/
public interface ValidationAdviceTrait extends
ConstraintViolationAdviceTrait,
MethodArgumentNotValidAdviceTrait {
}
## Instruction:
Add some tips when using Spring method validation
related to #85 discussion
## Code After:
package org.zalando.problem.spring.web.advice.validation;
import org.zalando.problem.spring.web.advice.AdviceTrait;
/**
* Advice trait to handle any validation exceptions.
* <p>
* Be careful if you use {@link org.springframework.validation.beanvalidation.MethodValidationPostProcessor}
* in order to validate method parameter field directly but {@code violations[].field} value looks like {@code arg0}
* instead of parameter name, you have to configure a
* {@link org.springframework.validation.beanvalidation.LocalValidatorFactoryBean} with your
* {@link org.springframework.validation.beanvalidation.MethodValidationPostProcessor} like following:
*
* <pre><code>
* @Bean
* public Validator validator() {
* return new LocalValidatorFactoryBean();
* }
*
* @Bean
* public MethodValidationPostProcessor methodValidationPostProcessor() {
* MethodValidationPostProcessor methodValidationPostProcessor = new MethodValidationPostProcessor();
* methodValidationPostProcessor.setValidator(validator());
* return methodValidationPostProcessor;
* }
* </code></pre>
*
* @see AdviceTrait
*/
public interface ValidationAdviceTrait extends
ConstraintViolationAdviceTrait,
MethodArgumentNotValidAdviceTrait {
}
|
# ... existing code ...
import org.zalando.problem.spring.web.advice.AdviceTrait;
/**
* Advice trait to handle any validation exceptions.
* <p>
* Be careful if you use {@link org.springframework.validation.beanvalidation.MethodValidationPostProcessor}
* in order to validate method parameter field directly but {@code violations[].field} value looks like {@code arg0}
* instead of parameter name, you have to configure a
* {@link org.springframework.validation.beanvalidation.LocalValidatorFactoryBean} with your
* {@link org.springframework.validation.beanvalidation.MethodValidationPostProcessor} like following:
*
* <pre><code>
* @Bean
* public Validator validator() {
* return new LocalValidatorFactoryBean();
* }
*
* @Bean
* public MethodValidationPostProcessor methodValidationPostProcessor() {
* MethodValidationPostProcessor methodValidationPostProcessor = new MethodValidationPostProcessor();
* methodValidationPostProcessor.setValidator(validator());
* return methodValidationPostProcessor;
* }
* </code></pre>
*
* @see AdviceTrait
*/
public interface ValidationAdviceTrait extends
# ... rest of the code ...
|
81ce26a410a2bcd751f40022d78e20c95bf7b60e
|
test/FrontendC/ARM/inline-asm-multichar.c
|
test/FrontendC/ARM/inline-asm-multichar.c
|
// RUN: %llvmgcc -S -march=armv7a %s
// XFAIL: *
// XTARGET: arm
int t1() {
static float k = 1.0f;
CHECK: call void asm sideeffect "flds s15, $0 \0A", "*^Uv,~{s15}"
__asm__ volatile ("flds s15, %[k] \n" :: [k] "Uv,m" (k) : "s15");
return 0;
}
|
// RUN: %llvmgcc -S -march=armv7a %s | FileCheck %s
// XFAIL: *
// XTARGET: arm
int t1() {
static float k = 1.0f;
// CHECK: "flds s15, $0 \0A", "*^Uv,~{s15}"
__asm__ volatile ("flds s15, %[k] \n" :: [k] "Uv,m" (k) : "s15");
return 0;
}
|
Fix this test to actually check something and be able to be compiled.
|
Fix this test to actually check something and be able to be compiled.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@133952 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
apple/swift-llvm,llvm-mirror/llvm,dslab-epfl/asap,GPUOpen-Drivers/llvm,llvm-mirror/llvm,apple/swift-llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,dslab-epfl/asap,chubbymaggie/asap,dslab-epfl/asap,llvm-mirror/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,dslab-epfl/asap,chubbymaggie/asap,chubbymaggie/asap,chubbymaggie/asap,apple/swift-llvm,chubbymaggie/asap,llvm-mirror/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,dslab-epfl/asap,chubbymaggie/asap,dslab-epfl/asap,apple/swift-llvm,apple/swift-llvm,dslab-epfl/asap,llvm-mirror/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm
|
c
|
## Code Before:
// RUN: %llvmgcc -S -march=armv7a %s
// XFAIL: *
// XTARGET: arm
int t1() {
static float k = 1.0f;
CHECK: call void asm sideeffect "flds s15, $0 \0A", "*^Uv,~{s15}"
__asm__ volatile ("flds s15, %[k] \n" :: [k] "Uv,m" (k) : "s15");
return 0;
}
## Instruction:
Fix this test to actually check something and be able to be compiled.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@133952 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// RUN: %llvmgcc -S -march=armv7a %s | FileCheck %s
// XFAIL: *
// XTARGET: arm
int t1() {
static float k = 1.0f;
// CHECK: "flds s15, $0 \0A", "*^Uv,~{s15}"
__asm__ volatile ("flds s15, %[k] \n" :: [k] "Uv,m" (k) : "s15");
return 0;
}
|
# ... existing code ...
// RUN: %llvmgcc -S -march=armv7a %s | FileCheck %s
// XFAIL: *
// XTARGET: arm
# ... modified code ...
int t1() {
static float k = 1.0f;
// CHECK: "flds s15, $0 \0A", "*^Uv,~{s15}"
__asm__ volatile ("flds s15, %[k] \n" :: [k] "Uv,m" (k) : "s15");
return 0;
}
# ... rest of the code ...
|
b375893692e26c15beb38e88f8b822a8dd35c03c
|
src/test/java/info/u_team/u_team_test/block/BasicBlock.java
|
src/test/java/info/u_team/u_team_test/block/BasicBlock.java
|
package info.u_team.u_team_test.block;
import info.u_team.u_team_core.block.UBlock;
import info.u_team.u_team_test.init.TestItemGroups;
import net.minecraft.block.SoundType;
import net.minecraft.block.material.Material;
import net.minecraft.item.*;
public class BasicBlock extends UBlock {
public BasicBlock(String name) {
super(name, TestItemGroups.GROUP, Properties.create(Material.ROCK).hardnessAndResistance(2F).sound(SoundType.GROUND).slipperiness(0.8F).lightValue(1), new Item.Properties().rarity(Rarity.UNCOMMON));
}
}
|
package info.u_team.u_team_test.block;
import info.u_team.u_team_core.block.UBlock;
import info.u_team.u_team_test.init.TestItemGroups;
import net.minecraft.block.SoundType;
import net.minecraft.block.material.Material;
import net.minecraft.item.*;
public class BasicBlock extends UBlock {
public BasicBlock() {
super(TestItemGroups.GROUP, Properties.create(Material.ROCK).hardnessAndResistance(2F).sound(SoundType.GROUND).slipperiness(0.8F).lightValue(1), new Item.Properties().rarity(Rarity.UNCOMMON));
}
}
|
Remove String name from basic block
|
Remove String name from basic block
|
Java
|
apache-2.0
|
MC-U-Team/U-Team-Core,MC-U-Team/U-Team-Core
|
java
|
## Code Before:
package info.u_team.u_team_test.block;
import info.u_team.u_team_core.block.UBlock;
import info.u_team.u_team_test.init.TestItemGroups;
import net.minecraft.block.SoundType;
import net.minecraft.block.material.Material;
import net.minecraft.item.*;
public class BasicBlock extends UBlock {
public BasicBlock(String name) {
super(name, TestItemGroups.GROUP, Properties.create(Material.ROCK).hardnessAndResistance(2F).sound(SoundType.GROUND).slipperiness(0.8F).lightValue(1), new Item.Properties().rarity(Rarity.UNCOMMON));
}
}
## Instruction:
Remove String name from basic block
## Code After:
package info.u_team.u_team_test.block;
import info.u_team.u_team_core.block.UBlock;
import info.u_team.u_team_test.init.TestItemGroups;
import net.minecraft.block.SoundType;
import net.minecraft.block.material.Material;
import net.minecraft.item.*;
public class BasicBlock extends UBlock {
public BasicBlock() {
super(TestItemGroups.GROUP, Properties.create(Material.ROCK).hardnessAndResistance(2F).sound(SoundType.GROUND).slipperiness(0.8F).lightValue(1), new Item.Properties().rarity(Rarity.UNCOMMON));
}
}
|
...
public class BasicBlock extends UBlock {
public BasicBlock() {
super(TestItemGroups.GROUP, Properties.create(Material.ROCK).hardnessAndResistance(2F).sound(SoundType.GROUND).slipperiness(0.8F).lightValue(1), new Item.Properties().rarity(Rarity.UNCOMMON));
}
}
...
|
0c04f830cf2b4aa5ba85712b78fc9dcea94b9ab0
|
src/main/java/se/kits/gakusei/content/repository/GrammarTextRepository.java
|
src/main/java/se/kits/gakusei/content/repository/GrammarTextRepository.java
|
package se.kits.gakusei.content.repository;
import org.springframework.data.repository.CrudRepository;
import se.kits.gakusei.content.model.GrammarText;
public interface GrammarTextRepository extends CrudRepository<GrammarText, String> {
}
|
package se.kits.gakusei.content.repository;
import org.springframework.data.repository.CrudRepository;
import se.kits.gakusei.content.model.GrammarText;
import java.util.List;
public interface GrammarTextRepository extends CrudRepository<GrammarText, String> {
List<GrammarText> findByInflectionMethod(String inflectionMethod);
}
|
Add method for finding grammarText using inflectionMethod
|
Add method for finding grammarText using inflectionMethod
|
Java
|
mit
|
kits-ab/gakusei,kits-ab/gakusei,kits-ab/gakusei
|
java
|
## Code Before:
package se.kits.gakusei.content.repository;
import org.springframework.data.repository.CrudRepository;
import se.kits.gakusei.content.model.GrammarText;
public interface GrammarTextRepository extends CrudRepository<GrammarText, String> {
}
## Instruction:
Add method for finding grammarText using inflectionMethod
## Code After:
package se.kits.gakusei.content.repository;
import org.springframework.data.repository.CrudRepository;
import se.kits.gakusei.content.model.GrammarText;
import java.util.List;
public interface GrammarTextRepository extends CrudRepository<GrammarText, String> {
List<GrammarText> findByInflectionMethod(String inflectionMethod);
}
|
# ... existing code ...
import org.springframework.data.repository.CrudRepository;
import se.kits.gakusei.content.model.GrammarText;
import java.util.List;
public interface GrammarTextRepository extends CrudRepository<GrammarText, String> {
List<GrammarText> findByInflectionMethod(String inflectionMethod);
}
# ... rest of the code ...
|
6fc9032bc372aad7b9c1217b44ff081ac9108af2
|
manoseimas/common/tests/utils/test_words.py
|
manoseimas/common/tests/utils/test_words.py
|
from __future__ import unicode_literals
import unittest
from manoseimas.scrapy import textutils
class WordCountTest(unittest.TestCase):
def test_get_word_count(self):
word_count = textutils.get_word_count('Žodžiai, lietuviškai.')
self.assertEqual(word_count, 2)
def test_get_words(self):
words = textutils.get_words('Žodžiai, lietuviškai.')
self.assertEqual(words, ['Žodžiai', 'lietuviškai'])
|
from __future__ import unicode_literals
import unittest
from manoseimas.common.utils import words
class WordCountTest(unittest.TestCase):
def test_get_word_count(self):
word_count = words.get_word_count('Žodžiai, lietuviškai.')
self.assertEqual(word_count, 2)
def test_get_words(self):
words_list = words.get_words('Žodžiai, lietuviškai.')
self.assertEqual(words_list, ['Žodžiai', 'lietuviškai'])
|
Fix word_count test import paths.
|
Fix word_count test import paths.
|
Python
|
agpl-3.0
|
ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt
|
python
|
## Code Before:
from __future__ import unicode_literals
import unittest
from manoseimas.scrapy import textutils
class WordCountTest(unittest.TestCase):
def test_get_word_count(self):
word_count = textutils.get_word_count('Žodžiai, lietuviškai.')
self.assertEqual(word_count, 2)
def test_get_words(self):
words = textutils.get_words('Žodžiai, lietuviškai.')
self.assertEqual(words, ['Žodžiai', 'lietuviškai'])
## Instruction:
Fix word_count test import paths.
## Code After:
from __future__ import unicode_literals
import unittest
from manoseimas.common.utils import words
class WordCountTest(unittest.TestCase):
def test_get_word_count(self):
word_count = words.get_word_count('Žodžiai, lietuviškai.')
self.assertEqual(word_count, 2)
def test_get_words(self):
words_list = words.get_words('Žodžiai, lietuviškai.')
self.assertEqual(words_list, ['Žodžiai', 'lietuviškai'])
|
# ... existing code ...
import unittest
from manoseimas.common.utils import words
class WordCountTest(unittest.TestCase):
def test_get_word_count(self):
word_count = words.get_word_count('Žodžiai, lietuviškai.')
self.assertEqual(word_count, 2)
def test_get_words(self):
words_list = words.get_words('Žodžiai, lietuviškai.')
self.assertEqual(words_list, ['Žodžiai', 'lietuviškai'])
# ... rest of the code ...
|
0a7104680d1aeaa7096f9bb7603cd63d46efb480
|
wikitables/util.py
|
wikitables/util.py
|
import sys
import json
def ftag(t):
return lambda node: node.tag == t
def ustr(s):
if sys.version_info < (3, 0):
#py2
return unicode(s).encode('utf-8')
else:
return str(s)
class TableJSONEncoder(json.JSONEncoder):
def default(self, obj):
if hasattr(obj, '__json__'):
return obj.__json__()
return json.JSONEncoder.default(self, obj)
|
import sys
import json
def ftag(t):
return lambda node: node.tag == t
def ustr(s):
if sys.version_info < (3, 0):
#py2
try:
return unicode(s).encode('utf-8')
except UnicodeDecodeError:
return str(s)
else:
return str(s)
class TableJSONEncoder(json.JSONEncoder):
def default(self, obj):
if hasattr(obj, '__json__'):
return obj.__json__()
return json.JSONEncoder.default(self, obj)
|
Add try catch to avoid unicode decode exception in python 2.7.10
|
Add try catch to avoid unicode decode exception in python 2.7.10
|
Python
|
mit
|
bcicen/wikitables
|
python
|
## Code Before:
import sys
import json
def ftag(t):
return lambda node: node.tag == t
def ustr(s):
if sys.version_info < (3, 0):
#py2
return unicode(s).encode('utf-8')
else:
return str(s)
class TableJSONEncoder(json.JSONEncoder):
def default(self, obj):
if hasattr(obj, '__json__'):
return obj.__json__()
return json.JSONEncoder.default(self, obj)
## Instruction:
Add try catch to avoid unicode decode exception in python 2.7.10
## Code After:
import sys
import json
def ftag(t):
return lambda node: node.tag == t
def ustr(s):
if sys.version_info < (3, 0):
#py2
try:
return unicode(s).encode('utf-8')
except UnicodeDecodeError:
return str(s)
else:
return str(s)
class TableJSONEncoder(json.JSONEncoder):
def default(self, obj):
if hasattr(obj, '__json__'):
return obj.__json__()
return json.JSONEncoder.default(self, obj)
|
// ... existing code ...
def ustr(s):
if sys.version_info < (3, 0):
#py2
try:
return unicode(s).encode('utf-8')
except UnicodeDecodeError:
return str(s)
else:
return str(s)
// ... rest of the code ...
|
dd5774c30f950c8a52b977a5529300e8edce4bc7
|
migrations/versions/2c240cb3edd1_.py
|
migrations/versions/2c240cb3edd1_.py
|
# revision identifiers, used by Alembic.
revision = '2c240cb3edd1'
down_revision = '588336e02ca'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('movie', sa.Column('imdb_rating', sa.Float(), nullable=False, default=0))
op.add_column('movie', sa.Column('metascore', sa.Integer(), nullable=False, default=0))
op.add_column('movie', sa.Column('number_of_imdb_votes', sa.Integer(), nullable=False, default=0))
op.add_column('movie', sa.Column('relevancy', sa.Float(), nullable=False, default=0))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('movie', 'relevancy')
op.drop_column('movie', 'number_of_imdb_votes')
op.drop_column('movie', 'metascore')
op.drop_column('movie', 'imdb_rating')
### end Alembic commands ###
|
# revision identifiers, used by Alembic.
revision = '2c240cb3edd1'
down_revision = '588336e02ca'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('movie', sa.Column('imdb_rating', sa.Float(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('metascore', sa.Integer(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('number_of_imdb_votes', sa.Integer(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('relevancy', sa.Float(), nullable=False, server_default='0'))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('movie', 'relevancy')
op.drop_column('movie', 'number_of_imdb_votes')
op.drop_column('movie', 'metascore')
op.drop_column('movie', 'imdb_rating')
### end Alembic commands ###
|
Fix proper default values for metadata migration
|
Fix proper default values for metadata migration
|
Python
|
mit
|
streamr/marvin,streamr/marvin,streamr/marvin
|
python
|
## Code Before:
# revision identifiers, used by Alembic.
revision = '2c240cb3edd1'
down_revision = '588336e02ca'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('movie', sa.Column('imdb_rating', sa.Float(), nullable=False, default=0))
op.add_column('movie', sa.Column('metascore', sa.Integer(), nullable=False, default=0))
op.add_column('movie', sa.Column('number_of_imdb_votes', sa.Integer(), nullable=False, default=0))
op.add_column('movie', sa.Column('relevancy', sa.Float(), nullable=False, default=0))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('movie', 'relevancy')
op.drop_column('movie', 'number_of_imdb_votes')
op.drop_column('movie', 'metascore')
op.drop_column('movie', 'imdb_rating')
### end Alembic commands ###
## Instruction:
Fix proper default values for metadata migration
## Code After:
# revision identifiers, used by Alembic.
revision = '2c240cb3edd1'
down_revision = '588336e02ca'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('movie', sa.Column('imdb_rating', sa.Float(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('metascore', sa.Integer(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('number_of_imdb_votes', sa.Integer(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('relevancy', sa.Float(), nullable=False, server_default='0'))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('movie', 'relevancy')
op.drop_column('movie', 'number_of_imdb_votes')
op.drop_column('movie', 'metascore')
op.drop_column('movie', 'imdb_rating')
### end Alembic commands ###
|
...
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('movie', sa.Column('imdb_rating', sa.Float(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('metascore', sa.Integer(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('number_of_imdb_votes', sa.Integer(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('relevancy', sa.Float(), nullable=False, server_default='0'))
### end Alembic commands ###
...
|
059230327fcebb35c881f8a6bc2ee12fed29d442
|
mcp/config.py
|
mcp/config.py
|
import os
from conary import conarycfg
from conary.lib import cfgtypes
class MCPConfig(conarycfg.ConfigFile):
basePath = os.path.join(os.path.sep, 'srv', 'rbuilder', 'mcp')
logPath = os.path.join(basePath, 'logs')
queueHost = '127.0.0.1'
queuePort = (cfgtypes.CfgInt, 61613)
namespace = 'mcp'
slaveTroveName = 'group-jobslave'
slaveTroveLabel = 'products.rpath.com@rpath:js'
|
import os
from conary import conarycfg
from conary.lib import cfgtypes
class MCPConfig(conarycfg.ConfigFile):
basePath = os.path.join(os.path.sep, 'srv', 'rbuilder', 'mcp')
logPath = os.path.join(os.path.sep, 'var', 'log', 'rbuilder')
queueHost = '127.0.0.1'
queuePort = (cfgtypes.CfgInt, 61613)
namespace = 'mcp'
slaveTroveName = 'group-jobslave'
slaveTroveLabel = 'products.rpath.com@rpath:js'
|
Move default location for MCP logs into /var/log/rbuilder/
|
Move default location for MCP logs into /var/log/rbuilder/
|
Python
|
apache-2.0
|
sassoftware/mcp,sassoftware/mcp
|
python
|
## Code Before:
import os
from conary import conarycfg
from conary.lib import cfgtypes
class MCPConfig(conarycfg.ConfigFile):
basePath = os.path.join(os.path.sep, 'srv', 'rbuilder', 'mcp')
logPath = os.path.join(basePath, 'logs')
queueHost = '127.0.0.1'
queuePort = (cfgtypes.CfgInt, 61613)
namespace = 'mcp'
slaveTroveName = 'group-jobslave'
slaveTroveLabel = 'products.rpath.com@rpath:js'
## Instruction:
Move default location for MCP logs into /var/log/rbuilder/
## Code After:
import os
from conary import conarycfg
from conary.lib import cfgtypes
class MCPConfig(conarycfg.ConfigFile):
basePath = os.path.join(os.path.sep, 'srv', 'rbuilder', 'mcp')
logPath = os.path.join(os.path.sep, 'var', 'log', 'rbuilder')
queueHost = '127.0.0.1'
queuePort = (cfgtypes.CfgInt, 61613)
namespace = 'mcp'
slaveTroveName = 'group-jobslave'
slaveTroveLabel = 'products.rpath.com@rpath:js'
|
...
class MCPConfig(conarycfg.ConfigFile):
basePath = os.path.join(os.path.sep, 'srv', 'rbuilder', 'mcp')
logPath = os.path.join(os.path.sep, 'var', 'log', 'rbuilder')
queueHost = '127.0.0.1'
queuePort = (cfgtypes.CfgInt, 61613)
...
|
4987275ab868aa98359d6583c7817c4adf09000b
|
zipeggs.py
|
zipeggs.py
|
import logging, os, zc.buildout, sys, shutil
class ZipEggs:
def __init__(self, buildout, name, options):
self.name, self.options = name, options
if options['target'] is None:
raise zc.buildout.UserError('Invalid Target')
if options['source'] is None:
raise zc.buildout.UserError('Invalid Source')
def zipit(self):
target = self.options['target']
if not os.path.exists(target):
os.mkdir(target)
path = self.options['source']
for dirs in os.listdir(path):
try:
source = os.path.join(path, dirs)
dist = "%s/%s" % (target, dirs)
print "%s > %s" % (source, dist)
shutil.make_archive(dist, "zip", source)
os.rename(dist+".zip", dist)
except OSError:
print "ignore %s" % dirs
return []
def install(self):
return self.zipit()
def update(self):
return self.zipit()
|
import logging, os, zc.buildout, sys, shutil
class ZipEggs:
def __init__(self, buildout, name, options):
self.name, self.options = name, options
if options['target'] is None:
raise zc.buildout.UserError('Invalid Target')
if options['source'] is None:
raise zc.buildout.UserError('Invalid Source')
def zipit(self):
target_dir = self.options['target']
if not os.path.exists(target_dir):
os.mkdir(target_dir)
source_dir = self.options['source']
for entry in os.listdir(source_dir):
try:
source = os.path.join(source_dir, entry)
target = "%s/%s" % (target_dir, entry)
print "%s > %s" % (source, target)
shutil.make_archive(target, "zip", source)
os.rename(target+".zip", target)
except OSError:
print "ignore %s" % entry
return []
def install(self):
return self.zipit()
def update(self):
return self.zipit()
|
Improve variable names for clarity
|
Improve variable names for clarity
|
Python
|
apache-2.0
|
tamizhgeek/zipeggs
|
python
|
## Code Before:
import logging, os, zc.buildout, sys, shutil
class ZipEggs:
def __init__(self, buildout, name, options):
self.name, self.options = name, options
if options['target'] is None:
raise zc.buildout.UserError('Invalid Target')
if options['source'] is None:
raise zc.buildout.UserError('Invalid Source')
def zipit(self):
target = self.options['target']
if not os.path.exists(target):
os.mkdir(target)
path = self.options['source']
for dirs in os.listdir(path):
try:
source = os.path.join(path, dirs)
dist = "%s/%s" % (target, dirs)
print "%s > %s" % (source, dist)
shutil.make_archive(dist, "zip", source)
os.rename(dist+".zip", dist)
except OSError:
print "ignore %s" % dirs
return []
def install(self):
return self.zipit()
def update(self):
return self.zipit()
## Instruction:
Improve variable names for clarity
## Code After:
import logging, os, zc.buildout, sys, shutil
class ZipEggs:
def __init__(self, buildout, name, options):
self.name, self.options = name, options
if options['target'] is None:
raise zc.buildout.UserError('Invalid Target')
if options['source'] is None:
raise zc.buildout.UserError('Invalid Source')
def zipit(self):
target_dir = self.options['target']
if not os.path.exists(target_dir):
os.mkdir(target_dir)
source_dir = self.options['source']
for entry in os.listdir(source_dir):
try:
source = os.path.join(source_dir, entry)
target = "%s/%s" % (target_dir, entry)
print "%s > %s" % (source, target)
shutil.make_archive(target, "zip", source)
os.rename(target+".zip", target)
except OSError:
print "ignore %s" % entry
return []
def install(self):
return self.zipit()
def update(self):
return self.zipit()
|
// ... existing code ...
def zipit(self):
target_dir = self.options['target']
if not os.path.exists(target_dir):
os.mkdir(target_dir)
source_dir = self.options['source']
for entry in os.listdir(source_dir):
try:
source = os.path.join(source_dir, entry)
target = "%s/%s" % (target_dir, entry)
print "%s > %s" % (source, target)
shutil.make_archive(target, "zip", source)
os.rename(target+".zip", target)
except OSError:
print "ignore %s" % entry
return []
def install(self):
// ... rest of the code ...
|
40d204c996e41a030dac240c99c66a25f8f8586e
|
scripts/generate-bcrypt-hashed-password.py
|
scripts/generate-bcrypt-hashed-password.py
|
import bcrypt
from docopt import docopt
def hash_password(password, cost_factor):
return bcrypt.hashpw(bytes(password), bcrypt.gensalt(cost_factor)).decode('utf-8')
if __name__ == "__main__":
arguments = docopt(__doc__)
password = arguments['<password>']
cost_factor = int(arguments['<cost-factor>'])
print(hash_password(password, cost_factor))
|
import bcrypt
from docopt import docopt
def hash_password(password, cost_factor):
return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt(cost_factor)).decode('utf-8')
if __name__ == "__main__":
arguments = docopt(__doc__)
password = arguments['<password>']
cost_factor = int(arguments['<cost-factor>'])
print(hash_password(password, cost_factor))
|
Fix "string argument without an encoding" python3 error in bcrypt script
|
Fix "string argument without an encoding" python3 error in bcrypt script
generate-bcrypt-hashed-password raises an error on python3 since
`bytes` is called without an encoding argument. Replacing it with
`.encode` should fix the problem.
|
Python
|
mit
|
alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws
|
python
|
## Code Before:
import bcrypt
from docopt import docopt
def hash_password(password, cost_factor):
return bcrypt.hashpw(bytes(password), bcrypt.gensalt(cost_factor)).decode('utf-8')
if __name__ == "__main__":
arguments = docopt(__doc__)
password = arguments['<password>']
cost_factor = int(arguments['<cost-factor>'])
print(hash_password(password, cost_factor))
## Instruction:
Fix "string argument without an encoding" python3 error in bcrypt script
generate-bcrypt-hashed-password raises an error on python3 since
`bytes` is called without an encoding argument. Replacing it with
`.encode` should fix the problem.
## Code After:
import bcrypt
from docopt import docopt
def hash_password(password, cost_factor):
return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt(cost_factor)).decode('utf-8')
if __name__ == "__main__":
arguments = docopt(__doc__)
password = arguments['<password>']
cost_factor = int(arguments['<cost-factor>'])
print(hash_password(password, cost_factor))
|
// ... existing code ...
def hash_password(password, cost_factor):
return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt(cost_factor)).decode('utf-8')
if __name__ == "__main__":
arguments = docopt(__doc__)
// ... rest of the code ...
|
bc661eeddb6742bea85e473db535af54bc29bbab
|
jctools-core/src/test/java/org/jctools/queues/MpqSanityTestMpscCompound.java
|
jctools-core/src/test/java/org/jctools/queues/MpqSanityTestMpscCompound.java
|
package org.jctools.queues;
import org.jctools.queues.spec.ConcurrentQueueSpec;
import org.jctools.queues.spec.Ordering;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.ArrayList;
import java.util.Collection;
import static org.jctools.util.PortableJvmInfo.CPUs;
@RunWith(Parameterized.class)
public class MpqSanityTestMpscCompound extends MpqSanityTest
{
public MpqSanityTestMpscCompound(ConcurrentQueueSpec spec, MessagePassingQueue<Integer> queue)
{
super(spec, queue);
}
@Parameterized.Parameters
public static Collection<Object[]> parameters()
{
ArrayList<Object[]> list = new ArrayList<Object[]>();
list.add(makeMpq(0, 1, CPUs, Ordering.NONE, null));// MPSC size 1
list.add(makeMpq(0, 1, SIZE, Ordering.NONE, null));// MPSC size SIZE
return list;
}
}
|
package org.jctools.queues;
import org.jctools.queues.spec.ConcurrentQueueSpec;
import org.jctools.queues.spec.Ordering;
import org.junit.Ignore;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.ArrayList;
import java.util.Collection;
import static org.jctools.util.PortableJvmInfo.CPUs;
@RunWith(Parameterized.class)
@Ignore
public class MpqSanityTestMpscCompound extends MpqSanityTest
{
public MpqSanityTestMpscCompound(ConcurrentQueueSpec spec, MessagePassingQueue<Integer> queue)
{
super(spec, queue);
}
@Parameterized.Parameters
public static Collection<Object[]> parameters()
{
ArrayList<Object[]> list = new ArrayList<Object[]>();
list.add(makeMpq(0, 1, CPUs, Ordering.NONE, null));// MPSC size 1
list.add(makeMpq(0, 1, SIZE, Ordering.NONE, null));// MPSC size SIZE
return list;
}
}
|
Disable test until bug fixed.
|
Disable test until bug fixed.
|
Java
|
apache-2.0
|
JCTools/JCTools
|
java
|
## Code Before:
package org.jctools.queues;
import org.jctools.queues.spec.ConcurrentQueueSpec;
import org.jctools.queues.spec.Ordering;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.ArrayList;
import java.util.Collection;
import static org.jctools.util.PortableJvmInfo.CPUs;
@RunWith(Parameterized.class)
public class MpqSanityTestMpscCompound extends MpqSanityTest
{
public MpqSanityTestMpscCompound(ConcurrentQueueSpec spec, MessagePassingQueue<Integer> queue)
{
super(spec, queue);
}
@Parameterized.Parameters
public static Collection<Object[]> parameters()
{
ArrayList<Object[]> list = new ArrayList<Object[]>();
list.add(makeMpq(0, 1, CPUs, Ordering.NONE, null));// MPSC size 1
list.add(makeMpq(0, 1, SIZE, Ordering.NONE, null));// MPSC size SIZE
return list;
}
}
## Instruction:
Disable test until bug fixed.
## Code After:
package org.jctools.queues;
import org.jctools.queues.spec.ConcurrentQueueSpec;
import org.jctools.queues.spec.Ordering;
import org.junit.Ignore;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.ArrayList;
import java.util.Collection;
import static org.jctools.util.PortableJvmInfo.CPUs;
@RunWith(Parameterized.class)
@Ignore
public class MpqSanityTestMpscCompound extends MpqSanityTest
{
public MpqSanityTestMpscCompound(ConcurrentQueueSpec spec, MessagePassingQueue<Integer> queue)
{
super(spec, queue);
}
@Parameterized.Parameters
public static Collection<Object[]> parameters()
{
ArrayList<Object[]> list = new ArrayList<Object[]>();
list.add(makeMpq(0, 1, CPUs, Ordering.NONE, null));// MPSC size 1
list.add(makeMpq(0, 1, SIZE, Ordering.NONE, null));// MPSC size SIZE
return list;
}
}
|
# ... existing code ...
import org.jctools.queues.spec.ConcurrentQueueSpec;
import org.jctools.queues.spec.Ordering;
import org.junit.Ignore;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
# ... modified code ...
import static org.jctools.util.PortableJvmInfo.CPUs;
@RunWith(Parameterized.class)
@Ignore
public class MpqSanityTestMpscCompound extends MpqSanityTest
{
public MpqSanityTestMpscCompound(ConcurrentQueueSpec spec, MessagePassingQueue<Integer> queue)
# ... rest of the code ...
|
96da00377891c31244a0e6435a31608169ceae02
|
src/com/spi.h
|
src/com/spi.h
|
//
// spi.h
// Ethernet Shield
//
// Created by EFCM van der Werf on 12/28/13.
// Copyright (c) 2013 EFCM van der Werf. All rights reserved.
//
#ifndef COM_SPI_H
#define COM_SPI_H
#include "../config.h"
// Do we want SPI?
#ifdef COM_SPI
#include <inttypes.h>
/**
* SPI config
*/
typedef struct spi_config {
};
/**
* @brief Initialize SPI channel
* @param config Configuration for spi channel
*/
extern void spi_init(spi_config *config);
#define SPI_START(port, pin) (port) &= ~(1 << (pin))
#define SPI_STOP(port, pin) (port) |= (1 << (pin))
#endif // COM_SPI
#endif // COM_SPI_H
|
//
// spi.h
// Ethernet Shield
//
// Created by EFCM van der Werf on 12/28/13.
// Copyright (c) 2013 EFCM van der Werf. All rights reserved.
//
#ifndef COM_SPI_H
#define COM_SPI_H
#include "../config.h"
// Do we want SPI?
#ifdef COM_SPI
#include <inttypes.h>
/**
* SPI config
*/
typedef struct spi_config {
};
/**
* @brief Initialize SPI channel
* @param config Configuration for spi channel
*/
extern void spi_init(spi_config *config);
#define SPI_ACTIVE(port, pin) (port) &= ~(1 << (pin))
#define SPI_PASSIVE(port, pin) (port) |= (1 << (pin))
#endif // COM_SPI
#endif // COM_SPI_H
|
Rename SPI_START to SPI_ACTIVE and SPI_STOP to SPI_PASSIVE
|
Rename SPI_START to SPI_ACTIVE and SPI_STOP to SPI_PASSIVE
|
C
|
mit
|
fuegas/dollhouse-ethshield,slashdev/slashnet,fuegas/dollhouse-ethshield,slashdev/slashnet
|
c
|
## Code Before:
//
// spi.h
// Ethernet Shield
//
// Created by EFCM van der Werf on 12/28/13.
// Copyright (c) 2013 EFCM van der Werf. All rights reserved.
//
#ifndef COM_SPI_H
#define COM_SPI_H
#include "../config.h"
// Do we want SPI?
#ifdef COM_SPI
#include <inttypes.h>
/**
* SPI config
*/
typedef struct spi_config {
};
/**
* @brief Initialize SPI channel
* @param config Configuration for spi channel
*/
extern void spi_init(spi_config *config);
#define SPI_START(port, pin) (port) &= ~(1 << (pin))
#define SPI_STOP(port, pin) (port) |= (1 << (pin))
#endif // COM_SPI
#endif // COM_SPI_H
## Instruction:
Rename SPI_START to SPI_ACTIVE and SPI_STOP to SPI_PASSIVE
## Code After:
//
// spi.h
// Ethernet Shield
//
// Created by EFCM van der Werf on 12/28/13.
// Copyright (c) 2013 EFCM van der Werf. All rights reserved.
//
#ifndef COM_SPI_H
#define COM_SPI_H
#include "../config.h"
// Do we want SPI?
#ifdef COM_SPI
#include <inttypes.h>
/**
* SPI config
*/
typedef struct spi_config {
};
/**
* @brief Initialize SPI channel
* @param config Configuration for spi channel
*/
extern void spi_init(spi_config *config);
#define SPI_ACTIVE(port, pin) (port) &= ~(1 << (pin))
#define SPI_PASSIVE(port, pin) (port) |= (1 << (pin))
#endif // COM_SPI
#endif // COM_SPI_H
|
// ... existing code ...
*/
extern void spi_init(spi_config *config);
#define SPI_ACTIVE(port, pin) (port) &= ~(1 << (pin))
#define SPI_PASSIVE(port, pin) (port) |= (1 << (pin))
#endif // COM_SPI
#endif // COM_SPI_H
// ... rest of the code ...
|
51ed7294ff5c57a4453fd02a5f1043ed9ef5b2e3
|
SeriesGuide/src/com/battlelancer/seriesguide/PrefBackupHelper.java
|
SeriesGuide/src/com/battlelancer/seriesguide/PrefBackupHelper.java
|
package com.battlelancer.seriesguide;
import android.app.backup.BackupAgentHelper;
import android.app.backup.SharedPreferencesBackupHelper;
public class PrefBackupHelper extends BackupAgentHelper {
// The name of the SharedPreferences file
static final String PREFS = "com.battlelancer.seriesguide_preferences";
// A key to uniquely identify the set of backup data
static final String PREFS_BACKUP_KEY = "prefs";
// Allocate a helper and add it to the backup agent
@Override
public void onCreate() {
SharedPreferencesBackupHelper helper = new SharedPreferencesBackupHelper(this, PREFS);
addHelper(PREFS_BACKUP_KEY, helper);
}
}
|
package com.battlelancer.seriesguide;
import android.app.backup.BackupAgentHelper;
import android.app.backup.SharedPreferencesBackupHelper;
public class PrefBackupHelper extends BackupAgentHelper {
// A key to uniquely identify the set of backup data
static final String PREFS_BACKUP_KEY = "prefs";
// Allocate a helper and add it to the backup agent
@Override
public void onCreate() {
SharedPreferencesBackupHelper helper = new SharedPreferencesBackupHelper(this,
this.getPackageName() + "_preferences");
addHelper(PREFS_BACKUP_KEY, helper);
}
}
|
Use correct package name for backup helper (so it will work with the beta, too).
|
Use correct package name for backup helper (so it will work with the
beta, too).
|
Java
|
apache-2.0
|
epiphany27/SeriesGuide,0359xiaodong/SeriesGuide,r00t-user/SeriesGuide,hoanganhx86/SeriesGuide,UweTrottmann/SeriesGuide,UweTrottmann/SeriesGuide,artemnikitin/SeriesGuide
|
java
|
## Code Before:
package com.battlelancer.seriesguide;
import android.app.backup.BackupAgentHelper;
import android.app.backup.SharedPreferencesBackupHelper;
public class PrefBackupHelper extends BackupAgentHelper {
// The name of the SharedPreferences file
static final String PREFS = "com.battlelancer.seriesguide_preferences";
// A key to uniquely identify the set of backup data
static final String PREFS_BACKUP_KEY = "prefs";
// Allocate a helper and add it to the backup agent
@Override
public void onCreate() {
SharedPreferencesBackupHelper helper = new SharedPreferencesBackupHelper(this, PREFS);
addHelper(PREFS_BACKUP_KEY, helper);
}
}
## Instruction:
Use correct package name for backup helper (so it will work with the
beta, too).
## Code After:
package com.battlelancer.seriesguide;
import android.app.backup.BackupAgentHelper;
import android.app.backup.SharedPreferencesBackupHelper;
public class PrefBackupHelper extends BackupAgentHelper {
// A key to uniquely identify the set of backup data
static final String PREFS_BACKUP_KEY = "prefs";
// Allocate a helper and add it to the backup agent
@Override
public void onCreate() {
SharedPreferencesBackupHelper helper = new SharedPreferencesBackupHelper(this,
this.getPackageName() + "_preferences");
addHelper(PREFS_BACKUP_KEY, helper);
}
}
|
...
import android.app.backup.SharedPreferencesBackupHelper;
public class PrefBackupHelper extends BackupAgentHelper {
// A key to uniquely identify the set of backup data
static final String PREFS_BACKUP_KEY = "prefs";
...
// Allocate a helper and add it to the backup agent
@Override
public void onCreate() {
SharedPreferencesBackupHelper helper = new SharedPreferencesBackupHelper(this,
this.getPackageName() + "_preferences");
addHelper(PREFS_BACKUP_KEY, helper);
}
}
...
|
a767a3f72191d431fd9f03543b812bf755fd48de
|
src/main/java/org/biojava/examples/MapChains.java
|
src/main/java/org/biojava/examples/MapChains.java
|
package org.biojava.examples;
import javax.vecmath.Point3d;
import org.apache.spark.api.java.JavaPairRDD;
import org.biojava.spark.data.StructureDataRDD;
/**
* Example of mapping the PDB to chains of just C-alpha coords.
* @author Anthony Bradley
*
*/
public class MapChains {
/**
* Example of mapping the PDB to chains of just C-alpha coords.
* @param args
*/
public static void main(String[] args) {
JavaPairRDD<String, Point3d[]> calphaChains =
new StructureDataRDD("/Users/anthony/full")
.filterResolution(3.0)
.filterRfree(0.3)
.getCalphaPair();
}
}
|
package org.biojava.examples;
import org.biojava.spark.data.StructureDataRDD;
/**
* Example of mapping the PDB to chains of just C-alpha coords.
* Calculate the mean C-alpha length in the PDB.
* @author Anthony Bradley
*
*/
public class MapChains {
/**
* Example of mapping the PDB to chains of just C-alpha coords.
* Calculate the mean C-alpha length in the PDB.
* @param args
*/
public static void main(String[] args) {
// Starter counter
Long startTime = System.currentTimeMillis();
Double meanCalphaLength =
new StructureDataRDD("/Users/anthony/full")
.filterResolution(3.0)
.filterRfree(0.3)
.getCalphaPair()
.mapToDouble(t -> t._2.length)
.mean();
System.out.println(meanCalphaLength+" is the mean C-alpha length in the PDB");
System.out.println("Found in "+(System.currentTimeMillis()-startTime)+" ms");
}
}
|
Clean up the C-alpha example.
|
Clean up the C-alpha example.
|
Java
|
lgpl-2.1
|
biojava/biojava-spark,abradle/biojava-spark
|
java
|
## Code Before:
package org.biojava.examples;
import javax.vecmath.Point3d;
import org.apache.spark.api.java.JavaPairRDD;
import org.biojava.spark.data.StructureDataRDD;
/**
* Example of mapping the PDB to chains of just C-alpha coords.
* @author Anthony Bradley
*
*/
public class MapChains {
/**
* Example of mapping the PDB to chains of just C-alpha coords.
* @param args
*/
public static void main(String[] args) {
JavaPairRDD<String, Point3d[]> calphaChains =
new StructureDataRDD("/Users/anthony/full")
.filterResolution(3.0)
.filterRfree(0.3)
.getCalphaPair();
}
}
## Instruction:
Clean up the C-alpha example.
## Code After:
package org.biojava.examples;
import org.biojava.spark.data.StructureDataRDD;
/**
* Example of mapping the PDB to chains of just C-alpha coords.
* Calculate the mean C-alpha length in the PDB.
* @author Anthony Bradley
*
*/
public class MapChains {
/**
* Example of mapping the PDB to chains of just C-alpha coords.
* Calculate the mean C-alpha length in the PDB.
* @param args
*/
public static void main(String[] args) {
// Starter counter
Long startTime = System.currentTimeMillis();
Double meanCalphaLength =
new StructureDataRDD("/Users/anthony/full")
.filterResolution(3.0)
.filterRfree(0.3)
.getCalphaPair()
.mapToDouble(t -> t._2.length)
.mean();
System.out.println(meanCalphaLength+" is the mean C-alpha length in the PDB");
System.out.println("Found in "+(System.currentTimeMillis()-startTime)+" ms");
}
}
|
...
package org.biojava.examples;
import org.biojava.spark.data.StructureDataRDD;
/**
* Example of mapping the PDB to chains of just C-alpha coords.
* Calculate the mean C-alpha length in the PDB.
* @author Anthony Bradley
*
*/
...
/**
* Example of mapping the PDB to chains of just C-alpha coords.
* Calculate the mean C-alpha length in the PDB.
* @param args
*/
public static void main(String[] args) {
// Starter counter
Long startTime = System.currentTimeMillis();
Double meanCalphaLength =
new StructureDataRDD("/Users/anthony/full")
.filterResolution(3.0)
.filterRfree(0.3)
.getCalphaPair()
.mapToDouble(t -> t._2.length)
.mean();
System.out.println(meanCalphaLength+" is the mean C-alpha length in the PDB");
System.out.println("Found in "+(System.currentTimeMillis()-startTime)+" ms");
}
}
...
|
9a9ecde6f88a6c969f23dbcfc5bbc7e611f7f138
|
version_info/get_version.py
|
version_info/get_version.py
|
import git
import version_info.exceptions
__all__ = (
'get_git_version',
'find_versions',
)
def get_git_version(path):
repo = git.Repo(path)
head_commit = repo.head.ref.commit
for tag in repo.tags:
if tag.commit == head_commit:
return tag.name, head_commit.hexsha
return None, head_commit.hexsha
GET_VERSION_MAPPING = {
'git': get_git_version,
}
def find_versions(repo_list):
"""
Passing a list of tuples that consist of:
('reference_name', 'git', '/full/path/to/repo')
Where:
* reference_name can be anything and it will be yielded back in name
* second element is the VCS type; for a list of supported VCS's see
README.rst
You receive a list of namedtuples:
[
(name='reference_name', tag='1.0', commit='fb666d55d3')
]
:param repo_list: list of tuples as specified
:return: list of namedtuples
"""
for name, vcs_type, path in repo_list:
vcs_type_normalized = vcs_type.lower()
try:
version_func = GET_VERSION_MAPPING[vcs_type_normalized]
except KeyError as exc:
raise version_info.exceptions.VCSNotSupported(exc.args[0])
yield (name,) + version_func(path)
|
import collections
import git
import version_info.exceptions
__all__ = (
'get_git_version',
'find_versions',
)
VersionSpec = collections.namedtuple('VersionSpec', ('name', 'tag', 'commit'))
def get_git_version(path):
repo = git.Repo(path)
head_commit = repo.head.ref.commit
for tag in repo.tags:
if tag.commit == head_commit:
return tag.name, head_commit.hexsha
return None, head_commit.hexsha
GET_VERSION_MAPPING = {
'git': get_git_version,
}
def find_versions(repo_list):
"""
Passing a list of tuples that consist of:
('reference_name', 'git', '/full/path/to/repo')
Where:
* reference_name can be anything and it will be yielded back in name
* second element is the VCS type; for a list of supported VCS's see
README.rst
You receive a list of namedtuples:
[
(name='reference_name', tag='1.0', commit='fb666d55d3')
]
:param repo_list: list of tuples as specified
:return: list of namedtuples
"""
for name, vcs_type, path in repo_list:
vcs_type_normalized = vcs_type.lower()
try:
version_func = GET_VERSION_MAPPING[vcs_type_normalized]
except KeyError as exc:
raise version_info.exceptions.VCSNotSupported(exc.args[0])
yield VersionSpec(name, *version_func(path))
|
Make find_versions return a namedtuple as documented
|
Make find_versions return a namedtuple as documented
|
Python
|
mit
|
TyMaszWeb/python-version-info
|
python
|
## Code Before:
import git
import version_info.exceptions
__all__ = (
'get_git_version',
'find_versions',
)
def get_git_version(path):
repo = git.Repo(path)
head_commit = repo.head.ref.commit
for tag in repo.tags:
if tag.commit == head_commit:
return tag.name, head_commit.hexsha
return None, head_commit.hexsha
GET_VERSION_MAPPING = {
'git': get_git_version,
}
def find_versions(repo_list):
"""
Passing a list of tuples that consist of:
('reference_name', 'git', '/full/path/to/repo')
Where:
* reference_name can be anything and it will be yielded back in name
* second element is the VCS type; for a list of supported VCS's see
README.rst
You receive a list of namedtuples:
[
(name='reference_name', tag='1.0', commit='fb666d55d3')
]
:param repo_list: list of tuples as specified
:return: list of namedtuples
"""
for name, vcs_type, path in repo_list:
vcs_type_normalized = vcs_type.lower()
try:
version_func = GET_VERSION_MAPPING[vcs_type_normalized]
except KeyError as exc:
raise version_info.exceptions.VCSNotSupported(exc.args[0])
yield (name,) + version_func(path)
## Instruction:
Make find_versions return a namedtuple as documented
## Code After:
import collections
import git
import version_info.exceptions
__all__ = (
'get_git_version',
'find_versions',
)
VersionSpec = collections.namedtuple('VersionSpec', ('name', 'tag', 'commit'))
def get_git_version(path):
repo = git.Repo(path)
head_commit = repo.head.ref.commit
for tag in repo.tags:
if tag.commit == head_commit:
return tag.name, head_commit.hexsha
return None, head_commit.hexsha
GET_VERSION_MAPPING = {
'git': get_git_version,
}
def find_versions(repo_list):
"""
Passing a list of tuples that consist of:
('reference_name', 'git', '/full/path/to/repo')
Where:
* reference_name can be anything and it will be yielded back in name
* second element is the VCS type; for a list of supported VCS's see
README.rst
You receive a list of namedtuples:
[
(name='reference_name', tag='1.0', commit='fb666d55d3')
]
:param repo_list: list of tuples as specified
:return: list of namedtuples
"""
for name, vcs_type, path in repo_list:
vcs_type_normalized = vcs_type.lower()
try:
version_func = GET_VERSION_MAPPING[vcs_type_normalized]
except KeyError as exc:
raise version_info.exceptions.VCSNotSupported(exc.args[0])
yield VersionSpec(name, *version_func(path))
|
// ... existing code ...
import collections
import git
import version_info.exceptions
// ... modified code ...
'get_git_version',
'find_versions',
)
VersionSpec = collections.namedtuple('VersionSpec', ('name', 'tag', 'commit'))
def get_git_version(path):
...
version_func = GET_VERSION_MAPPING[vcs_type_normalized]
except KeyError as exc:
raise version_info.exceptions.VCSNotSupported(exc.args[0])
yield VersionSpec(name, *version_func(path))
// ... rest of the code ...
|
3136f7e37b339252d4c1f5642974e180070c452d
|
kirppu/signals.py
|
kirppu/signals.py
|
from django.db.models.signals import pre_save, pre_delete
from django.dispatch import receiver
@receiver(pre_save)
def save_handler(sender, instance, using, **kwargs):
# noinspection PyProtectedMember
if instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default":
raise ValueError("Saving objects in non-default database should not happen")
@receiver(pre_delete)
def delete_handler(sender, instance, using, **kwargs):
# noinspection PyProtectedMember
if instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default":
raise ValueError("Deleting objects from non-default database should not happen")
|
from django.db.models.signals import pre_migrate, post_migrate
from django.dispatch import receiver
ENABLE_CHECK = True
@receiver(pre_migrate)
def pre_migrate_handler(*args, **kwargs):
global ENABLE_CHECK
ENABLE_CHECK = False
@receiver(post_migrate)
def post_migrate_handler(*args, **kwargs):
global ENABLE_CHECK
ENABLE_CHECK = True
def save_handler(sender, instance, using, **kwargs):
# noinspection PyProtectedMember
if ENABLE_CHECK and instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default":
raise ValueError("Saving objects in non-default database should not happen")
def delete_handler(sender, instance, using, **kwargs):
# noinspection PyProtectedMember
if ENABLE_CHECK and instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default":
raise ValueError("Deleting objects from non-default database should not happen")
|
Allow migrations to be run on extra databases.
|
Allow migrations to be run on extra databases.
- Remove duplicate registration of save and delete signals. Already
registered in apps.
|
Python
|
mit
|
jlaunonen/kirppu,jlaunonen/kirppu,jlaunonen/kirppu,jlaunonen/kirppu
|
python
|
## Code Before:
from django.db.models.signals import pre_save, pre_delete
from django.dispatch import receiver
@receiver(pre_save)
def save_handler(sender, instance, using, **kwargs):
# noinspection PyProtectedMember
if instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default":
raise ValueError("Saving objects in non-default database should not happen")
@receiver(pre_delete)
def delete_handler(sender, instance, using, **kwargs):
# noinspection PyProtectedMember
if instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default":
raise ValueError("Deleting objects from non-default database should not happen")
## Instruction:
Allow migrations to be run on extra databases.
- Remove duplicate registration of save and delete signals. Already
registered in apps.
## Code After:
from django.db.models.signals import pre_migrate, post_migrate
from django.dispatch import receiver
ENABLE_CHECK = True
@receiver(pre_migrate)
def pre_migrate_handler(*args, **kwargs):
global ENABLE_CHECK
ENABLE_CHECK = False
@receiver(post_migrate)
def post_migrate_handler(*args, **kwargs):
global ENABLE_CHECK
ENABLE_CHECK = True
def save_handler(sender, instance, using, **kwargs):
# noinspection PyProtectedMember
if ENABLE_CHECK and instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default":
raise ValueError("Saving objects in non-default database should not happen")
def delete_handler(sender, instance, using, **kwargs):
# noinspection PyProtectedMember
if ENABLE_CHECK and instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default":
raise ValueError("Deleting objects from non-default database should not happen")
|
// ... existing code ...
from django.db.models.signals import pre_migrate, post_migrate
from django.dispatch import receiver
ENABLE_CHECK = True
@receiver(pre_migrate)
def pre_migrate_handler(*args, **kwargs):
global ENABLE_CHECK
ENABLE_CHECK = False
@receiver(post_migrate)
def post_migrate_handler(*args, **kwargs):
global ENABLE_CHECK
ENABLE_CHECK = True
def save_handler(sender, instance, using, **kwargs):
# noinspection PyProtectedMember
if ENABLE_CHECK and instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default":
raise ValueError("Saving objects in non-default database should not happen")
def delete_handler(sender, instance, using, **kwargs):
# noinspection PyProtectedMember
if ENABLE_CHECK and instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default":
raise ValueError("Deleting objects from non-default database should not happen")
// ... rest of the code ...
|
51c37e74da9fe2bfc068fd29a52422c84b13900d
|
froide/frontpage/models.py
|
froide/frontpage/models.py
|
from django.db import models
from django.utils.translation import ugettext as _
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.contrib.sites.managers import CurrentSiteManager
from foirequest.models import FoiRequest
class FeaturedRequestManager(CurrentSiteManager):
def getFeatured(self):
try:
return self.get_query_set().order_by("-timestamp").select_related('request', 'request__publicbody')[0]
except IndexError:
return None
class FeaturedRequest(models.Model):
request = models.ForeignKey(FoiRequest,
verbose_name=_("Featured Request"))
timestamp = models.DateTimeField()
title = models.CharField(max_length=255)
text = models.TextField()
url = models.CharField(max_length=255, blank=True)
user = models.ForeignKey(User, null=True,
on_delete=models.SET_NULL,
verbose_name=_("User"))
site = models.ForeignKey(Site, null=True,
on_delete=models.SET_NULL, verbose_name=_("Site"))
objects = FeaturedRequestManager()
|
from django.db import models
from django.utils.translation import ugettext as _
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.contrib.sites.managers import CurrentSiteManager
from foirequest.models import FoiRequest
class FeaturedRequestManager(CurrentSiteManager):
def getFeatured(self):
try:
return self.get_query_set().order_by("-timestamp").select_related('request', 'request__publicbody')[0]
except IndexError:
return None
class FeaturedRequest(models.Model):
request = models.ForeignKey(FoiRequest,
verbose_name=_("Featured Request"))
timestamp = models.DateTimeField()
title = models.CharField(max_length=255)
text = models.TextField()
url = models.CharField(max_length=255, blank=True)
user = models.ForeignKey(User, null=True,
on_delete=models.SET_NULL,
verbose_name=_("User"))
site = models.ForeignKey(Site, null=True,
on_delete=models.SET_NULL, verbose_name=_("Site"))
objects = FeaturedRequestManager()
class Meta:
ordering = ('-timestamp',)
get_latest_by = 'timestamp'
verbose_name = _('Featured Request')
verbose_name_plural = _('Featured Requests')
|
Add meta class to FeaturedRequest model
|
Add meta class to FeaturedRequest model
|
Python
|
mit
|
ryankanno/froide,fin/froide,okfse/froide,catcosmo/froide,catcosmo/froide,okfse/froide,CodeforHawaii/froide,CodeforHawaii/froide,LilithWittmann/froide,LilithWittmann/froide,fin/froide,catcosmo/froide,ryankanno/froide,stefanw/froide,stefanw/froide,CodeforHawaii/froide,fin/froide,ryankanno/froide,ryankanno/froide,LilithWittmann/froide,okfse/froide,CodeforHawaii/froide,fin/froide,catcosmo/froide,okfse/froide,ryankanno/froide,stefanw/froide,stefanw/froide,LilithWittmann/froide,catcosmo/froide,LilithWittmann/froide,okfse/froide,stefanw/froide,CodeforHawaii/froide
|
python
|
## Code Before:
from django.db import models
from django.utils.translation import ugettext as _
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.contrib.sites.managers import CurrentSiteManager
from foirequest.models import FoiRequest
class FeaturedRequestManager(CurrentSiteManager):
def getFeatured(self):
try:
return self.get_query_set().order_by("-timestamp").select_related('request', 'request__publicbody')[0]
except IndexError:
return None
class FeaturedRequest(models.Model):
request = models.ForeignKey(FoiRequest,
verbose_name=_("Featured Request"))
timestamp = models.DateTimeField()
title = models.CharField(max_length=255)
text = models.TextField()
url = models.CharField(max_length=255, blank=True)
user = models.ForeignKey(User, null=True,
on_delete=models.SET_NULL,
verbose_name=_("User"))
site = models.ForeignKey(Site, null=True,
on_delete=models.SET_NULL, verbose_name=_("Site"))
objects = FeaturedRequestManager()
## Instruction:
Add meta class to FeaturedRequest model
## Code After:
from django.db import models
from django.utils.translation import ugettext as _
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.contrib.sites.managers import CurrentSiteManager
from foirequest.models import FoiRequest
class FeaturedRequestManager(CurrentSiteManager):
def getFeatured(self):
try:
return self.get_query_set().order_by("-timestamp").select_related('request', 'request__publicbody')[0]
except IndexError:
return None
class FeaturedRequest(models.Model):
request = models.ForeignKey(FoiRequest,
verbose_name=_("Featured Request"))
timestamp = models.DateTimeField()
title = models.CharField(max_length=255)
text = models.TextField()
url = models.CharField(max_length=255, blank=True)
user = models.ForeignKey(User, null=True,
on_delete=models.SET_NULL,
verbose_name=_("User"))
site = models.ForeignKey(Site, null=True,
on_delete=models.SET_NULL, verbose_name=_("Site"))
objects = FeaturedRequestManager()
class Meta:
ordering = ('-timestamp',)
get_latest_by = 'timestamp'
verbose_name = _('Featured Request')
verbose_name_plural = _('Featured Requests')
|
// ... existing code ...
on_delete=models.SET_NULL, verbose_name=_("Site"))
objects = FeaturedRequestManager()
class Meta:
ordering = ('-timestamp',)
get_latest_by = 'timestamp'
verbose_name = _('Featured Request')
verbose_name_plural = _('Featured Requests')
// ... rest of the code ...
|
5547f8a11192e9182b6d9aceef99249fc7b9d2cb
|
froide/publicbody/migrations/0007_auto_20171224_0744.py
|
froide/publicbody/migrations/0007_auto_20171224_0744.py
|
from __future__ import unicode_literals
from django.db import migrations
def create_classifications(apps, schema_editor):
from ..models import Classification # Use treebeard API
# Classification = apps.get_model('publicbody', 'Classification')
PublicBody = apps.get_model('publicbody', 'PublicBody')
classifications = {}
for pb in PublicBody.objects.exclude(classification_slug=''):
if pb.classification_slug in classifications:
pb.classification = classifications[pb.classification_slug]
else:
root = Classification.add_root(
name=pb.classification_name,
slug=pb.classification_slug
)
pb.classification = root
classifications[pb.classification_slug] = root
pb.save()
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0006_auto_20171224_0732'),
]
operations = [
migrations.RunPython(create_classifications),
]
|
from __future__ import unicode_literals
from django.db import migrations
def create_classifications(apps, schema_editor):
from ..models import Classification as RealClassification # Use treebeard API
Classification = apps.get_model('publicbody', 'Classification')
PublicBody = apps.get_model('publicbody', 'PublicBody')
classifications = {}
for pb in PublicBody.objects.exclude(classification_slug=''):
if pb.classification_slug in classifications:
pb.classification = classifications[pb.classification_slug]
else:
root = Classification(
name=pb.classification_name,
slug=pb.classification_slug,
depth=1,
path=RealClassification._get_path(None, 1, len(classifications))
)
root.save()
pb.classification = root
classifications[pb.classification_slug] = root
pb.save()
RealClassification.fix_tree()
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0006_auto_20171224_0732'),
]
operations = [
migrations.RunPython(create_classifications),
]
|
Fix pb migration, by faking treebeard
|
Fix pb migration, by faking treebeard
|
Python
|
mit
|
fin/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide
|
python
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations
def create_classifications(apps, schema_editor):
from ..models import Classification # Use treebeard API
# Classification = apps.get_model('publicbody', 'Classification')
PublicBody = apps.get_model('publicbody', 'PublicBody')
classifications = {}
for pb in PublicBody.objects.exclude(classification_slug=''):
if pb.classification_slug in classifications:
pb.classification = classifications[pb.classification_slug]
else:
root = Classification.add_root(
name=pb.classification_name,
slug=pb.classification_slug
)
pb.classification = root
classifications[pb.classification_slug] = root
pb.save()
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0006_auto_20171224_0732'),
]
operations = [
migrations.RunPython(create_classifications),
]
## Instruction:
Fix pb migration, by faking treebeard
## Code After:
from __future__ import unicode_literals
from django.db import migrations
def create_classifications(apps, schema_editor):
from ..models import Classification as RealClassification # Use treebeard API
Classification = apps.get_model('publicbody', 'Classification')
PublicBody = apps.get_model('publicbody', 'PublicBody')
classifications = {}
for pb in PublicBody.objects.exclude(classification_slug=''):
if pb.classification_slug in classifications:
pb.classification = classifications[pb.classification_slug]
else:
root = Classification(
name=pb.classification_name,
slug=pb.classification_slug,
depth=1,
path=RealClassification._get_path(None, 1, len(classifications))
)
root.save()
pb.classification = root
classifications[pb.classification_slug] = root
pb.save()
RealClassification.fix_tree()
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0006_auto_20171224_0732'),
]
operations = [
migrations.RunPython(create_classifications),
]
|
# ... existing code ...
def create_classifications(apps, schema_editor):
from ..models import Classification as RealClassification # Use treebeard API
Classification = apps.get_model('publicbody', 'Classification')
PublicBody = apps.get_model('publicbody', 'PublicBody')
classifications = {}
for pb in PublicBody.objects.exclude(classification_slug=''):
# ... modified code ...
if pb.classification_slug in classifications:
pb.classification = classifications[pb.classification_slug]
else:
root = Classification(
name=pb.classification_name,
slug=pb.classification_slug,
depth=1,
path=RealClassification._get_path(None, 1, len(classifications))
)
root.save()
pb.classification = root
classifications[pb.classification_slug] = root
pb.save()
RealClassification.fix_tree()
class Migration(migrations.Migration):
# ... rest of the code ...
|
37cae8e6f793f8a13a4c13d5333e8a0c9290f42a
|
setup.py
|
setup.py
|
import re
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
# Parse the version from the file.
verstrline = open('git_archive_all.py', "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
verstr = mo.group(1)
else:
raise RuntimeError("Unable to find version string in git_archive_all.py")
class PyTest(TestCommand):
user_options = [("pytest-args=", "a", "Arguments to pass to pytest")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ""
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
version=verstr,
py_modules=['git_archive_all'],
entry_points={'console_scripts': 'git-archive-all=git_archive_all:main'},
tests_require=['pytest', 'pytest-cov'],
cmdclass={"test": PyTest},
)
|
import re
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
# Parse the version from the file.
verstrline = open('git_archive_all.py', "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
verstr = mo.group(1)
else:
raise RuntimeError("Unable to find version string in git_archive_all.py")
class PyTest(TestCommand):
user_options = [("pytest-args=", "a", "Arguments to pass to pytest")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ""
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
version=verstr,
py_modules=['git_archive_all'],
entry_points={'console_scripts': 'git-archive-all=git_archive_all:main'},
tests_require=['pytest', 'pytest-cov', 'pycodestyle'],
cmdclass={"test": PyTest},
)
|
Add missing tests dependency pycodestyle.
|
Add missing tests dependency pycodestyle.
|
Python
|
mit
|
Kentzo/git-archive-all
|
python
|
## Code Before:
import re
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
# Parse the version from the file.
verstrline = open('git_archive_all.py', "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
verstr = mo.group(1)
else:
raise RuntimeError("Unable to find version string in git_archive_all.py")
class PyTest(TestCommand):
user_options = [("pytest-args=", "a", "Arguments to pass to pytest")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ""
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
version=verstr,
py_modules=['git_archive_all'],
entry_points={'console_scripts': 'git-archive-all=git_archive_all:main'},
tests_require=['pytest', 'pytest-cov'],
cmdclass={"test": PyTest},
)
## Instruction:
Add missing tests dependency pycodestyle.
## Code After:
import re
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
# Parse the version from the file.
verstrline = open('git_archive_all.py', "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
verstr = mo.group(1)
else:
raise RuntimeError("Unable to find version string in git_archive_all.py")
class PyTest(TestCommand):
user_options = [("pytest-args=", "a", "Arguments to pass to pytest")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ""
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
version=verstr,
py_modules=['git_archive_all'],
entry_points={'console_scripts': 'git-archive-all=git_archive_all:main'},
tests_require=['pytest', 'pytest-cov', 'pycodestyle'],
cmdclass={"test": PyTest},
)
|
...
version=verstr,
py_modules=['git_archive_all'],
entry_points={'console_scripts': 'git-archive-all=git_archive_all:main'},
tests_require=['pytest', 'pytest-cov', 'pycodestyle'],
cmdclass={"test": PyTest},
)
...
|
7aaef53e5547abfca8eb64ceb4ac477a14b79536
|
tensorflow_datasets/core/visualization/__init__.py
|
tensorflow_datasets/core/visualization/__init__.py
|
"""Visualizer utils."""
from tensorflow_datasets.core.visualization.image_visualizer import ImageGridVisualizer
from tensorflow_datasets.core.visualization.show_examples import show_examples
from tensorflow_datasets.core.visualization.show_examples import show_statistics
from tensorflow_datasets.core.visualization.visualizer import Visualizer
__all__ = [
"ImageGridVisualizer",
"show_examples",
"Visualizer",
]
|
"""Visualizer utils."""
from tensorflow_datasets.core.visualization.image_visualizer import ImageGridVisualizer
from tensorflow_datasets.core.visualization.show_examples import show_examples
from tensorflow_datasets.core.visualization.show_examples import show_statistics
from tensorflow_datasets.core.visualization.visualizer import Visualizer
__all__ = [
"ImageGridVisualizer",
"show_examples",
"show_statistics",
"Visualizer",
]
|
Add show_statistics to public API
|
Add show_statistics to public API
PiperOrigin-RevId: 322842576
|
Python
|
apache-2.0
|
tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets
|
python
|
## Code Before:
"""Visualizer utils."""
from tensorflow_datasets.core.visualization.image_visualizer import ImageGridVisualizer
from tensorflow_datasets.core.visualization.show_examples import show_examples
from tensorflow_datasets.core.visualization.show_examples import show_statistics
from tensorflow_datasets.core.visualization.visualizer import Visualizer
__all__ = [
"ImageGridVisualizer",
"show_examples",
"Visualizer",
]
## Instruction:
Add show_statistics to public API
PiperOrigin-RevId: 322842576
## Code After:
"""Visualizer utils."""
from tensorflow_datasets.core.visualization.image_visualizer import ImageGridVisualizer
from tensorflow_datasets.core.visualization.show_examples import show_examples
from tensorflow_datasets.core.visualization.show_examples import show_statistics
from tensorflow_datasets.core.visualization.visualizer import Visualizer
__all__ = [
"ImageGridVisualizer",
"show_examples",
"show_statistics",
"Visualizer",
]
|
# ... existing code ...
__all__ = [
"ImageGridVisualizer",
"show_examples",
"show_statistics",
"Visualizer",
]
# ... rest of the code ...
|
499ad0cb7147f705ebf83604b9e0873b5b0edb61
|
api/rest/scrollingpaginator.py
|
api/rest/scrollingpaginator.py
|
from rest_framework import pagination
from amcat.tools import amcates
from rest_framework.response import Response
from django.core.urlresolvers import reverse
from rest_framework.utils.urls import replace_query_param
class ScrollingPaginator(pagination.BasePagination):
def paginate_queryset(self, queryset, request, view=None):
self.request = request
es = amcates.ES()
scroll_id = request.query_params.get("scroll_id")
if scroll_id:
res = es.es.scroll(scroll_id, scroll="1m")
else:
res = es.search(scroll="1m", **queryset)
self.total = res['hits']['total']
self.scroll_id = res['_scroll_id']
self.done = not res['hits']['hits']
for hit in res['hits']['hits']:
item = {'id': hit['_id']}
if '_source' in hit:
item.update({k: v for (k, v) in hit['_source'].items()})
yield item
def get_paginated_response(self, data):
return Response({
'next': self.get_next_link(),
'results': data,
'total': self.total,
})
def get_next_link(self):
if not self.done:
url = self.request.build_absolute_uri()
return replace_query_param(url, "scroll_id", self.scroll_id)
|
from rest_framework import pagination
from amcat.tools import amcates
from rest_framework.response import Response
from django.core.urlresolvers import reverse
from rest_framework.utils.urls import replace_query_param
class ScrollingPaginator(pagination.BasePagination):
def paginate_queryset(self, queryset, request, view=None):
self.request = request
es = amcates.ES()
scroll_id = request.query_params.get("scroll_id")
scroll = request.query_params.get("scroll", "1m")
if scroll_id:
res = es.es.scroll(scroll_id, scroll=scroll)
else:
res = es.search(scroll=scroll, **queryset)
self.total = res['hits']['total']
self.scroll_id = res['_scroll_id']
self.done = not res['hits']['hits']
for hit in res['hits']['hits']:
item = {'id': hit['_id']}
if '_source' in hit:
item.update({k: v for (k, v) in hit['_source'].items()})
yield item
def get_paginated_response(self, data):
return Response({
'next': self.get_next_link(),
'results': data,
'total': self.total,
})
def get_next_link(self):
if not self.done:
url = self.request.build_absolute_uri()
return replace_query_param(url, "scroll_id", self.scroll_id)
|
Allow set scroll timeout param
|
Allow set scroll timeout param
|
Python
|
agpl-3.0
|
amcat/amcat,amcat/amcat,amcat/amcat,amcat/amcat,amcat/amcat,amcat/amcat
|
python
|
## Code Before:
from rest_framework import pagination
from amcat.tools import amcates
from rest_framework.response import Response
from django.core.urlresolvers import reverse
from rest_framework.utils.urls import replace_query_param
class ScrollingPaginator(pagination.BasePagination):
def paginate_queryset(self, queryset, request, view=None):
self.request = request
es = amcates.ES()
scroll_id = request.query_params.get("scroll_id")
if scroll_id:
res = es.es.scroll(scroll_id, scroll="1m")
else:
res = es.search(scroll="1m", **queryset)
self.total = res['hits']['total']
self.scroll_id = res['_scroll_id']
self.done = not res['hits']['hits']
for hit in res['hits']['hits']:
item = {'id': hit['_id']}
if '_source' in hit:
item.update({k: v for (k, v) in hit['_source'].items()})
yield item
def get_paginated_response(self, data):
return Response({
'next': self.get_next_link(),
'results': data,
'total': self.total,
})
def get_next_link(self):
if not self.done:
url = self.request.build_absolute_uri()
return replace_query_param(url, "scroll_id", self.scroll_id)
## Instruction:
Allow set scroll timeout param
## Code After:
from rest_framework import pagination
from amcat.tools import amcates
from rest_framework.response import Response
from django.core.urlresolvers import reverse
from rest_framework.utils.urls import replace_query_param
class ScrollingPaginator(pagination.BasePagination):
def paginate_queryset(self, queryset, request, view=None):
self.request = request
es = amcates.ES()
scroll_id = request.query_params.get("scroll_id")
scroll = request.query_params.get("scroll", "1m")
if scroll_id:
res = es.es.scroll(scroll_id, scroll=scroll)
else:
res = es.search(scroll=scroll, **queryset)
self.total = res['hits']['total']
self.scroll_id = res['_scroll_id']
self.done = not res['hits']['hits']
for hit in res['hits']['hits']:
item = {'id': hit['_id']}
if '_source' in hit:
item.update({k: v for (k, v) in hit['_source'].items()})
yield item
def get_paginated_response(self, data):
return Response({
'next': self.get_next_link(),
'results': data,
'total': self.total,
})
def get_next_link(self):
if not self.done:
url = self.request.build_absolute_uri()
return replace_query_param(url, "scroll_id", self.scroll_id)
|
...
es = amcates.ES()
scroll_id = request.query_params.get("scroll_id")
scroll = request.query_params.get("scroll", "1m")
if scroll_id:
res = es.es.scroll(scroll_id, scroll=scroll)
else:
res = es.search(scroll=scroll, **queryset)
self.total = res['hits']['total']
self.scroll_id = res['_scroll_id']
self.done = not res['hits']['hits']
...
|
eb8743da1aa5fbcdf0ea4aab0bbd4130b80c18cb
|
ktor-hosts/ktor-netty/src/org/jetbrains/ktor/netty/CIO.kt
|
ktor-hosts/ktor-netty/src/org/jetbrains/ktor/netty/CIO.kt
|
package org.jetbrains.ktor.netty
import io.netty.channel.*
import kotlin.coroutines.experimental.*
suspend fun ChannelFuture.suspendAwait() {
if (isDone) return
suspendCoroutine<Unit> { continuation ->
addListener { f ->
try {
f.get()
continuation.resume(Unit)
} catch (t: Throwable) {
continuation.resumeWithException(t)
}
}
}
}
|
package org.jetbrains.ktor.netty
import io.netty.channel.*
import kotlin.coroutines.experimental.*
suspend fun ChannelFuture.suspendAwait() {
if (isDone) return
suspendCoroutine<Unit> { continuation ->
addListener { f ->
try {
f.get()
} catch (t: Throwable) {
continuation.resumeWithException(t)
return@addListener
}
continuation.resume(Unit)
}
}
}
|
Fix exception handling in awaiting for a channel future
|
Fix exception handling in awaiting for a channel future
|
Kotlin
|
apache-2.0
|
ktorio/ktor,ktorio/ktor,ktorio/ktor,ktorio/ktor
|
kotlin
|
## Code Before:
package org.jetbrains.ktor.netty
import io.netty.channel.*
import kotlin.coroutines.experimental.*
suspend fun ChannelFuture.suspendAwait() {
if (isDone) return
suspendCoroutine<Unit> { continuation ->
addListener { f ->
try {
f.get()
continuation.resume(Unit)
} catch (t: Throwable) {
continuation.resumeWithException(t)
}
}
}
}
## Instruction:
Fix exception handling in awaiting for a channel future
## Code After:
package org.jetbrains.ktor.netty
import io.netty.channel.*
import kotlin.coroutines.experimental.*
suspend fun ChannelFuture.suspendAwait() {
if (isDone) return
suspendCoroutine<Unit> { continuation ->
addListener { f ->
try {
f.get()
} catch (t: Throwable) {
continuation.resumeWithException(t)
return@addListener
}
continuation.resume(Unit)
}
}
}
|
// ... existing code ...
addListener { f ->
try {
f.get()
} catch (t: Throwable) {
continuation.resumeWithException(t)
return@addListener
}
continuation.resume(Unit)
}
}
}
// ... rest of the code ...
|
630a8683ba748f130bbb70c285d30142e50cd8ba
|
playlist_kreator/gmusic.py
|
playlist_kreator/gmusic.py
|
from gmusicapi import Mobileclient
def create_playlist(playlist_name, artists, email, password, max_top_tracks=2):
api = Mobileclient()
logged_in = api.login(email, password, Mobileclient.FROM_MAC_ADDRESS)
if not logged_in:
raise Exception('Could not connect')
song_ids = []
for artist_name in artists:
search = api.search(artist_name)
if len(search["artist_hits"]) == 0:
print('{}: Does not exist in Google Music. Skipping'.format(artist_name))
else:
artist_id = search["artist_hits"][0]["artist"]["artistId"]
artist = api.get_artist_info(artist_id, include_albums=False,
max_top_tracks=max_top_tracks, max_rel_artist=0)
if 'topTracks' not in artist:
print('{}: Exists but no songs found on Google Music. Skipping'.format(artist_name))
else:
song_ids = song_ids + [track['nid'] for track in artist['topTracks']]
print('{}: Found {} song(s). Will add'.format(artist_name, len(artist['topTracks'])))
playlist_id = api.create_playlist(playlist_name)
print('\nCreated playlist {} ({})'.format(playlist_name, playlist_id))
api.add_songs_to_playlist(playlist_id, song_ids)
print('Added {} songs to the playlist'.format(len(song_ids)))
print('All done, well done. Enjoy!')
|
from gmusicapi import Mobileclient
def create_playlist(playlist_name, artists, email, password, max_top_tracks=2):
api = Mobileclient()
logged_in = api.login(email, password, Mobileclient.FROM_MAC_ADDRESS)
if not logged_in:
raise Exception('Could not connect')
song_ids = []
for artist_name in artists:
search = api.search(artist_name)
if len(search["artist_hits"]) == 0:
print('{}: Does not exist in Google Music. Skipping'.format(artist_name))
else:
artist_id = search["artist_hits"][0]["artist"]["artistId"]
artist = api.get_artist_info(artist_id, include_albums=False,
max_top_tracks=max_top_tracks, max_rel_artist=0)
if 'topTracks' not in artist:
print('{}: Exists but no songs found on Google Music. Skipping'.format(artist_name))
else:
song_ids = song_ids + [track['nid'] for track in artist['topTracks']]
print('{}: Found {} song(s). Will add'.format(artist_name, len(artist['topTracks'])))
playlist_id = api.create_playlist(playlist_name)
print('\nCreated playlist "{}" ({})'.format(playlist_name, playlist_id))
api.add_songs_to_playlist(playlist_id, song_ids)
print('Added {} songs to the playlist'.format(len(song_ids)))
print('All done. Enjoy! 🤘')
|
Change logs for google music
|
Change logs for google music
|
Python
|
mit
|
epayet/playlist_kreator,epayet/playlist_kreator
|
python
|
## Code Before:
from gmusicapi import Mobileclient
def create_playlist(playlist_name, artists, email, password, max_top_tracks=2):
api = Mobileclient()
logged_in = api.login(email, password, Mobileclient.FROM_MAC_ADDRESS)
if not logged_in:
raise Exception('Could not connect')
song_ids = []
for artist_name in artists:
search = api.search(artist_name)
if len(search["artist_hits"]) == 0:
print('{}: Does not exist in Google Music. Skipping'.format(artist_name))
else:
artist_id = search["artist_hits"][0]["artist"]["artistId"]
artist = api.get_artist_info(artist_id, include_albums=False,
max_top_tracks=max_top_tracks, max_rel_artist=0)
if 'topTracks' not in artist:
print('{}: Exists but no songs found on Google Music. Skipping'.format(artist_name))
else:
song_ids = song_ids + [track['nid'] for track in artist['topTracks']]
print('{}: Found {} song(s). Will add'.format(artist_name, len(artist['topTracks'])))
playlist_id = api.create_playlist(playlist_name)
print('\nCreated playlist {} ({})'.format(playlist_name, playlist_id))
api.add_songs_to_playlist(playlist_id, song_ids)
print('Added {} songs to the playlist'.format(len(song_ids)))
print('All done, well done. Enjoy!')
## Instruction:
Change logs for google music
## Code After:
from gmusicapi import Mobileclient
def create_playlist(playlist_name, artists, email, password, max_top_tracks=2):
api = Mobileclient()
logged_in = api.login(email, password, Mobileclient.FROM_MAC_ADDRESS)
if not logged_in:
raise Exception('Could not connect')
song_ids = []
for artist_name in artists:
search = api.search(artist_name)
if len(search["artist_hits"]) == 0:
print('{}: Does not exist in Google Music. Skipping'.format(artist_name))
else:
artist_id = search["artist_hits"][0]["artist"]["artistId"]
artist = api.get_artist_info(artist_id, include_albums=False,
max_top_tracks=max_top_tracks, max_rel_artist=0)
if 'topTracks' not in artist:
print('{}: Exists but no songs found on Google Music. Skipping'.format(artist_name))
else:
song_ids = song_ids + [track['nid'] for track in artist['topTracks']]
print('{}: Found {} song(s). Will add'.format(artist_name, len(artist['topTracks'])))
playlist_id = api.create_playlist(playlist_name)
print('\nCreated playlist "{}" ({})'.format(playlist_name, playlist_id))
api.add_songs_to_playlist(playlist_id, song_ids)
print('Added {} songs to the playlist'.format(len(song_ids)))
print('All done. Enjoy! 🤘')
|
// ... existing code ...
print('{}: Found {} song(s). Will add'.format(artist_name, len(artist['topTracks'])))
playlist_id = api.create_playlist(playlist_name)
print('\nCreated playlist "{}" ({})'.format(playlist_name, playlist_id))
api.add_songs_to_playlist(playlist_id, song_ids)
print('Added {} songs to the playlist'.format(len(song_ids)))
print('All done. Enjoy! 🤘')
// ... rest of the code ...
|
9d66600518ec05dae2be62da0bbe2c15ddccce9d
|
spicedham/__init__.py
|
spicedham/__init__.py
|
from pkg_resources import iter_entry_points
from spicedham.config import load_config
_plugins = None
def load_plugins():
"""
If not already loaded, load plugins.
"""
global _plugins
if _plugins == None:
load_config()
_plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
_plugins.append(pluginClass())
def train(tag, training_data, is_spam):
"""
Calls each plugin's train function.
"""
for plugin in _plugins:
plugin.train(tag, training_data, is_spam)
def classify(tag, classification_data):
"""
Calls each plugin's classify function and averages the results.
"""
average_score = 0
total = 0
for plugin in _plugins:
value = plugin.classify(tag, classification_data)
# Skip _plugins which give a score of None
if value != None:
total += 1
average_score += value
# On rare occasions no _plugins will give scores. If so, return 0
if total > 0:
return average_score / total
else:
return 0
|
from pkg_resources import iter_entry_points
from spicedham.config import load_config
from spicedham.backend import load_backend
_plugins = None
def load_plugins():
"""
If not already loaded, load plugins.
"""
global _plugins
if _plugins == None:
# In order to use the plugins config and backend must be loaded.
load_backend()
load_config()
_plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
_plugins.append(pluginClass())
def train(tag, training_data, is_spam):
"""
Calls each plugin's train function.
"""
for plugin in _plugins:
plugin.train(tag, training_data, is_spam)
def classify(tag, classification_data):
"""
Calls each plugin's classify function and averages the results.
"""
average_score = 0
total = 0
for plugin in _plugins:
value = plugin.classify(tag, classification_data)
# Skip _plugins which give a score of None
if value != None:
total += 1
average_score += value
# On rare occasions no _plugins will give scores. If so, return 0
if total > 0:
return average_score / total
else:
return 0
|
Make sure to load_backend as part of load_plugins
|
Make sure to load_backend as part of load_plugins
|
Python
|
mpl-2.0
|
mozilla/spicedham,mozilla/spicedham
|
python
|
## Code Before:
from pkg_resources import iter_entry_points
from spicedham.config import load_config
_plugins = None
def load_plugins():
"""
If not already loaded, load plugins.
"""
global _plugins
if _plugins == None:
load_config()
_plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
_plugins.append(pluginClass())
def train(tag, training_data, is_spam):
"""
Calls each plugin's train function.
"""
for plugin in _plugins:
plugin.train(tag, training_data, is_spam)
def classify(tag, classification_data):
"""
Calls each plugin's classify function and averages the results.
"""
average_score = 0
total = 0
for plugin in _plugins:
value = plugin.classify(tag, classification_data)
# Skip _plugins which give a score of None
if value != None:
total += 1
average_score += value
# On rare occasions no _plugins will give scores. If so, return 0
if total > 0:
return average_score / total
else:
return 0
## Instruction:
Make sure to load_backend as part of load_plugins
## Code After:
from pkg_resources import iter_entry_points
from spicedham.config import load_config
from spicedham.backend import load_backend
_plugins = None
def load_plugins():
"""
If not already loaded, load plugins.
"""
global _plugins
if _plugins == None:
# In order to use the plugins config and backend must be loaded.
load_backend()
load_config()
_plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
_plugins.append(pluginClass())
def train(tag, training_data, is_spam):
"""
Calls each plugin's train function.
"""
for plugin in _plugins:
plugin.train(tag, training_data, is_spam)
def classify(tag, classification_data):
"""
Calls each plugin's classify function and averages the results.
"""
average_score = 0
total = 0
for plugin in _plugins:
value = plugin.classify(tag, classification_data)
# Skip _plugins which give a score of None
if value != None:
total += 1
average_score += value
# On rare occasions no _plugins will give scores. If so, return 0
if total > 0:
return average_score / total
else:
return 0
|
// ... existing code ...
from pkg_resources import iter_entry_points
from spicedham.config import load_config
from spicedham.backend import load_backend
_plugins = None
// ... modified code ...
"""
global _plugins
if _plugins == None:
# In order to use the plugins config and backend must be loaded.
load_backend()
load_config()
_plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
// ... rest of the code ...
|
13059383a02834a8932b1f465755f44446ab6f95
|
include/itkTextProgressBarCommand.h
|
include/itkTextProgressBarCommand.h
|
/*=========================================================================
*
* Copyright Insight Software Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
#ifndef itkTextProgressBarCommand_h
#define itkTextProgressBarCommand_h
#include "itkCommand.h"
#include <string>
namespace itk
{
/** \class TextProgressBarCommand
*
* \brief A simple command that outputs a text progress bar the associated filter.
*
* \ingroup Ultrasound
* */
class TextProgressBarCommand:
public Command
{
public:
typedef TextProgressBarCommand Self;
typedef Command Superclass;
typedef SmartPointer< Self > Pointer;
itkNewMacro( Self );
protected:
TextProgressBarCommand();
void Execute(itk::Object *caller, const itk::EventObject & event) override;
void Execute(const itk::Object * object, const itk::EventObject & event) override;
std::string m_Progress;
};
} // end namespace itk
#endif
|
/*=========================================================================
*
* Copyright Insight Software Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
#ifndef itkTextProgressBarCommand_h
#define itkTextProgressBarCommand_h
#include "itkCommand.h"
#include "UltrasoundExport.h"
#include <string>
namespace itk
{
/** \class TextProgressBarCommand
*
* \brief A simple command that outputs a text progress bar the associated filter.
*
* \ingroup Ultrasound
* */
class Ultrasound_EXPORT TextProgressBarCommand:
public Command
{
public:
typedef TextProgressBarCommand Self;
typedef Command Superclass;
typedef SmartPointer< Self > Pointer;
itkNewMacro( Self );
protected:
TextProgressBarCommand();
void Execute(itk::Object *caller, const itk::EventObject & event) override;
void Execute(const itk::Object * object, const itk::EventObject & event) override;
std::string m_Progress;
};
} // end namespace itk
#endif
|
Add missing export specification for TextProgressBarCommand
|
COMP: Add missing export specification for TextProgressBarCommand
|
C
|
apache-2.0
|
thewtex/ITKUltrasound,thewtex/ITKUltrasound,KitwareMedical/ITKUltrasound,thewtex/ITKUltrasound,KitwareMedical/ITKUltrasound,KitwareMedical/ITKUltrasound
|
c
|
## Code Before:
/*=========================================================================
*
* Copyright Insight Software Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
#ifndef itkTextProgressBarCommand_h
#define itkTextProgressBarCommand_h
#include "itkCommand.h"
#include <string>
namespace itk
{
/** \class TextProgressBarCommand
*
* \brief A simple command that outputs a text progress bar the associated filter.
*
* \ingroup Ultrasound
* */
class TextProgressBarCommand:
public Command
{
public:
typedef TextProgressBarCommand Self;
typedef Command Superclass;
typedef SmartPointer< Self > Pointer;
itkNewMacro( Self );
protected:
TextProgressBarCommand();
void Execute(itk::Object *caller, const itk::EventObject & event) override;
void Execute(const itk::Object * object, const itk::EventObject & event) override;
std::string m_Progress;
};
} // end namespace itk
#endif
## Instruction:
COMP: Add missing export specification for TextProgressBarCommand
## Code After:
/*=========================================================================
*
* Copyright Insight Software Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
#ifndef itkTextProgressBarCommand_h
#define itkTextProgressBarCommand_h
#include "itkCommand.h"
#include "UltrasoundExport.h"
#include <string>
namespace itk
{
/** \class TextProgressBarCommand
*
* \brief A simple command that outputs a text progress bar the associated filter.
*
* \ingroup Ultrasound
* */
class Ultrasound_EXPORT TextProgressBarCommand:
public Command
{
public:
typedef TextProgressBarCommand Self;
typedef Command Superclass;
typedef SmartPointer< Self > Pointer;
itkNewMacro( Self );
protected:
TextProgressBarCommand();
void Execute(itk::Object *caller, const itk::EventObject & event) override;
void Execute(const itk::Object * object, const itk::EventObject & event) override;
std::string m_Progress;
};
} // end namespace itk
#endif
|
...
#include "itkCommand.h"
#include "UltrasoundExport.h"
#include <string>
namespace itk
...
*
* \ingroup Ultrasound
* */
class Ultrasound_EXPORT TextProgressBarCommand:
public Command
{
public:
...
|
ff9cdc8657a308caadd1b418d1f6e73a8a923586
|
src/radicalzephyr/boot_junit/test/CalculatorTest.java
|
src/radicalzephyr/boot_junit/test/CalculatorTest.java
|
package radicalzephyr.boot_junit.test;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import net.zephyrizing.http_server.Calculator;
public class CalculatorTest {
@Test
public void evaluatesExpression() {
Calculator calculator = new Calculator();
int sum = calculator.evaluate("1+2+3");
assertEquals(6, 5);
}
}
|
package radicalzephyr.boot_junit.test;
import radicalzephyr.boot_junit.test.Calculator;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
public class CalculatorTest {
@Test
public void evaluatesExpression() {
Calculator calculator = new Calculator();
int sum = calculator.evaluate("1+2+3");
assertEquals(6, 5);
}
}
|
Fix the test class to compile
|
Fix the test class to compile
|
Java
|
epl-1.0
|
RadicalZephyr/boot-junit
|
java
|
## Code Before:
package radicalzephyr.boot_junit.test;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import net.zephyrizing.http_server.Calculator;
public class CalculatorTest {
@Test
public void evaluatesExpression() {
Calculator calculator = new Calculator();
int sum = calculator.evaluate("1+2+3");
assertEquals(6, 5);
}
}
## Instruction:
Fix the test class to compile
## Code After:
package radicalzephyr.boot_junit.test;
import radicalzephyr.boot_junit.test.Calculator;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
public class CalculatorTest {
@Test
public void evaluatesExpression() {
Calculator calculator = new Calculator();
int sum = calculator.evaluate("1+2+3");
assertEquals(6, 5);
}
}
|
// ... existing code ...
package radicalzephyr.boot_junit.test;
import radicalzephyr.boot_junit.test.Calculator;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
public class CalculatorTest {
@Test
// ... rest of the code ...
|
d5c59c018ba7558a9d21370d7eb58ab590779cf1
|
plugins/autojoin/plugin_tests/autojoin_test.py
|
plugins/autojoin/plugin_tests/autojoin_test.py
|
from tests import base
def setUpModule():
base.enabledPlugins.append('autojoin')
base.startServer()
def tearDownModule():
base.stopServer()
class AutoJoinTest(base.TestCase):
def setUp(self):
base.TestCase.setUp(self)
|
from girder.constants import AccessType
from tests import base
import json
def setUpModule():
base.enabledPlugins.append('autojoin')
base.startServer()
def tearDownModule():
base.stopServer()
class AutoJoinTest(base.TestCase):
def setUp(self):
base.TestCase.setUp(self)
self.users = [self.model('user').createUser(
'usr%s' % num, 'passwd', 'tst', 'usr', 'u%[email protected]' % num)
for num in [0, 1]]
def testCuration(self):
admin, user = self.users
# create some groups
g1 = self.model('group').createGroup('g1', admin)
g2 = self.model('group').createGroup('g2', admin)
g3 = self.model('group').createGroup('g3', admin)
# set auto join rules
rules = [
{
'pattern': '@kitware.com',
'groupId': str(g1['_id']),
'level': AccessType.ADMIN
},
{
'pattern': '@example.com',
'groupId': str(g2['_id']),
'level': AccessType.READ
},
{
'pattern': '@example.com',
'groupId': str(g3['_id']),
'level': AccessType.WRITE
},
]
params = {
'list': json.dumps([{'key': 'autojoin', 'value': rules}])
}
resp = self.request(
'/system/setting', user=admin, method='PUT', params=params)
self.assertStatusOk(resp)
# create users
user1 = self.model('user').createUser(
'user1', 'password', 'John', 'Doe', '[email protected]')
user2 = self.model('user').createUser(
'user2', 'password', 'John', 'Doe', '[email protected]')
user3 = self.model('user').createUser(
'user3', 'password', 'John', 'Doe', '[email protected]')
# check correct groups were joined
self.assertEqual(user1['groups'], [g2['_id'], g3['_id']])
self.assertEqual(user2['groups'], [g1['_id']])
self.assertEqual(user3['groups'], [])
# check correct access levels
g1 = self.model('group').load(g1['_id'], force=True)
g2 = self.model('group').load(g2['_id'], force=True)
g3 = self.model('group').load(g3['_id'], force=True)
self.assertTrue(
{u'id': user2['_id'], u'level': AccessType.ADMIN} in
g1['access']['users'])
self.assertTrue(
{u'id': user1['_id'], u'level': AccessType.WRITE} in
g3['access']['users'])
|
Add server tests for auto join plugin
|
Add server tests for auto join plugin
|
Python
|
apache-2.0
|
kotfic/girder,kotfic/girder,adsorensen/girder,jbeezley/girder,data-exp-lab/girder,girder/girder,sutartmelson/girder,Kitware/girder,girder/girder,sutartmelson/girder,jbeezley/girder,RafaelPalomar/girder,girder/girder,adsorensen/girder,RafaelPalomar/girder,manthey/girder,manthey/girder,data-exp-lab/girder,RafaelPalomar/girder,Kitware/girder,RafaelPalomar/girder,manthey/girder,Kitware/girder,data-exp-lab/girder,sutartmelson/girder,data-exp-lab/girder,kotfic/girder,girder/girder,jbeezley/girder,manthey/girder,adsorensen/girder,Xarthisius/girder,kotfic/girder,Kitware/girder,adsorensen/girder,data-exp-lab/girder,RafaelPalomar/girder,jbeezley/girder,Xarthisius/girder,sutartmelson/girder,Xarthisius/girder,adsorensen/girder,kotfic/girder,Xarthisius/girder,sutartmelson/girder,Xarthisius/girder
|
python
|
## Code Before:
from tests import base
def setUpModule():
base.enabledPlugins.append('autojoin')
base.startServer()
def tearDownModule():
base.stopServer()
class AutoJoinTest(base.TestCase):
def setUp(self):
base.TestCase.setUp(self)
## Instruction:
Add server tests for auto join plugin
## Code After:
from girder.constants import AccessType
from tests import base
import json
def setUpModule():
base.enabledPlugins.append('autojoin')
base.startServer()
def tearDownModule():
base.stopServer()
class AutoJoinTest(base.TestCase):
def setUp(self):
base.TestCase.setUp(self)
self.users = [self.model('user').createUser(
'usr%s' % num, 'passwd', 'tst', 'usr', 'u%[email protected]' % num)
for num in [0, 1]]
def testCuration(self):
admin, user = self.users
# create some groups
g1 = self.model('group').createGroup('g1', admin)
g2 = self.model('group').createGroup('g2', admin)
g3 = self.model('group').createGroup('g3', admin)
# set auto join rules
rules = [
{
'pattern': '@kitware.com',
'groupId': str(g1['_id']),
'level': AccessType.ADMIN
},
{
'pattern': '@example.com',
'groupId': str(g2['_id']),
'level': AccessType.READ
},
{
'pattern': '@example.com',
'groupId': str(g3['_id']),
'level': AccessType.WRITE
},
]
params = {
'list': json.dumps([{'key': 'autojoin', 'value': rules}])
}
resp = self.request(
'/system/setting', user=admin, method='PUT', params=params)
self.assertStatusOk(resp)
# create users
user1 = self.model('user').createUser(
'user1', 'password', 'John', 'Doe', '[email protected]')
user2 = self.model('user').createUser(
'user2', 'password', 'John', 'Doe', '[email protected]')
user3 = self.model('user').createUser(
'user3', 'password', 'John', 'Doe', '[email protected]')
# check correct groups were joined
self.assertEqual(user1['groups'], [g2['_id'], g3['_id']])
self.assertEqual(user2['groups'], [g1['_id']])
self.assertEqual(user3['groups'], [])
# check correct access levels
g1 = self.model('group').load(g1['_id'], force=True)
g2 = self.model('group').load(g2['_id'], force=True)
g3 = self.model('group').load(g3['_id'], force=True)
self.assertTrue(
{u'id': user2['_id'], u'level': AccessType.ADMIN} in
g1['access']['users'])
self.assertTrue(
{u'id': user1['_id'], u'level': AccessType.WRITE} in
g3['access']['users'])
|
// ... existing code ...
from girder.constants import AccessType
from tests import base
import json
def setUpModule():
// ... modified code ...
def setUp(self):
base.TestCase.setUp(self)
self.users = [self.model('user').createUser(
'usr%s' % num, 'passwd', 'tst', 'usr', 'u%[email protected]' % num)
for num in [0, 1]]
def testCuration(self):
admin, user = self.users
# create some groups
g1 = self.model('group').createGroup('g1', admin)
g2 = self.model('group').createGroup('g2', admin)
g3 = self.model('group').createGroup('g3', admin)
# set auto join rules
rules = [
{
'pattern': '@kitware.com',
'groupId': str(g1['_id']),
'level': AccessType.ADMIN
},
{
'pattern': '@example.com',
'groupId': str(g2['_id']),
'level': AccessType.READ
},
{
'pattern': '@example.com',
'groupId': str(g3['_id']),
'level': AccessType.WRITE
},
]
params = {
'list': json.dumps([{'key': 'autojoin', 'value': rules}])
}
resp = self.request(
'/system/setting', user=admin, method='PUT', params=params)
self.assertStatusOk(resp)
# create users
user1 = self.model('user').createUser(
'user1', 'password', 'John', 'Doe', '[email protected]')
user2 = self.model('user').createUser(
'user2', 'password', 'John', 'Doe', '[email protected]')
user3 = self.model('user').createUser(
'user3', 'password', 'John', 'Doe', '[email protected]')
# check correct groups were joined
self.assertEqual(user1['groups'], [g2['_id'], g3['_id']])
self.assertEqual(user2['groups'], [g1['_id']])
self.assertEqual(user3['groups'], [])
# check correct access levels
g1 = self.model('group').load(g1['_id'], force=True)
g2 = self.model('group').load(g2['_id'], force=True)
g3 = self.model('group').load(g3['_id'], force=True)
self.assertTrue(
{u'id': user2['_id'], u'level': AccessType.ADMIN} in
g1['access']['users'])
self.assertTrue(
{u'id': user1['_id'], u'level': AccessType.WRITE} in
g3['access']['users'])
// ... rest of the code ...
|
1b066f793c6c3f8f8b1e9df2659922e2a5dbaf3a
|
challenge_5/python/alexbotello/FindTheDifference.py
|
challenge_5/python/alexbotello/FindTheDifference.py
|
from collections import Counter
class Solution:
def findTheDifference(self, s, t):
s = dict(Counter(s))
t = dict(Counter(t))
for key in t.keys():
if key not in s.keys():
s[key] = 0
if s[key] - t[key] <= -1:
return key
if __name__ == '__main__':
test_case = Solution()
s, t = [input() for _ in range(2)]
print('\n' + test_case.findTheDifference(s, t))
|
from collections import Counter
class Solution:
def findTheDifference(self, s, t):
s = Counter(s)
t = Counter(t)
for key in t.keys():
if s[key] != t[key]:
return key
if __name__ == '__main__':
test_case = Solution()
s, t = [input() for _ in range(2)]
print('\n' + test_case.findTheDifference(s, t))
|
Refactor to remove nested for loop
|
Refactor to remove nested for loop
|
Python
|
mit
|
mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges
|
python
|
## Code Before:
from collections import Counter
class Solution:
def findTheDifference(self, s, t):
s = dict(Counter(s))
t = dict(Counter(t))
for key in t.keys():
if key not in s.keys():
s[key] = 0
if s[key] - t[key] <= -1:
return key
if __name__ == '__main__':
test_case = Solution()
s, t = [input() for _ in range(2)]
print('\n' + test_case.findTheDifference(s, t))
## Instruction:
Refactor to remove nested for loop
## Code After:
from collections import Counter
class Solution:
def findTheDifference(self, s, t):
s = Counter(s)
t = Counter(t)
for key in t.keys():
if s[key] != t[key]:
return key
if __name__ == '__main__':
test_case = Solution()
s, t = [input() for _ in range(2)]
print('\n' + test_case.findTheDifference(s, t))
|
...
class Solution:
def findTheDifference(self, s, t):
s = Counter(s)
t = Counter(t)
for key in t.keys():
if s[key] != t[key]:
return key
if __name__ == '__main__':
...
|
cb3e05bc8b912e716811406333c5deedc06824be
|
src/condor_ckpt/condor_syscalls.h
|
src/condor_ckpt/condor_syscalls.h
|
typedef int BOOL;
static const int SYS_LOCAL = 1;
static const int SYS_REMOTE = 0;
static const int SYS_RECORDED = 2;
static const int SYS_MAPPED = 2;
static const int SYS_UNRECORDED = 0;
static const int SYS_UNMAPPED = 0;
#if defined(__cplusplus)
extern "C" {
#endif
int SetSyscalls( int mode );
BOOL LocalSysCalls();
BOOL RemoteSysCalls();
BOOL MappingFileDescriptors();
int REMOTE_syscall( int syscall_num, ... );
int syscall( int, ... );
#if defined(__cplusplus)
}
#endif
#endif
|
typedef int BOOL;
static const int SYS_LOCAL = 1;
static const int SYS_REMOTE = 0;
static const int SYS_RECORDED = 2;
static const int SYS_MAPPED = 2;
static const int SYS_UNRECORDED = 0;
static const int SYS_UNMAPPED = 0;
#if defined(__cplusplus)
extern "C" {
#endif
int SetSyscalls( int mode );
BOOL LocalSysCalls();
BOOL RemoteSysCalls();
BOOL MappingFileDescriptors();
int REMOTE_syscall( int syscall_num, ... );
#if defined(AIX32) && defined(__cplusplus)
int syscall( ... );
#else
int syscall( int, ... );
#endif
#if defined(__cplusplus)
}
#endif
#endif
|
Add inclusion of "syscall.aix32.h" or <syscall.h> depending on whether we are on an AIX machine where there is no <syscall.h>.
|
Add inclusion of "syscall.aix32.h" or <syscall.h> depending on whether
we are on an AIX machine where there is no <syscall.h>.
|
C
|
apache-2.0
|
djw8605/htcondor,htcondor/htcondor,clalancette/condor-dcloud,djw8605/condor,zhangzhehust/htcondor,neurodebian/htcondor,mambelli/osg-bosco-marco,neurodebian/htcondor,htcondor/htcondor,zhangzhehust/htcondor,neurodebian/htcondor,zhangzhehust/htcondor,zhangzhehust/htcondor,mambelli/osg-bosco-marco,htcondor/htcondor,djw8605/condor,mambelli/osg-bosco-marco,bbockelm/condor-network-accounting,djw8605/htcondor,djw8605/condor,djw8605/condor,djw8605/htcondor,neurodebian/htcondor,clalancette/condor-dcloud,zhangzhehust/htcondor,bbockelm/condor-network-accounting,djw8605/htcondor,bbockelm/condor-network-accounting,djw8605/condor,neurodebian/htcondor,clalancette/condor-dcloud,htcondor/htcondor,djw8605/htcondor,djw8605/condor,mambelli/osg-bosco-marco,htcondor/htcondor,clalancette/condor-dcloud,neurodebian/htcondor,djw8605/htcondor,neurodebian/htcondor,clalancette/condor-dcloud,djw8605/htcondor,htcondor/htcondor,neurodebian/htcondor,mambelli/osg-bosco-marco,zhangzhehust/htcondor,mambelli/osg-bosco-marco,neurodebian/htcondor,djw8605/condor,djw8605/htcondor,mambelli/osg-bosco-marco,htcondor/htcondor,bbockelm/condor-network-accounting,zhangzhehust/htcondor,djw8605/condor,mambelli/osg-bosco-marco,clalancette/condor-dcloud,clalancette/condor-dcloud,htcondor/htcondor,zhangzhehust/htcondor,bbockelm/condor-network-accounting,djw8605/htcondor,bbockelm/condor-network-accounting,bbockelm/condor-network-accounting,zhangzhehust/htcondor,bbockelm/condor-network-accounting
|
c
|
## Code Before:
typedef int BOOL;
static const int SYS_LOCAL = 1;
static const int SYS_REMOTE = 0;
static const int SYS_RECORDED = 2;
static const int SYS_MAPPED = 2;
static const int SYS_UNRECORDED = 0;
static const int SYS_UNMAPPED = 0;
#if defined(__cplusplus)
extern "C" {
#endif
int SetSyscalls( int mode );
BOOL LocalSysCalls();
BOOL RemoteSysCalls();
BOOL MappingFileDescriptors();
int REMOTE_syscall( int syscall_num, ... );
int syscall( int, ... );
#if defined(__cplusplus)
}
#endif
#endif
## Instruction:
Add inclusion of "syscall.aix32.h" or <syscall.h> depending on whether
we are on an AIX machine where there is no <syscall.h>.
## Code After:
typedef int BOOL;
static const int SYS_LOCAL = 1;
static const int SYS_REMOTE = 0;
static const int SYS_RECORDED = 2;
static const int SYS_MAPPED = 2;
static const int SYS_UNRECORDED = 0;
static const int SYS_UNMAPPED = 0;
#if defined(__cplusplus)
extern "C" {
#endif
int SetSyscalls( int mode );
BOOL LocalSysCalls();
BOOL RemoteSysCalls();
BOOL MappingFileDescriptors();
int REMOTE_syscall( int syscall_num, ... );
#if defined(AIX32) && defined(__cplusplus)
int syscall( ... );
#else
int syscall( int, ... );
#endif
#if defined(__cplusplus)
}
#endif
#endif
|
# ... existing code ...
BOOL RemoteSysCalls();
BOOL MappingFileDescriptors();
int REMOTE_syscall( int syscall_num, ... );
#if defined(AIX32) && defined(__cplusplus)
int syscall( ... );
#else
int syscall( int, ... );
#endif
#if defined(__cplusplus)
# ... rest of the code ...
|
60d7f3e5e10f5d238f7b2b6954c2215445acfd3f
|
src/uk/co/sftrabbit/stackanswers/AuthenticationActivity.java
|
src/uk/co/sftrabbit/stackanswers/AuthenticationActivity.java
|
package uk.co.sftrabbit.stackanswers;
import android.os.Bundle;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import uk.co.sftrabbit.stackanswers.app.DrawerActivity;
public class AuthenticationActivity extends DrawerActivity {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_authentication);
setDrawerIndicatorEnabled(false);
WebView webView = (WebView) findViewById(R.id.authentication_web_view);
webView.setWebViewClient(new WebViewClient());
WebSettings webSettings = webView.getSettings();
webSettings.setJavaScriptEnabled(true);
webView.loadUrl(
"https://stackexchange.com/oauth/dialog?" +
"client_id=2265&" +
"scope=read_inbox,write_access,private_info&" +
"redirect_uri=https://stackexchange.com/oauth/login_success"
);
}
}
|
package uk.co.sftrabbit.stackanswers;
import android.os.Bundle;
import android.webkit.WebChromeClient;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.ProgressBar;
import uk.co.sftrabbit.stackanswers.app.DrawerActivity;
public class AuthenticationActivity extends DrawerActivity {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_authentication);
setDrawerIndicatorEnabled(false);
final ProgressBar progressBar =
(ProgressBar) findViewById(R.id.authentication_progress);
WebView webView = (WebView) findViewById(R.id.authentication_web_view);
webView.setWebViewClient(new WebViewClient());
webView.setWebChromeClient(new WebChromeClient() {
@Override
public void onProgressChanged(WebView webview, int newProgress) {
progressBar.setProgress(newProgress);
}
});
WebSettings webSettings = webView.getSettings();
webSettings.setJavaScriptEnabled(true);
webView.loadUrl(
"https://stackexchange.com/oauth/dialog?" +
"client_id=2265&" +
"scope=read_inbox,write_access,private_info&" +
"redirect_uri=https://stackexchange.com/oauth/login_success"
);
}
}
|
Update progress bar as authentication pages load
|
Update progress bar as authentication pages load
|
Java
|
mit
|
sftrabbit/StackAnswers
|
java
|
## Code Before:
package uk.co.sftrabbit.stackanswers;
import android.os.Bundle;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import uk.co.sftrabbit.stackanswers.app.DrawerActivity;
public class AuthenticationActivity extends DrawerActivity {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_authentication);
setDrawerIndicatorEnabled(false);
WebView webView = (WebView) findViewById(R.id.authentication_web_view);
webView.setWebViewClient(new WebViewClient());
WebSettings webSettings = webView.getSettings();
webSettings.setJavaScriptEnabled(true);
webView.loadUrl(
"https://stackexchange.com/oauth/dialog?" +
"client_id=2265&" +
"scope=read_inbox,write_access,private_info&" +
"redirect_uri=https://stackexchange.com/oauth/login_success"
);
}
}
## Instruction:
Update progress bar as authentication pages load
## Code After:
package uk.co.sftrabbit.stackanswers;
import android.os.Bundle;
import android.webkit.WebChromeClient;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.ProgressBar;
import uk.co.sftrabbit.stackanswers.app.DrawerActivity;
public class AuthenticationActivity extends DrawerActivity {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_authentication);
setDrawerIndicatorEnabled(false);
final ProgressBar progressBar =
(ProgressBar) findViewById(R.id.authentication_progress);
WebView webView = (WebView) findViewById(R.id.authentication_web_view);
webView.setWebViewClient(new WebViewClient());
webView.setWebChromeClient(new WebChromeClient() {
@Override
public void onProgressChanged(WebView webview, int newProgress) {
progressBar.setProgress(newProgress);
}
});
WebSettings webSettings = webView.getSettings();
webSettings.setJavaScriptEnabled(true);
webView.loadUrl(
"https://stackexchange.com/oauth/dialog?" +
"client_id=2265&" +
"scope=read_inbox,write_access,private_info&" +
"redirect_uri=https://stackexchange.com/oauth/login_success"
);
}
}
|
...
package uk.co.sftrabbit.stackanswers;
import android.os.Bundle;
import android.webkit.WebChromeClient;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.ProgressBar;
import uk.co.sftrabbit.stackanswers.app.DrawerActivity;
public class AuthenticationActivity extends DrawerActivity {
...
setDrawerIndicatorEnabled(false);
final ProgressBar progressBar =
(ProgressBar) findViewById(R.id.authentication_progress);
WebView webView = (WebView) findViewById(R.id.authentication_web_view);
webView.setWebViewClient(new WebViewClient());
webView.setWebChromeClient(new WebChromeClient() {
@Override
public void onProgressChanged(WebView webview, int newProgress) {
progressBar.setProgress(newProgress);
}
});
WebSettings webSettings = webView.getSettings();
webSettings.setJavaScriptEnabled(true);
...
|
ab6526b14f5bdc544367bcaa281a861d2314330b
|
gi2fasta.py
|
gi2fasta.py
|
import sys
from Bio import Entrez
from Bio import SeqIO
Entrez.email = "[email protected]"
infilename = sys.argv[1]
outfilename = sys.argv[2]
with open(infilename) as f:
gi_numbers=', '.join(line.rstrip() for line in f)
handle = Entrez.efetch(db="protein", rettype="fasta", retmode="text", id=gi_numbers)
records = SeqIO.parse(handle, "fasta")
fout = open(outfilename, 'w')
for record in records:
#print ">" + record.seq
#print record.id
print record.description
#print record.seq
fout.write(">" + str(record.description) + "\n")
fout.write(str(record.seq) + "\n")
fout.close()
#for seq_record in SeqIO.parse(record, "fasta"):
# print seq_record.id
#fob2.write("high percent identity cutoff: " + str(high_identity2) + "\n")
|
import sys
#from Bio import Entrez
#from Bio import SeqIO
user_email = "" # User must supply email here to access NCBI api
# Add error message in the event no email address is supplied
if user_email == "":
sys.exit("Error: Please supply your email address to line 5 of gi2fasta.py")
Entrez.email = user_email
infilename = sys.argv[1]
outfilename = sys.argv[2]
with open(infilename) as f:
gi_numbers=', '.join(line.rstrip() for line in f)
handle = Entrez.efetch(db="protein", rettype="fasta", retmode="text", id=gi_numbers)
records = SeqIO.parse(handle, "fasta")
fout = open(outfilename, 'w')
for record in records:
#print ">" + record.seq
#print record.id
print record.description
#print record.seq
fout.write(">" + str(record.description) + "\n")
fout.write(str(record.seq) + "\n")
fout.close()
#for seq_record in SeqIO.parse(record, "fasta"):
# print seq_record.id
#fob2.write("high percent identity cutoff: " + str(high_identity2) + "\n")
|
Add error message if User does not enter email address
|
Add error message if User does not enter email address
|
Python
|
bsd-2-clause
|
datadaveshin/bioinformatics,datadaveshin/bioinformatics
|
python
|
## Code Before:
import sys
from Bio import Entrez
from Bio import SeqIO
Entrez.email = "[email protected]"
infilename = sys.argv[1]
outfilename = sys.argv[2]
with open(infilename) as f:
gi_numbers=', '.join(line.rstrip() for line in f)
handle = Entrez.efetch(db="protein", rettype="fasta", retmode="text", id=gi_numbers)
records = SeqIO.parse(handle, "fasta")
fout = open(outfilename, 'w')
for record in records:
#print ">" + record.seq
#print record.id
print record.description
#print record.seq
fout.write(">" + str(record.description) + "\n")
fout.write(str(record.seq) + "\n")
fout.close()
#for seq_record in SeqIO.parse(record, "fasta"):
# print seq_record.id
#fob2.write("high percent identity cutoff: " + str(high_identity2) + "\n")
## Instruction:
Add error message if User does not enter email address
## Code After:
import sys
#from Bio import Entrez
#from Bio import SeqIO
user_email = "" # User must supply email here to access NCBI api
# Add error message in the event no email address is supplied
if user_email == "":
sys.exit("Error: Please supply your email address to line 5 of gi2fasta.py")
Entrez.email = user_email
infilename = sys.argv[1]
outfilename = sys.argv[2]
with open(infilename) as f:
gi_numbers=', '.join(line.rstrip() for line in f)
handle = Entrez.efetch(db="protein", rettype="fasta", retmode="text", id=gi_numbers)
records = SeqIO.parse(handle, "fasta")
fout = open(outfilename, 'w')
for record in records:
#print ">" + record.seq
#print record.id
print record.description
#print record.seq
fout.write(">" + str(record.description) + "\n")
fout.write(str(record.seq) + "\n")
fout.close()
#for seq_record in SeqIO.parse(record, "fasta"):
# print seq_record.id
#fob2.write("high percent identity cutoff: " + str(high_identity2) + "\n")
|
...
import sys
#from Bio import Entrez
#from Bio import SeqIO
user_email = "" # User must supply email here to access NCBI api
# Add error message in the event no email address is supplied
if user_email == "":
sys.exit("Error: Please supply your email address to line 5 of gi2fasta.py")
Entrez.email = user_email
infilename = sys.argv[1]
outfilename = sys.argv[2]
...
|
10e3c7b8dbc4befa2533de1a07f1f7827b961f81
|
rejected/__init__.py
|
rejected/__init__.py
|
import logging
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(logging.NullHandler())
from rejected.consumer import (
Consumer,
ConsumerException,
MessageException,
ProcessingException,
PublishingConsumer,
SmartConsumer,
SmartPublishingConsumer) # noqa E402
__author__ = 'Gavin M. Roy <[email protected]>'
__since__ = '2009-09-10'
__version__ = '3.20.7'
__all__ = [
'__author__',
'__since__',
'__version__',
'Consumer',
'ConsumerException',
'MessageException',
'ProcessingException',
'PublishingConsumer',
'SmartConsumer',
'SmartPublishingConsumer'
]
|
import logging
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(logging.NullHandler())
from rejected.consumer import ( # noqa: E402
Consumer,
ConsumerException,
MessageException,
ProcessingException,
PublishingConsumer,
SmartConsumer,
SmartPublishingConsumer)
__author__ = 'Gavin M. Roy <[email protected]>'
__since__ = '2009-09-10'
__version__ = '3.20.8'
__all__ = [
'__author__',
'__since__',
'__version__',
'Consumer',
'ConsumerException',
'MessageException',
'ProcessingException',
'PublishingConsumer',
'SmartConsumer',
'SmartPublishingConsumer'
]
|
Fix noqa location, bump version
|
Fix noqa location, bump version
|
Python
|
bsd-3-clause
|
gmr/rejected,gmr/rejected
|
python
|
## Code Before:
import logging
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(logging.NullHandler())
from rejected.consumer import (
Consumer,
ConsumerException,
MessageException,
ProcessingException,
PublishingConsumer,
SmartConsumer,
SmartPublishingConsumer) # noqa E402
__author__ = 'Gavin M. Roy <[email protected]>'
__since__ = '2009-09-10'
__version__ = '3.20.7'
__all__ = [
'__author__',
'__since__',
'__version__',
'Consumer',
'ConsumerException',
'MessageException',
'ProcessingException',
'PublishingConsumer',
'SmartConsumer',
'SmartPublishingConsumer'
]
## Instruction:
Fix noqa location, bump version
## Code After:
import logging
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(logging.NullHandler())
from rejected.consumer import ( # noqa: E402
Consumer,
ConsumerException,
MessageException,
ProcessingException,
PublishingConsumer,
SmartConsumer,
SmartPublishingConsumer)
__author__ = 'Gavin M. Roy <[email protected]>'
__since__ = '2009-09-10'
__version__ = '3.20.8'
__all__ = [
'__author__',
'__since__',
'__version__',
'Consumer',
'ConsumerException',
'MessageException',
'ProcessingException',
'PublishingConsumer',
'SmartConsumer',
'SmartPublishingConsumer'
]
|
# ... existing code ...
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(logging.NullHandler())
from rejected.consumer import ( # noqa: E402
Consumer,
ConsumerException,
MessageException,
# ... modified code ...
ProcessingException,
PublishingConsumer,
SmartConsumer,
SmartPublishingConsumer)
__author__ = 'Gavin M. Roy <[email protected]>'
__since__ = '2009-09-10'
__version__ = '3.20.8'
__all__ = [
'__author__',
# ... rest of the code ...
|
8c950b364cc22d800cfac7af347b6bed3d012d6b
|
pyseeta/config.py
|
pyseeta/config.py
|
import os
import sys
config = {
'win32': {
'detector': 'seeta_fd_lib.dll',
'aligner': 'seeta_fa_lib.dll',
'identifier': 'seeta_fi_lib.dll'
},
'darwin': {
'detector': 'libseeta_fd_lib.dylib',
'aligner': 'libseeta_fa_lib.dylib',
'identifier': 'libseeta_fi_lib.dylib'
},
'linux': {
'detector': 'libseeta_fd_lib.so',
'aligner': 'libseeta_fa_lib.so',
'identifier': 'libseeta_fi_lib.so'
}
}
def get_detector_library():
dir = os.path.dirname(__file__)
return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['detector'])
def get_aligner_library():
dir = os.path.dirname(__file__)
return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['aligner'])
def get_identifier_library():
dir = os.path.dirname(__file__)
return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['identifier'])
|
import os
import sys
config = {
'win32': {
'detector': 'seeta_fd_lib.dll',
'aligner': 'seeta_fa_lib.dll',
'identifier': 'seeta_fi_lib.dll'
},
'darwin': {
'detector': 'libseeta_fd_lib.dylib',
'aligner': 'libseeta_fa_lib.dylib',
'identifier': 'libseeta_fi_lib.dylib'
},
'linux': {
'detector': 'libseeta_fd_lib.so',
'aligner': 'libseeta_fa_lib.so',
'identifier': 'libseeta_fi_lib.so'
},
# Ubuntu 16.04 x64 Python 2.7.12 (default, Nov 19 2016, 06:48:10) sys.platform return 'linux2'
'linux2': {
'detector': 'libseeta_fd_lib.so',
'aligner': 'libseeta_fa_lib.so',
'identifier': 'libseeta_fi_lib.so'
}
}
def get_detector_library():
dir = os.path.dirname(__file__)
return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['detector'])
def get_aligner_library():
dir = os.path.dirname(__file__)
return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['aligner'])
def get_identifier_library():
dir = os.path.dirname(__file__)
return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['identifier'])
|
Fix crash when Ubuntu 16.04 x64 Python 2.7.12 (default, Nov 19 2016, 06:48:10) sys.platform return 'linux2'
|
Fix crash
when Ubuntu 16.04 x64 Python 2.7.12 (default, Nov 19 2016, 06:48:10) sys.platform return 'linux2'
|
Python
|
mit
|
TuXiaokang/pyseeta
|
python
|
## Code Before:
import os
import sys
config = {
'win32': {
'detector': 'seeta_fd_lib.dll',
'aligner': 'seeta_fa_lib.dll',
'identifier': 'seeta_fi_lib.dll'
},
'darwin': {
'detector': 'libseeta_fd_lib.dylib',
'aligner': 'libseeta_fa_lib.dylib',
'identifier': 'libseeta_fi_lib.dylib'
},
'linux': {
'detector': 'libseeta_fd_lib.so',
'aligner': 'libseeta_fa_lib.so',
'identifier': 'libseeta_fi_lib.so'
}
}
def get_detector_library():
dir = os.path.dirname(__file__)
return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['detector'])
def get_aligner_library():
dir = os.path.dirname(__file__)
return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['aligner'])
def get_identifier_library():
dir = os.path.dirname(__file__)
return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['identifier'])
## Instruction:
Fix crash
when Ubuntu 16.04 x64 Python 2.7.12 (default, Nov 19 2016, 06:48:10) sys.platform return 'linux2'
## Code After:
import os
import sys
config = {
'win32': {
'detector': 'seeta_fd_lib.dll',
'aligner': 'seeta_fa_lib.dll',
'identifier': 'seeta_fi_lib.dll'
},
'darwin': {
'detector': 'libseeta_fd_lib.dylib',
'aligner': 'libseeta_fa_lib.dylib',
'identifier': 'libseeta_fi_lib.dylib'
},
'linux': {
'detector': 'libseeta_fd_lib.so',
'aligner': 'libseeta_fa_lib.so',
'identifier': 'libseeta_fi_lib.so'
},
# Ubuntu 16.04 x64 Python 2.7.12 (default, Nov 19 2016, 06:48:10) sys.platform return 'linux2'
'linux2': {
'detector': 'libseeta_fd_lib.so',
'aligner': 'libseeta_fa_lib.so',
'identifier': 'libseeta_fi_lib.so'
}
}
def get_detector_library():
dir = os.path.dirname(__file__)
return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['detector'])
def get_aligner_library():
dir = os.path.dirname(__file__)
return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['aligner'])
def get_identifier_library():
dir = os.path.dirname(__file__)
return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['identifier'])
|
// ... existing code ...
'detector': 'libseeta_fd_lib.so',
'aligner': 'libseeta_fa_lib.so',
'identifier': 'libseeta_fi_lib.so'
},
# Ubuntu 16.04 x64 Python 2.7.12 (default, Nov 19 2016, 06:48:10) sys.platform return 'linux2'
'linux2': {
'detector': 'libseeta_fd_lib.so',
'aligner': 'libseeta_fa_lib.so',
'identifier': 'libseeta_fi_lib.so'
}
}
// ... rest of the code ...
|
bb19c79ebc976bfa390f3c6ecc59ec6e0d03dd7e
|
speed_spider.py
|
speed_spider.py
|
from grab.spider import Spider, Task
from grab.tools.logs import default_logging
import time
import logging
from random import randint
from grab.util.py3k_support import *
URL_28K = 'http://load.local/grab.html'
def timer(func):
"""
Display time taken to execute the decorated function.
"""
def inner(*args, **kwargs):
start = time.time()
result = func(*args, **kwargs)
total = time.time() - start
print('Time: %.2f sec.' % total)
return result
return inner
class SpeedSpider(Spider):
def task_generator(self):
url_template = 'http://load.local/grab%d.html'
#fast_url = 'http://load.local/grab0.html'
slow_url = 'http://load.local/slow.html'
#yield Task('load', url=slow_url, disable_cache=True)
#yield Task('load', url=fast_url, disable_cache=False)
for x in xrange(500):
disable_flag = True#not (x % 2)
yield Task('load', url=url_template % x, disable_cache=disable_flag)
#if randint(0, 10) == 10:
#yield Task('load', url=slow_url, disable_cache=True)
def task_load(self, grab, task):
assert 'grab' in grab.response.body
print('ok', task.url)
@timer
def main():
default_logging()
bot = SpeedSpider(thread_number=30)
bot.setup_cache(database='speed_spider', use_compression=True)
bot.run()
print(bot.render_stats())
if __name__ == '__main__':
main()
|
from grab.spider import Spider, Task
from grab.tools.logs import default_logging
import time
import logging
from random import randint
from grab.util.py3k_support import *
URL_28K = 'http://load.local/grab.html'
def timer(func):
"""
Display time taken to execute the decorated function.
"""
def inner(*args, **kwargs):
start = time.time()
result = func(*args, **kwargs)
total = time.time() - start
print('Time: %.2f sec.' % total)
return result
return inner
class SpeedSpider(Spider):
def task_generator(self):
url = 'http://load.local/grab.html'
for x in xrange(500):
yield Task('load', url=url)
def task_load(self, grab, task):
assert 'grab' in grab.response.body
print('ok', task.url)
@timer
def main():
default_logging()
bot = SpeedSpider(thread_number=30)
bot.run()
print(bot.render_stats())
if __name__ == '__main__':
main()
|
Change code of speed test
|
Change code of speed test
|
Python
|
mit
|
shaunstanislaus/grab,alihalabyah/grab,pombredanne/grab-1,alihalabyah/grab,lorien/grab,DDShadoww/grab,maurobaraldi/grab,maurobaraldi/grab,codevlabs/grab,istinspring/grab,DDShadoww/grab,codevlabs/grab,huiyi1990/grab,lorien/grab,kevinlondon/grab,giserh/grab,liorvh/grab,pombredanne/grab-1,shaunstanislaus/grab,raybuhr/grab,huiyi1990/grab,liorvh/grab,kevinlondon/grab,giserh/grab,istinspring/grab,SpaceAppsXploration/grab,raybuhr/grab,SpaceAppsXploration/grab
|
python
|
## Code Before:
from grab.spider import Spider, Task
from grab.tools.logs import default_logging
import time
import logging
from random import randint
from grab.util.py3k_support import *
URL_28K = 'http://load.local/grab.html'
def timer(func):
"""
Display time taken to execute the decorated function.
"""
def inner(*args, **kwargs):
start = time.time()
result = func(*args, **kwargs)
total = time.time() - start
print('Time: %.2f sec.' % total)
return result
return inner
class SpeedSpider(Spider):
def task_generator(self):
url_template = 'http://load.local/grab%d.html'
#fast_url = 'http://load.local/grab0.html'
slow_url = 'http://load.local/slow.html'
#yield Task('load', url=slow_url, disable_cache=True)
#yield Task('load', url=fast_url, disable_cache=False)
for x in xrange(500):
disable_flag = True#not (x % 2)
yield Task('load', url=url_template % x, disable_cache=disable_flag)
#if randint(0, 10) == 10:
#yield Task('load', url=slow_url, disable_cache=True)
def task_load(self, grab, task):
assert 'grab' in grab.response.body
print('ok', task.url)
@timer
def main():
default_logging()
bot = SpeedSpider(thread_number=30)
bot.setup_cache(database='speed_spider', use_compression=True)
bot.run()
print(bot.render_stats())
if __name__ == '__main__':
main()
## Instruction:
Change code of speed test
## Code After:
from grab.spider import Spider, Task
from grab.tools.logs import default_logging
import time
import logging
from random import randint
from grab.util.py3k_support import *
URL_28K = 'http://load.local/grab.html'
def timer(func):
"""
Display time taken to execute the decorated function.
"""
def inner(*args, **kwargs):
start = time.time()
result = func(*args, **kwargs)
total = time.time() - start
print('Time: %.2f sec.' % total)
return result
return inner
class SpeedSpider(Spider):
def task_generator(self):
url = 'http://load.local/grab.html'
for x in xrange(500):
yield Task('load', url=url)
def task_load(self, grab, task):
assert 'grab' in grab.response.body
print('ok', task.url)
@timer
def main():
default_logging()
bot = SpeedSpider(thread_number=30)
bot.run()
print(bot.render_stats())
if __name__ == '__main__':
main()
|
...
class SpeedSpider(Spider):
def task_generator(self):
url = 'http://load.local/grab.html'
for x in xrange(500):
yield Task('load', url=url)
def task_load(self, grab, task):
assert 'grab' in grab.response.body
...
def main():
default_logging()
bot = SpeedSpider(thread_number=30)
bot.run()
print(bot.render_stats())
...
|
797e9f3e4fad744e9211c07067992c245a344fb5
|
tests/test_whatcd.py
|
tests/test_whatcd.py
|
from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase, use_vcr
class TestInputWhatCD(FlexGetBase):
__yaml__ = """
tasks:
no_fields:
whatcd:
no_user:
whatcd:
password: test
no_pass:
whatcd:
username: test
"""
def test_missing_fields(self):
self.execute_task('no_fields', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no fields present'
self.execute_task('no_user', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no username'
self.execute_task('no_pass', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no password'
class TestWhatCDOnline(FlexGetBase):
__yaml__ = """
tasks:
badlogin:
whatcd:
username: invalid
password: invalid
"""
@use_vcr
def test_invalid_login(self):
self.execute_task("badlogin", abort_ok=True)
assert self.task.aborted, 'Task not aborted with invalid login credentials'
|
from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase, use_vcr
class TestWhatCDOnline(FlexGetBase):
__yaml__ = """
tasks:
badlogin:
whatcd:
username: invalid
password: invalid
"""
@use_vcr
def test_invalid_login(self):
self.execute_task("badlogin", abort_ok=True)
assert self.task.aborted, 'Task not aborted with invalid login credentials'
|
Remove schema validation unit tests frow whatcd
|
Remove schema validation unit tests frow whatcd
|
Python
|
mit
|
JorisDeRieck/Flexget,Danfocus/Flexget,qk4l/Flexget,Flexget/Flexget,JorisDeRieck/Flexget,qk4l/Flexget,ianstalk/Flexget,dsemi/Flexget,oxc/Flexget,crawln45/Flexget,qvazzler/Flexget,Flexget/Flexget,sean797/Flexget,oxc/Flexget,Flexget/Flexget,dsemi/Flexget,drwyrm/Flexget,OmgOhnoes/Flexget,drwyrm/Flexget,jacobmetrick/Flexget,jawilson/Flexget,malkavi/Flexget,crawln45/Flexget,qvazzler/Flexget,malkavi/Flexget,tarzasai/Flexget,cvium/Flexget,drwyrm/Flexget,jawilson/Flexget,LynxyssCZ/Flexget,lildadou/Flexget,cvium/Flexget,lildadou/Flexget,Pretagonist/Flexget,LynxyssCZ/Flexget,tobinjt/Flexget,poulpito/Flexget,JorisDeRieck/Flexget,LynxyssCZ/Flexget,jacobmetrick/Flexget,poulpito/Flexget,jacobmetrick/Flexget,Pretagonist/Flexget,antivirtel/Flexget,tsnoam/Flexget,Danfocus/Flexget,tobinjt/Flexget,Pretagonist/Flexget,tsnoam/Flexget,gazpachoking/Flexget,qk4l/Flexget,antivirtel/Flexget,tarzasai/Flexget,lildadou/Flexget,ianstalk/Flexget,poulpito/Flexget,tobinjt/Flexget,jawilson/Flexget,Danfocus/Flexget,Flexget/Flexget,JorisDeRieck/Flexget,oxc/Flexget,OmgOhnoes/Flexget,antivirtel/Flexget,OmgOhnoes/Flexget,jawilson/Flexget,gazpachoking/Flexget,crawln45/Flexget,cvium/Flexget,qvazzler/Flexget,tarzasai/Flexget,LynxyssCZ/Flexget,malkavi/Flexget,crawln45/Flexget,malkavi/Flexget,sean797/Flexget,tsnoam/Flexget,dsemi/Flexget,Danfocus/Flexget,sean797/Flexget,tobinjt/Flexget,ianstalk/Flexget
|
python
|
## Code Before:
from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase, use_vcr
class TestInputWhatCD(FlexGetBase):
__yaml__ = """
tasks:
no_fields:
whatcd:
no_user:
whatcd:
password: test
no_pass:
whatcd:
username: test
"""
def test_missing_fields(self):
self.execute_task('no_fields', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no fields present'
self.execute_task('no_user', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no username'
self.execute_task('no_pass', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no password'
class TestWhatCDOnline(FlexGetBase):
__yaml__ = """
tasks:
badlogin:
whatcd:
username: invalid
password: invalid
"""
@use_vcr
def test_invalid_login(self):
self.execute_task("badlogin", abort_ok=True)
assert self.task.aborted, 'Task not aborted with invalid login credentials'
## Instruction:
Remove schema validation unit tests frow whatcd
## Code After:
from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase, use_vcr
class TestWhatCDOnline(FlexGetBase):
__yaml__ = """
tasks:
badlogin:
whatcd:
username: invalid
password: invalid
"""
@use_vcr
def test_invalid_login(self):
self.execute_task("badlogin", abort_ok=True)
assert self.task.aborted, 'Task not aborted with invalid login credentials'
|
// ... existing code ...
from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase, use_vcr
class TestWhatCDOnline(FlexGetBase):
// ... rest of the code ...
|
82c2fa81721e913e54e8b3c62e3971a79cadae20
|
edu.oregonstate.cope.clientrecorder/tests/edu/oregonstate/cope/clientRecorder/RecorderPropertiesTest.java
|
edu.oregonstate.cope.clientrecorder/tests/edu/oregonstate/cope/clientRecorder/RecorderPropertiesTest.java
|
package edu.oregonstate.cope.clientRecorder;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
import edu.oregonstate.cope.tests.util.StubFileProvider;
public class RecorderPropertiesTest {
private RecorderProperties properties;
private StubFileProvider fileProvider;
@Before
public void setup() {
fileProvider = new StubFileProvider();
properties = new RecorderProperties(fileProvider);
}
@Test
public void testAddNull() {
properties.addProperty(null, null);
assertNull(properties.getProperty(null), null);
assertTrue(fileProvider.isCurrentFileEmpty());
}
@Test
public void testAddEmpty() throws Exception {
properties.addProperty("", "");
assertTrue(fileProvider.isCurrentFileEmpty());
}
@Test
public void testAddNonEmpty() throws Exception {
properties.addProperty("k", "v");
assertEquals("v", properties.getProperty("k"));
assertEquals("v", new RecorderProperties(fileProvider).getProperty("k"));
}
}
|
package edu.oregonstate.cope.clientRecorder;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
import edu.oregonstate.cope.tests.util.StubFileProvider;
public class RecorderPropertiesTest {
private RecorderProperties properties;
private StubFileProvider fileProvider;
@Before
public void setup() {
fileProvider = new StubFileProvider();
properties = new RecorderProperties(fileProvider);
}
@Test
public void testAddNull() {
properties.addProperty(null, null);
assertNull(properties.getProperty(null), null);
assertTrue(fileProvider.isCurrentFileEmpty());
}
@Test
public void testAddEmpty() throws Exception {
properties.addProperty("", "");
assertTrue(fileProvider.isCurrentFileEmpty());
}
@Test
public void testAddOneKey() throws Exception {
properties.addProperty("k", "v");
testForKey("k", "v");
}
@Test
public void testAddTwoKeys() throws Exception {
properties.addProperty("k1", "v1");
properties.addProperty("k2", "v2");
testForKey("k1", "v1");
testForKey("k2", "v2");
}
private void testForKey(String key, String value) {
assertEquals(value, properties.getProperty(key));
assertEquals(value, new RecorderProperties(fileProvider).getProperty(key));
}
}
|
Test for multiple key insertions added
|
Test for multiple key insertions added
|
Java
|
epl-1.0
|
ChangeOrientedProgrammingEnvironment/eclipseRecorder,ChangeOrientedProgrammingEnvironment/eclipseRecorder,ChangeOrientedProgrammingEnvironment/eclipseReplayer
|
java
|
## Code Before:
package edu.oregonstate.cope.clientRecorder;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
import edu.oregonstate.cope.tests.util.StubFileProvider;
public class RecorderPropertiesTest {
private RecorderProperties properties;
private StubFileProvider fileProvider;
@Before
public void setup() {
fileProvider = new StubFileProvider();
properties = new RecorderProperties(fileProvider);
}
@Test
public void testAddNull() {
properties.addProperty(null, null);
assertNull(properties.getProperty(null), null);
assertTrue(fileProvider.isCurrentFileEmpty());
}
@Test
public void testAddEmpty() throws Exception {
properties.addProperty("", "");
assertTrue(fileProvider.isCurrentFileEmpty());
}
@Test
public void testAddNonEmpty() throws Exception {
properties.addProperty("k", "v");
assertEquals("v", properties.getProperty("k"));
assertEquals("v", new RecorderProperties(fileProvider).getProperty("k"));
}
}
## Instruction:
Test for multiple key insertions added
## Code After:
package edu.oregonstate.cope.clientRecorder;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
import edu.oregonstate.cope.tests.util.StubFileProvider;
public class RecorderPropertiesTest {
private RecorderProperties properties;
private StubFileProvider fileProvider;
@Before
public void setup() {
fileProvider = new StubFileProvider();
properties = new RecorderProperties(fileProvider);
}
@Test
public void testAddNull() {
properties.addProperty(null, null);
assertNull(properties.getProperty(null), null);
assertTrue(fileProvider.isCurrentFileEmpty());
}
@Test
public void testAddEmpty() throws Exception {
properties.addProperty("", "");
assertTrue(fileProvider.isCurrentFileEmpty());
}
@Test
public void testAddOneKey() throws Exception {
properties.addProperty("k", "v");
testForKey("k", "v");
}
@Test
public void testAddTwoKeys() throws Exception {
properties.addProperty("k1", "v1");
properties.addProperty("k2", "v2");
testForKey("k1", "v1");
testForKey("k2", "v2");
}
private void testForKey(String key, String value) {
assertEquals(value, properties.getProperty(key));
assertEquals(value, new RecorderProperties(fileProvider).getProperty(key));
}
}
|
...
}
@Test
public void testAddOneKey() throws Exception {
properties.addProperty("k", "v");
testForKey("k", "v");
}
@Test
public void testAddTwoKeys() throws Exception {
properties.addProperty("k1", "v1");
properties.addProperty("k2", "v2");
testForKey("k1", "v1");
testForKey("k2", "v2");
}
private void testForKey(String key, String value) {
assertEquals(value, properties.getProperty(key));
assertEquals(value, new RecorderProperties(fileProvider).getProperty(key));
}
}
...
|
bce29e6ce7bdeeefcbdced3ea5131958e72880a1
|
to.etc.domui.demo/src/main/java/to/etc/domuidemo/pages/overview/allcomponents/FileUploadFragment.java
|
to.etc.domui.demo/src/main/java/to/etc/domuidemo/pages/overview/allcomponents/FileUploadFragment.java
|
package to.etc.domuidemo.pages.overview.allcomponents;
import to.etc.domui.component.upload.FileUpload2;
import to.etc.domui.component.upload.FileUploadMultiple;
import to.etc.domui.component2.form4.FormBuilder;
import to.etc.domui.dom.html.Div;
import to.etc.domui.dom.html.HTag;
/**
* @author <a href="mailto:[email protected]">Frits Jalvingh</a>
* Created on 14-11-17.
*/
public class FileUploadFragment extends Div {
@Override public void createContent() throws Exception {
add(new HTag(2, "File upload component").css("ui-header"));
FormBuilder fb = new FormBuilder(this);
FileUpload2 u1 = new FileUpload2("png", "jpg", "gif", "jpeg");
fb.label("Select an image").item(u1);
FileUploadMultiple u2 = new FileUploadMultiple("png", "jpg", "gif", "jpeg");
fb.label("Select multiple").item(u2);
//FileUpload u2 = new FileUpload("png", "jpg", "gif", "jpeg");
//fb.label("OLD").item(u2);
}
}
|
package to.etc.domuidemo.pages.overview.allcomponents;
import to.etc.domui.component.layout.ContentPanel;
import to.etc.domui.component.upload.FileUpload2;
import to.etc.domui.component.upload.FileUploadMultiple;
import to.etc.domui.component2.form4.FormBuilder;
import to.etc.domui.dom.html.Div;
import to.etc.domui.dom.html.HTag;
/**
* @author <a href="mailto:[email protected]">Frits Jalvingh</a>
* Created on 14-11-17.
*/
public class FileUploadFragment extends Div {
@Override public void createContent() throws Exception {
add(new HTag(2, "File upload component").css("ui-header"));
ContentPanel cp = new ContentPanel();
add(cp);
FormBuilder fb = new FormBuilder(cp);
FileUpload2 u1 = new FileUpload2("png", "jpg", "gif", "jpeg");
fb.label("Select an image").item(u1);
FileUploadMultiple u2 = new FileUploadMultiple("png", "jpg", "gif", "jpeg");
fb.label("Select multiple").item(u2);
//FileUpload u2 = new FileUpload("png", "jpg", "gif", "jpeg");
//fb.label("OLD").item(u2);
}
}
|
Fix spacing in demo for FileUpload.
|
Fix spacing in demo for FileUpload.
|
Java
|
lgpl-2.1
|
fjalvingh/domui,fjalvingh/domui,fjalvingh/domui,fjalvingh/domui,fjalvingh/domui,fjalvingh/domui,fjalvingh/domui
|
java
|
## Code Before:
package to.etc.domuidemo.pages.overview.allcomponents;
import to.etc.domui.component.upload.FileUpload2;
import to.etc.domui.component.upload.FileUploadMultiple;
import to.etc.domui.component2.form4.FormBuilder;
import to.etc.domui.dom.html.Div;
import to.etc.domui.dom.html.HTag;
/**
* @author <a href="mailto:[email protected]">Frits Jalvingh</a>
* Created on 14-11-17.
*/
public class FileUploadFragment extends Div {
@Override public void createContent() throws Exception {
add(new HTag(2, "File upload component").css("ui-header"));
FormBuilder fb = new FormBuilder(this);
FileUpload2 u1 = new FileUpload2("png", "jpg", "gif", "jpeg");
fb.label("Select an image").item(u1);
FileUploadMultiple u2 = new FileUploadMultiple("png", "jpg", "gif", "jpeg");
fb.label("Select multiple").item(u2);
//FileUpload u2 = new FileUpload("png", "jpg", "gif", "jpeg");
//fb.label("OLD").item(u2);
}
}
## Instruction:
Fix spacing in demo for FileUpload.
## Code After:
package to.etc.domuidemo.pages.overview.allcomponents;
import to.etc.domui.component.layout.ContentPanel;
import to.etc.domui.component.upload.FileUpload2;
import to.etc.domui.component.upload.FileUploadMultiple;
import to.etc.domui.component2.form4.FormBuilder;
import to.etc.domui.dom.html.Div;
import to.etc.domui.dom.html.HTag;
/**
* @author <a href="mailto:[email protected]">Frits Jalvingh</a>
* Created on 14-11-17.
*/
public class FileUploadFragment extends Div {
@Override public void createContent() throws Exception {
add(new HTag(2, "File upload component").css("ui-header"));
ContentPanel cp = new ContentPanel();
add(cp);
FormBuilder fb = new FormBuilder(cp);
FileUpload2 u1 = new FileUpload2("png", "jpg", "gif", "jpeg");
fb.label("Select an image").item(u1);
FileUploadMultiple u2 = new FileUploadMultiple("png", "jpg", "gif", "jpeg");
fb.label("Select multiple").item(u2);
//FileUpload u2 = new FileUpload("png", "jpg", "gif", "jpeg");
//fb.label("OLD").item(u2);
}
}
|
# ... existing code ...
package to.etc.domuidemo.pages.overview.allcomponents;
import to.etc.domui.component.layout.ContentPanel;
import to.etc.domui.component.upload.FileUpload2;
import to.etc.domui.component.upload.FileUploadMultiple;
import to.etc.domui.component2.form4.FormBuilder;
# ... modified code ...
@Override public void createContent() throws Exception {
add(new HTag(2, "File upload component").css("ui-header"));
ContentPanel cp = new ContentPanel();
add(cp);
FormBuilder fb = new FormBuilder(cp);
FileUpload2 u1 = new FileUpload2("png", "jpg", "gif", "jpeg");
fb.label("Select an image").item(u1);
# ... rest of the code ...
|
eafafd3d90024c552a6a607871c1441e358eb927
|
Bar.py
|
Bar.py
|
import pylab
from matplotlib import pyplot
from PlotInfo import *
class Bar(PlotInfo):
"""
A bar chart consisting of a single series of bars.
"""
def __init__(self):
PlotInfo.__init__(self, "bar")
self.width=0.8
self.color="black"
self.edgeColor=None
self.hatch=None
def draw(self, axis):
PlotInfo.draw(self, axis)
kwdict = self.getAttributes()
return [[axis.bar(self.xValues, self.yValues, **kwdict)[0]],
[self.label]]
def getAttributes(self):
kwdict = {}
kwdict["color"] = self.color
kwdict["label"] = self.label
kwdict["width"] = self.width
if self.hatch is not None:
kwdict["hatch"] = self.hatch
print >>sys.stderr, "WARNING: Setting hash for bar charts only seems to work when exporting to svg or png"
if self.edgeColor is not None:
kwdict["edgecolor"] = self.edgeColor
return kwdict
|
import pylab
from matplotlib import pyplot
from PlotInfo import *
class Bar(PlotInfo):
"""
A bar chart consisting of a single series of bars.
"""
def __init__(self):
PlotInfo.__init__(self, "bar")
self.width=0.8
self.color="black"
self.edgeColor=None
self.hatch=None
def draw(self, axis):
if self.xTickLabelPoints is None:
self.xTickLabelPoints = \
[x + (self.width / 2.0) for x in self.xValues]
if self.xTickLabels is None:
self.xTickLabels = self.xValues
PlotInfo.draw(self, axis)
kwdict = self.getAttributes()
return [[axis.bar(self.xValues, self.yValues, **kwdict)[0]],
[self.label]]
def getAttributes(self):
kwdict = {}
kwdict["color"] = self.color
kwdict["label"] = self.label
kwdict["width"] = self.width
if self.hatch is not None:
kwdict["hatch"] = self.hatch
print >>sys.stderr, "WARNING: Setting hash for bar charts only seems to work when exporting to svg or png"
if self.edgeColor is not None:
kwdict["edgecolor"] = self.edgeColor
return kwdict
|
Fix bar graph x-axis centering.
|
Fix bar graph x-axis centering.
|
Python
|
bsd-3-clause
|
alexras/boomslang
|
python
|
## Code Before:
import pylab
from matplotlib import pyplot
from PlotInfo import *
class Bar(PlotInfo):
"""
A bar chart consisting of a single series of bars.
"""
def __init__(self):
PlotInfo.__init__(self, "bar")
self.width=0.8
self.color="black"
self.edgeColor=None
self.hatch=None
def draw(self, axis):
PlotInfo.draw(self, axis)
kwdict = self.getAttributes()
return [[axis.bar(self.xValues, self.yValues, **kwdict)[0]],
[self.label]]
def getAttributes(self):
kwdict = {}
kwdict["color"] = self.color
kwdict["label"] = self.label
kwdict["width"] = self.width
if self.hatch is not None:
kwdict["hatch"] = self.hatch
print >>sys.stderr, "WARNING: Setting hash for bar charts only seems to work when exporting to svg or png"
if self.edgeColor is not None:
kwdict["edgecolor"] = self.edgeColor
return kwdict
## Instruction:
Fix bar graph x-axis centering.
## Code After:
import pylab
from matplotlib import pyplot
from PlotInfo import *
class Bar(PlotInfo):
"""
A bar chart consisting of a single series of bars.
"""
def __init__(self):
PlotInfo.__init__(self, "bar")
self.width=0.8
self.color="black"
self.edgeColor=None
self.hatch=None
def draw(self, axis):
if self.xTickLabelPoints is None:
self.xTickLabelPoints = \
[x + (self.width / 2.0) for x in self.xValues]
if self.xTickLabels is None:
self.xTickLabels = self.xValues
PlotInfo.draw(self, axis)
kwdict = self.getAttributes()
return [[axis.bar(self.xValues, self.yValues, **kwdict)[0]],
[self.label]]
def getAttributes(self):
kwdict = {}
kwdict["color"] = self.color
kwdict["label"] = self.label
kwdict["width"] = self.width
if self.hatch is not None:
kwdict["hatch"] = self.hatch
print >>sys.stderr, "WARNING: Setting hash for bar charts only seems to work when exporting to svg or png"
if self.edgeColor is not None:
kwdict["edgecolor"] = self.edgeColor
return kwdict
|
# ... existing code ...
self.hatch=None
def draw(self, axis):
if self.xTickLabelPoints is None:
self.xTickLabelPoints = \
[x + (self.width / 2.0) for x in self.xValues]
if self.xTickLabels is None:
self.xTickLabels = self.xValues
PlotInfo.draw(self, axis)
kwdict = self.getAttributes()
# ... rest of the code ...
|
59f5007787b87a37b5e5669a75d39d1d7e88e0e9
|
redfish/__init__.py
|
redfish/__init__.py
|
import pbr.version
from redfish.main import *
#import redfish.types
__version__ = pbr.version.VersionInfo('redfish').version_string()
|
import pbr.version
from redfish.main import *
#import redfish.types
try:
__version__ = pbr.version.VersionInfo('redfish').version_string()
except Exception, e:
if "Versioning for this project requires either an sdist tarball" in e.message:
pass
else:
raise
|
Fix pbr if running without git or sdist
|
Fix pbr if running without git or sdist
|
Python
|
apache-2.0
|
uggla/python-redfish,bcornec/python-redfish,bcornec/python-redfish,uggla/python-redfish,uggla/python-redfish,bcornec/python-redfish
|
python
|
## Code Before:
import pbr.version
from redfish.main import *
#import redfish.types
__version__ = pbr.version.VersionInfo('redfish').version_string()
## Instruction:
Fix pbr if running without git or sdist
## Code After:
import pbr.version
from redfish.main import *
#import redfish.types
try:
__version__ = pbr.version.VersionInfo('redfish').version_string()
except Exception, e:
if "Versioning for this project requires either an sdist tarball" in e.message:
pass
else:
raise
|
// ... existing code ...
from redfish.main import *
#import redfish.types
try:
__version__ = pbr.version.VersionInfo('redfish').version_string()
except Exception, e:
if "Versioning for this project requires either an sdist tarball" in e.message:
pass
else:
raise
// ... rest of the code ...
|
3a6d76201104b928c1b9053317c9e61804814ff5
|
pyresticd.py
|
pyresticd.py
|
import os
import getpass
import time
from twisted.internet import task
from twisted.internet import reactor
# Configuration
timeout = 3600*24*3 # Period
restic_command = "/home/mebus/restic" # your restic command here
# Program
def do_restic_backup():
print "\nStarting Backup at " + str(time.ctime())
os.system(restic_command)
print "\nRestic Scheduler\n----------------------------\n"
print "Timout ist: " + str(timeout)
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password
l = task.LoopingCall(do_restic_backup)
l.start(timeout)
reactor.run()
|
import os
import getpass
import time
from twisted.internet import task
from twisted.internet import reactor
# Configuration
timeout = 3600*24*3 # Period
restic_command = "/home/mebus/restic" # your restic command here
# Program
def do_restic_backup():
print('Starting Backup at {}'.format(time.ctime()))
os.system(restic_command)
print('Restic Scheduler')
print('-' * 30)
print('Timeout: {}'.format(timeout))
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password
l = task.LoopingCall(do_restic_backup)
l.start(timeout)
reactor.run()
|
Use py3-style print and string-formatting
|
Use py3-style print and string-formatting
|
Python
|
mit
|
Mebus/pyresticd,Mebus/pyresticd
|
python
|
## Code Before:
import os
import getpass
import time
from twisted.internet import task
from twisted.internet import reactor
# Configuration
timeout = 3600*24*3 # Period
restic_command = "/home/mebus/restic" # your restic command here
# Program
def do_restic_backup():
print "\nStarting Backup at " + str(time.ctime())
os.system(restic_command)
print "\nRestic Scheduler\n----------------------------\n"
print "Timout ist: " + str(timeout)
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password
l = task.LoopingCall(do_restic_backup)
l.start(timeout)
reactor.run()
## Instruction:
Use py3-style print and string-formatting
## Code After:
import os
import getpass
import time
from twisted.internet import task
from twisted.internet import reactor
# Configuration
timeout = 3600*24*3 # Period
restic_command = "/home/mebus/restic" # your restic command here
# Program
def do_restic_backup():
print('Starting Backup at {}'.format(time.ctime()))
os.system(restic_command)
print('Restic Scheduler')
print('-' * 30)
print('Timeout: {}'.format(timeout))
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password
l = task.LoopingCall(do_restic_backup)
l.start(timeout)
reactor.run()
|
# ... existing code ...
from twisted.internet import reactor
# Configuration
timeout = 3600*24*3 # Period
restic_command = "/home/mebus/restic" # your restic command here
# ... modified code ...
# Program
def do_restic_backup():
print('Starting Backup at {}'.format(time.ctime()))
os.system(restic_command)
print('Restic Scheduler')
print('-' * 30)
print('Timeout: {}'.format(timeout))
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password
# ... rest of the code ...
|
37eee0a8a597efae04476781e435b44066f0f1e9
|
src/main/java/edu/northwestern/bioinformatics/studycalendar/dao/PlannedActivityDao.java
|
src/main/java/edu/northwestern/bioinformatics/studycalendar/dao/PlannedActivityDao.java
|
package edu.northwestern.bioinformatics.studycalendar.dao;
import edu.northwestern.bioinformatics.studycalendar.domain.PlannedActivity;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
/**
* @author Rhett Sutphin
*/
@Transactional (readOnly=true)
public class PlannedActivityDao extends StudyCalendarMutableDomainObjectDao<PlannedActivity> {
@Override
public Class<PlannedActivity> domainClass() {
return PlannedActivity.class;
}
@Transactional(readOnly=false)
public void delete(PlannedActivity event) {
getHibernateTemplate().delete(event);
}
public List<PlannedActivity> getPlannedActivitiesForAcivity(Integer activityId) {
return (List<PlannedActivity>) getHibernateTemplate().find("select pa from PlannedActivity as pa where pa.activity.id=?",activityId);
}
}
|
package edu.northwestern.bioinformatics.studycalendar.dao;
import edu.northwestern.bioinformatics.studycalendar.domain.PlannedActivity;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
/**
* @author Rhett Sutphin
*/
@Transactional (readOnly=true)
public class PlannedActivityDao extends StudyCalendarMutableDomainObjectDao<PlannedActivity> {
@Override
public Class<PlannedActivity> domainClass() {
return PlannedActivity.class;
}
/**
* Deletes a planned activity
*
* @param event the planned activity to delete
*/
@Transactional(readOnly=false)
public void delete(PlannedActivity event) {
getHibernateTemplate().delete(event);
}
/**
* Finds all planned activities for a activity id
*
* @param activityId the activity id to search with
* @return a list of planned activities with the activity id passed in
*/
public List<PlannedActivity> getPlannedActivitiesForAcivity(Integer activityId) {
return (List<PlannedActivity>) getHibernateTemplate().find("select pa from PlannedActivity as pa where pa.activity.id=?",activityId);
}
}
|
Add javadoc to planned activity dao.
|
Add javadoc to planned activity dao.
git-svn-id: 4b387fe5ada7764508e2ca96c335714e4c1692c6@2431 0d517254-b314-0410-acde-c619094fa49f
|
Java
|
bsd-3-clause
|
NUBIC/psc-mirror,NUBIC/psc-mirror,NUBIC/psc-mirror,NUBIC/psc-mirror
|
java
|
## Code Before:
package edu.northwestern.bioinformatics.studycalendar.dao;
import edu.northwestern.bioinformatics.studycalendar.domain.PlannedActivity;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
/**
* @author Rhett Sutphin
*/
@Transactional (readOnly=true)
public class PlannedActivityDao extends StudyCalendarMutableDomainObjectDao<PlannedActivity> {
@Override
public Class<PlannedActivity> domainClass() {
return PlannedActivity.class;
}
@Transactional(readOnly=false)
public void delete(PlannedActivity event) {
getHibernateTemplate().delete(event);
}
public List<PlannedActivity> getPlannedActivitiesForAcivity(Integer activityId) {
return (List<PlannedActivity>) getHibernateTemplate().find("select pa from PlannedActivity as pa where pa.activity.id=?",activityId);
}
}
## Instruction:
Add javadoc to planned activity dao.
git-svn-id: 4b387fe5ada7764508e2ca96c335714e4c1692c6@2431 0d517254-b314-0410-acde-c619094fa49f
## Code After:
package edu.northwestern.bioinformatics.studycalendar.dao;
import edu.northwestern.bioinformatics.studycalendar.domain.PlannedActivity;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
/**
* @author Rhett Sutphin
*/
@Transactional (readOnly=true)
public class PlannedActivityDao extends StudyCalendarMutableDomainObjectDao<PlannedActivity> {
@Override
public Class<PlannedActivity> domainClass() {
return PlannedActivity.class;
}
/**
* Deletes a planned activity
*
* @param event the planned activity to delete
*/
@Transactional(readOnly=false)
public void delete(PlannedActivity event) {
getHibernateTemplate().delete(event);
}
/**
* Finds all planned activities for a activity id
*
* @param activityId the activity id to search with
* @return a list of planned activities with the activity id passed in
*/
public List<PlannedActivity> getPlannedActivitiesForAcivity(Integer activityId) {
return (List<PlannedActivity>) getHibernateTemplate().find("select pa from PlannedActivity as pa where pa.activity.id=?",activityId);
}
}
|
// ... existing code ...
return PlannedActivity.class;
}
/**
* Deletes a planned activity
*
* @param event the planned activity to delete
*/
@Transactional(readOnly=false)
public void delete(PlannedActivity event) {
getHibernateTemplate().delete(event);
}
/**
* Finds all planned activities for a activity id
*
* @param activityId the activity id to search with
* @return a list of planned activities with the activity id passed in
*/
public List<PlannedActivity> getPlannedActivitiesForAcivity(Integer activityId) {
return (List<PlannedActivity>) getHibernateTemplate().find("select pa from PlannedActivity as pa where pa.activity.id=?",activityId);
}
}
// ... rest of the code ...
|
a14602c40e482905e393dcfe497c1105b157494c
|
reon-api-java/src/main/java/io/reon/auth/AbstractAuth.java
|
reon-api-java/src/main/java/io/reon/auth/AbstractAuth.java
|
package io.reon.auth;
import java.util.HashMap;
import io.reon.util.Hex;
public abstract class AbstractAuth implements HttpAuth {
public static final String REALM = "realm";
public static final String RESPONSE = "response";
public static final String NONCE = "nonce";
public static final String PK = "pk";
public static final String EQ = "=\"";
public static final String EOS = "\", ";
public static final String EOT = "\"";
public static HashMap<String, String> parse(String text) {
String[] fields = text.substring(text.indexOf(' ')+1).split(",");
HashMap<String, String> attrs = new HashMap<>();
for(String field: fields) {
String[] kv = field.split("=");
attrs.put(kv[0].trim(), kv[1].replace('"',' ').trim());
}
return attrs;
}
public static byte[] fromHex(String value) {
return Hex.decodeHex(value);
}
public static String toHex(byte[] value) {
return Hex.encodeHex(value, false);
}
}
|
package io.reon.auth;
import java.util.HashMap;
import io.reon.util.Hex;
public abstract class AbstractAuth implements HttpAuth {
public static final String REALM = "realm";
public static final String RESPONSE = "response";
public static final String NONCE = "nonce";
public static final String PK = "pk";
public static final String EQ = "=\"";
public static final String EOS = "\", ";
public static final String EOT = "\"";
public static HashMap<String, String> parse(String text) {
HashMap<String, String> attrs = new HashMap<>();
try {
String[] fields = text.substring(text.indexOf(' ') + 1).split(",");
for (String field : fields) {
String[] kv = field.split("=");
attrs.put(kv[0].trim(), kv[1].replace('"', ' ').trim());
}
} catch (RuntimeException ex) {
// ignore
}
return attrs;
}
public static byte[] fromHex(String value) {
return Hex.decodeHex(value);
}
public static String toHex(byte[] value) {
return Hex.encodeHex(value, false);
}
}
|
Fix for crash over badly formatted auth requests
|
Fix for crash over badly formatted auth requests
|
Java
|
apache-2.0
|
reon-io/sdk,reon-io/reon-sdk
|
java
|
## Code Before:
package io.reon.auth;
import java.util.HashMap;
import io.reon.util.Hex;
public abstract class AbstractAuth implements HttpAuth {
public static final String REALM = "realm";
public static final String RESPONSE = "response";
public static final String NONCE = "nonce";
public static final String PK = "pk";
public static final String EQ = "=\"";
public static final String EOS = "\", ";
public static final String EOT = "\"";
public static HashMap<String, String> parse(String text) {
String[] fields = text.substring(text.indexOf(' ')+1).split(",");
HashMap<String, String> attrs = new HashMap<>();
for(String field: fields) {
String[] kv = field.split("=");
attrs.put(kv[0].trim(), kv[1].replace('"',' ').trim());
}
return attrs;
}
public static byte[] fromHex(String value) {
return Hex.decodeHex(value);
}
public static String toHex(byte[] value) {
return Hex.encodeHex(value, false);
}
}
## Instruction:
Fix for crash over badly formatted auth requests
## Code After:
package io.reon.auth;
import java.util.HashMap;
import io.reon.util.Hex;
public abstract class AbstractAuth implements HttpAuth {
public static final String REALM = "realm";
public static final String RESPONSE = "response";
public static final String NONCE = "nonce";
public static final String PK = "pk";
public static final String EQ = "=\"";
public static final String EOS = "\", ";
public static final String EOT = "\"";
public static HashMap<String, String> parse(String text) {
HashMap<String, String> attrs = new HashMap<>();
try {
String[] fields = text.substring(text.indexOf(' ') + 1).split(",");
for (String field : fields) {
String[] kv = field.split("=");
attrs.put(kv[0].trim(), kv[1].replace('"', ' ').trim());
}
} catch (RuntimeException ex) {
// ignore
}
return attrs;
}
public static byte[] fromHex(String value) {
return Hex.decodeHex(value);
}
public static String toHex(byte[] value) {
return Hex.encodeHex(value, false);
}
}
|
...
public static final String EOT = "\"";
public static HashMap<String, String> parse(String text) {
HashMap<String, String> attrs = new HashMap<>();
try {
String[] fields = text.substring(text.indexOf(' ') + 1).split(",");
for (String field : fields) {
String[] kv = field.split("=");
attrs.put(kv[0].trim(), kv[1].replace('"', ' ').trim());
}
} catch (RuntimeException ex) {
// ignore
}
return attrs;
}
...
|
fb2cfe4759fb98de644932af17a247428b2cc0f5
|
api/auth.py
|
api/auth.py
|
from django.http import HttpResponseForbidden
from django.contrib.auth.models import AnonymousUser
from api.models import AuthAPIKey
class APIKeyAuthentication(object):
def is_authenticated(self, request):
params = {}
for key,value in request.GET.items():
params[key.lower()] = value
if params['apikey']:
try:
keyobj = AuthAPIKey.objects.get(key=params['apikey'])
except:
keyobj = None
if keyobj and keyobj.active:
request.user = AnonymousUser()
return True
return False
def challenge(self):
return HttpResponseForbidden('Access Denied, use a API Key')
|
from django.http import HttpResponseForbidden
from django.contrib.auth.models import AnonymousUser
from api.models import AuthAPIKey
class APIKeyAuthentication(object):
def is_authenticated(self, request):
params = {}
for key,value in request.GET.items():
params[key.lower()] = value
if 'apikey' in params:
try:
keyobj = AuthAPIKey.objects.get(key=params['apikey'])
except:
keyobj = None
if keyobj and keyobj.active:
request.user = AnonymousUser()
return True
return False
def challenge(self):
return HttpResponseForbidden('Access Denied, use a API Key')
|
Fix Auth API key check causing error 500s
|
Fix Auth API key check causing error 500s
|
Python
|
bsd-3-clause
|
nikdoof/test-auth
|
python
|
## Code Before:
from django.http import HttpResponseForbidden
from django.contrib.auth.models import AnonymousUser
from api.models import AuthAPIKey
class APIKeyAuthentication(object):
def is_authenticated(self, request):
params = {}
for key,value in request.GET.items():
params[key.lower()] = value
if params['apikey']:
try:
keyobj = AuthAPIKey.objects.get(key=params['apikey'])
except:
keyobj = None
if keyobj and keyobj.active:
request.user = AnonymousUser()
return True
return False
def challenge(self):
return HttpResponseForbidden('Access Denied, use a API Key')
## Instruction:
Fix Auth API key check causing error 500s
## Code After:
from django.http import HttpResponseForbidden
from django.contrib.auth.models import AnonymousUser
from api.models import AuthAPIKey
class APIKeyAuthentication(object):
def is_authenticated(self, request):
params = {}
for key,value in request.GET.items():
params[key.lower()] = value
if 'apikey' in params:
try:
keyobj = AuthAPIKey.objects.get(key=params['apikey'])
except:
keyobj = None
if keyobj and keyobj.active:
request.user = AnonymousUser()
return True
return False
def challenge(self):
return HttpResponseForbidden('Access Denied, use a API Key')
|
# ... existing code ...
for key,value in request.GET.items():
params[key.lower()] = value
if 'apikey' in params:
try:
keyobj = AuthAPIKey.objects.get(key=params['apikey'])
except:
# ... rest of the code ...
|
b24de72fff7731063d283c3520bbeb2c6920981f
|
src/main/java/com/hp/autonomy/types/requests/idol/actions/tags/params/GetQueryTagValuesParams.java
|
src/main/java/com/hp/autonomy/types/requests/idol/actions/tags/params/GetQueryTagValuesParams.java
|
/*
* Copyright 2015-2016 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.types.requests.idol.actions.tags.params;
public enum GetQueryTagValuesParams {
AllowNonParametricFields,
DateOffset,
DatePeriod,
DocumentCount,
FieldDependence,
FieldDependenceMultiLevel,
FieldName,
MaxValues,
Merge,
Ranges,
RestrictedValues,
Sort,
Start,
Synchronous,
TotalValues,
ValueDetails;
public static GetQueryTagValuesParams fromValue(final String value) {
for (final GetQueryTagValuesParams param : values()) {
if (param.name().equalsIgnoreCase(value)) {
return param;
}
}
throw new IllegalArgumentException("Unknown parameter " + value);
}
}
|
/*
* Copyright 2015-2016 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.types.requests.idol.actions.tags.params;
public enum GetQueryTagValuesParams {
AllowNonParametricFields,
CustomWeight,
DateOffset,
DatePeriod,
DocumentCount,
FieldDependence,
FieldDependenceMultiLevel,
FieldName,
MaxValues,
Merge,
Predict,
Ranges,
RestrictedValues,
Sort,
Start,
Synchronous,
TotalValues,
ValueDetails,
ValuePercentiles,
ValueRestriction;
public static GetQueryTagValuesParams fromValue(final String value) {
for (final GetQueryTagValuesParams param : values()) {
if (param.name().equalsIgnoreCase(value)) {
return param;
}
}
throw new IllegalArgumentException("Unknown parameter " + value);
}
}
|
Add support for new ValueRestriction QueryTagValues parameter in HavenSearchComponents (FIND-975)
|
Add support for new ValueRestriction QueryTagValues parameter in HavenSearchComponents (FIND-975)
Add new parameters for GetQueryTagValues
[rev. matthew.gordon]
|
Java
|
mit
|
hpautonomy/java-aci-types,hpe-idol/java-aci-types,hpe-idol/java-aci-types,hpautonomy/java-aci-types
|
java
|
## Code Before:
/*
* Copyright 2015-2016 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.types.requests.idol.actions.tags.params;
public enum GetQueryTagValuesParams {
AllowNonParametricFields,
DateOffset,
DatePeriod,
DocumentCount,
FieldDependence,
FieldDependenceMultiLevel,
FieldName,
MaxValues,
Merge,
Ranges,
RestrictedValues,
Sort,
Start,
Synchronous,
TotalValues,
ValueDetails;
public static GetQueryTagValuesParams fromValue(final String value) {
for (final GetQueryTagValuesParams param : values()) {
if (param.name().equalsIgnoreCase(value)) {
return param;
}
}
throw new IllegalArgumentException("Unknown parameter " + value);
}
}
## Instruction:
Add support for new ValueRestriction QueryTagValues parameter in HavenSearchComponents (FIND-975)
Add new parameters for GetQueryTagValues
[rev. matthew.gordon]
## Code After:
/*
* Copyright 2015-2016 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.types.requests.idol.actions.tags.params;
public enum GetQueryTagValuesParams {
AllowNonParametricFields,
CustomWeight,
DateOffset,
DatePeriod,
DocumentCount,
FieldDependence,
FieldDependenceMultiLevel,
FieldName,
MaxValues,
Merge,
Predict,
Ranges,
RestrictedValues,
Sort,
Start,
Synchronous,
TotalValues,
ValueDetails,
ValuePercentiles,
ValueRestriction;
public static GetQueryTagValuesParams fromValue(final String value) {
for (final GetQueryTagValuesParams param : values()) {
if (param.name().equalsIgnoreCase(value)) {
return param;
}
}
throw new IllegalArgumentException("Unknown parameter " + value);
}
}
|
# ... existing code ...
public enum GetQueryTagValuesParams {
AllowNonParametricFields,
CustomWeight,
DateOffset,
DatePeriod,
DocumentCount,
# ... modified code ...
FieldName,
MaxValues,
Merge,
Predict,
Ranges,
RestrictedValues,
Sort,
...
Start,
Synchronous,
TotalValues,
ValueDetails,
ValuePercentiles,
ValueRestriction;
public static GetQueryTagValuesParams fromValue(final String value) {
for (final GetQueryTagValuesParams param : values()) {
# ... rest of the code ...
|
eae0aef4f5149febf7a881799a33b4eacd967b7a
|
src/test/net/sf/clirr/ant/ChangeCounterTest.java
|
src/test/net/sf/clirr/ant/ChangeCounterTest.java
|
package net.sf.clirr.ant;
import junit.framework.TestCase;
import net.sf.clirr.event.ApiDifference;
import net.sf.clirr.event.Severity;
public class ChangeCounterTest extends TestCase
{
public void testCorrectCounting()
{
ChangeCounter counter = new ChangeCounter();
counter.reportDiff(new ApiDifference("blah", Severity.WARNING, "Test", null, null));
counter.reportDiff(new ApiDifference("blah", Severity.ERROR, "Test", null, null));
counter.reportDiff(new ApiDifference("blah", Severity.INFO, "Test", null, null));
counter.reportDiff(new ApiDifference("blah", Severity.ERROR, "Test", null, null));
counter.reportDiff(new ApiDifference("blah", Severity.ERROR, "Test", null, null));
counter.reportDiff(new ApiDifference("blah", Severity.WARNING, "Test", null, null));
assertEquals("number of expected errors", 3, counter.getBinErrors());
assertEquals("number of expected warnings", 2, counter.getBinWarnings());
assertEquals("number of expected infos", 1, counter.getBinInfos());
}
}
|
package net.sf.clirr.ant;
import junit.framework.TestCase;
import net.sf.clirr.event.ApiDifference;
import net.sf.clirr.event.Severity;
import net.sf.clirr.event.Message;
public class ChangeCounterTest extends TestCase
{
public void testCorrectCounting()
{
// a dummy message object
Message msg = new Message(0, false);
ChangeCounter counter = new ChangeCounter();
counter.reportDiff(new ApiDifference(msg, Severity.WARNING, "Test", null, null, null));
counter.reportDiff(new ApiDifference(msg, Severity.ERROR, "Test", null, null, null));
counter.reportDiff(new ApiDifference(msg, Severity.INFO, "Test", null, null, null));
counter.reportDiff(new ApiDifference(msg, Severity.ERROR, "Test", null, null, null));
counter.reportDiff(new ApiDifference(msg, Severity.ERROR, "Test", null, null, null));
counter.reportDiff(new ApiDifference(msg, Severity.WARNING, "Test", null, null, null));
assertEquals("number of expected errors", 3, counter.getBinErrors());
assertEquals("number of expected warnings", 2, counter.getBinWarnings());
assertEquals("number of expected infos", 1, counter.getBinInfos());
}
}
|
Use dummy Message object to be compatible with new ApiDifference API
|
Use dummy Message object to be compatible with new ApiDifference API
|
Java
|
apache-2.0
|
lukegb/clirr
|
java
|
## Code Before:
package net.sf.clirr.ant;
import junit.framework.TestCase;
import net.sf.clirr.event.ApiDifference;
import net.sf.clirr.event.Severity;
public class ChangeCounterTest extends TestCase
{
public void testCorrectCounting()
{
ChangeCounter counter = new ChangeCounter();
counter.reportDiff(new ApiDifference("blah", Severity.WARNING, "Test", null, null));
counter.reportDiff(new ApiDifference("blah", Severity.ERROR, "Test", null, null));
counter.reportDiff(new ApiDifference("blah", Severity.INFO, "Test", null, null));
counter.reportDiff(new ApiDifference("blah", Severity.ERROR, "Test", null, null));
counter.reportDiff(new ApiDifference("blah", Severity.ERROR, "Test", null, null));
counter.reportDiff(new ApiDifference("blah", Severity.WARNING, "Test", null, null));
assertEquals("number of expected errors", 3, counter.getBinErrors());
assertEquals("number of expected warnings", 2, counter.getBinWarnings());
assertEquals("number of expected infos", 1, counter.getBinInfos());
}
}
## Instruction:
Use dummy Message object to be compatible with new ApiDifference API
## Code After:
package net.sf.clirr.ant;
import junit.framework.TestCase;
import net.sf.clirr.event.ApiDifference;
import net.sf.clirr.event.Severity;
import net.sf.clirr.event.Message;
public class ChangeCounterTest extends TestCase
{
public void testCorrectCounting()
{
// a dummy message object
Message msg = new Message(0, false);
ChangeCounter counter = new ChangeCounter();
counter.reportDiff(new ApiDifference(msg, Severity.WARNING, "Test", null, null, null));
counter.reportDiff(new ApiDifference(msg, Severity.ERROR, "Test", null, null, null));
counter.reportDiff(new ApiDifference(msg, Severity.INFO, "Test", null, null, null));
counter.reportDiff(new ApiDifference(msg, Severity.ERROR, "Test", null, null, null));
counter.reportDiff(new ApiDifference(msg, Severity.ERROR, "Test", null, null, null));
counter.reportDiff(new ApiDifference(msg, Severity.WARNING, "Test", null, null, null));
assertEquals("number of expected errors", 3, counter.getBinErrors());
assertEquals("number of expected warnings", 2, counter.getBinWarnings());
assertEquals("number of expected infos", 1, counter.getBinInfos());
}
}
|
...
import junit.framework.TestCase;
import net.sf.clirr.event.ApiDifference;
import net.sf.clirr.event.Severity;
import net.sf.clirr.event.Message;
public class ChangeCounterTest extends TestCase
{
public void testCorrectCounting()
{
// a dummy message object
Message msg = new Message(0, false);
ChangeCounter counter = new ChangeCounter();
counter.reportDiff(new ApiDifference(msg, Severity.WARNING, "Test", null, null, null));
counter.reportDiff(new ApiDifference(msg, Severity.ERROR, "Test", null, null, null));
counter.reportDiff(new ApiDifference(msg, Severity.INFO, "Test", null, null, null));
counter.reportDiff(new ApiDifference(msg, Severity.ERROR, "Test", null, null, null));
counter.reportDiff(new ApiDifference(msg, Severity.ERROR, "Test", null, null, null));
counter.reportDiff(new ApiDifference(msg, Severity.WARNING, "Test", null, null, null));
assertEquals("number of expected errors", 3, counter.getBinErrors());
assertEquals("number of expected warnings", 2, counter.getBinWarnings());
assertEquals("number of expected infos", 1, counter.getBinInfos());
...
|
72045f86b25b396160e1a4c9237e977ed575afb2
|
apps/catalogue/constants.py
|
apps/catalogue/constants.py
|
from django.utils.translation import ugettext_lazy as _
LICENSES = {
'http://creativecommons.org/licenses/by-sa/3.0/': {
'icon': 'cc-by-sa',
'description': _('Creative Commons Attribution-ShareAlike 3.0 Unported'),
},
}
# Those will be generated only for books with own HTML.
EBOOK_FORMATS_WITHOUT_CHILDREN = ['txt', 'fb2']
# Those will be generated for all books.
EBOOK_FORMATS_WITH_CHILDREN = ['pdf', 'epub', 'mobi']
# Those will be generated when inherited cover changes.
EBOOK_FORMATS_WITH_COVERS = ['pdf', 'epub', 'mobi']
EBOOK_FORMATS = EBOOK_FORMATS_WITHOUT_CHILDREN + EBOOK_FORMATS_WITH_CHILDREN
|
from django.utils.translation import ugettext_lazy as _
LICENSES = {
'http://creativecommons.org/licenses/by-sa/3.0/': {
'icon': 'cc-by-sa',
'description': _('Creative Commons Attribution-ShareAlike 3.0 Unported'),
},
}
LICENSES['http://creativecommons.org/licenses/by-sa/3.0/deed.pl'] = \
LICENSES['http://creativecommons.org/licenses/by-sa/3.0/']
# Those will be generated only for books with own HTML.
EBOOK_FORMATS_WITHOUT_CHILDREN = ['txt', 'fb2']
# Those will be generated for all books.
EBOOK_FORMATS_WITH_CHILDREN = ['pdf', 'epub', 'mobi']
# Those will be generated when inherited cover changes.
EBOOK_FORMATS_WITH_COVERS = ['pdf', 'epub', 'mobi']
EBOOK_FORMATS = EBOOK_FORMATS_WITHOUT_CHILDREN + EBOOK_FORMATS_WITH_CHILDREN
|
Support for 'deed.pl' license URL.
|
Support for 'deed.pl' license URL.
|
Python
|
agpl-3.0
|
fnp/wolnelektury,fnp/wolnelektury,fnp/wolnelektury,fnp/wolnelektury
|
python
|
## Code Before:
from django.utils.translation import ugettext_lazy as _
LICENSES = {
'http://creativecommons.org/licenses/by-sa/3.0/': {
'icon': 'cc-by-sa',
'description': _('Creative Commons Attribution-ShareAlike 3.0 Unported'),
},
}
# Those will be generated only for books with own HTML.
EBOOK_FORMATS_WITHOUT_CHILDREN = ['txt', 'fb2']
# Those will be generated for all books.
EBOOK_FORMATS_WITH_CHILDREN = ['pdf', 'epub', 'mobi']
# Those will be generated when inherited cover changes.
EBOOK_FORMATS_WITH_COVERS = ['pdf', 'epub', 'mobi']
EBOOK_FORMATS = EBOOK_FORMATS_WITHOUT_CHILDREN + EBOOK_FORMATS_WITH_CHILDREN
## Instruction:
Support for 'deed.pl' license URL.
## Code After:
from django.utils.translation import ugettext_lazy as _
LICENSES = {
'http://creativecommons.org/licenses/by-sa/3.0/': {
'icon': 'cc-by-sa',
'description': _('Creative Commons Attribution-ShareAlike 3.0 Unported'),
},
}
LICENSES['http://creativecommons.org/licenses/by-sa/3.0/deed.pl'] = \
LICENSES['http://creativecommons.org/licenses/by-sa/3.0/']
# Those will be generated only for books with own HTML.
EBOOK_FORMATS_WITHOUT_CHILDREN = ['txt', 'fb2']
# Those will be generated for all books.
EBOOK_FORMATS_WITH_CHILDREN = ['pdf', 'epub', 'mobi']
# Those will be generated when inherited cover changes.
EBOOK_FORMATS_WITH_COVERS = ['pdf', 'epub', 'mobi']
EBOOK_FORMATS = EBOOK_FORMATS_WITHOUT_CHILDREN + EBOOK_FORMATS_WITH_CHILDREN
|
# ... existing code ...
'description': _('Creative Commons Attribution-ShareAlike 3.0 Unported'),
},
}
LICENSES['http://creativecommons.org/licenses/by-sa/3.0/deed.pl'] = \
LICENSES['http://creativecommons.org/licenses/by-sa/3.0/']
# Those will be generated only for books with own HTML.
EBOOK_FORMATS_WITHOUT_CHILDREN = ['txt', 'fb2']
# ... rest of the code ...
|
4efa9c87264eabb6712f4fb787ab0de42be18de6
|
places/urls.py
|
places/urls.py
|
from django.conf.urls import url
from . import views
app_name = 'places'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'),
]
|
from django.urls import include, path
from . import views
app_name = 'places'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.PlaceView.as_view(), name='place'),
]
|
Move places urlpatterns to Django 2.0 preferred method
|
Move places urlpatterns to Django 2.0 preferred method
|
Python
|
mit
|
evanepio/dotmanca,evanepio/dotmanca,evanepio/dotmanca
|
python
|
## Code Before:
from django.conf.urls import url
from . import views
app_name = 'places'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'),
]
## Instruction:
Move places urlpatterns to Django 2.0 preferred method
## Code After:
from django.urls import include, path
from . import views
app_name = 'places'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.PlaceView.as_view(), name='place'),
]
|
# ... existing code ...
from django.urls import include, path
from . import views
app_name = 'places'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.PlaceView.as_view(), name='place'),
]
# ... rest of the code ...
|
8bafd7d816991e89b8599aff4f5a1ef6d27dc80e
|
kerberos5/include/crypto-headers.h
|
kerberos5/include/crypto-headers.h
|
/* $FreeBSD$ */
#ifndef __crypto_headers_h__
#define __crypto_headers_h__
#include <openssl/des.h>
#include <openssl/rc4.h>
#include <openssl/md4.h>
#include <openssl/md5.h>
#include <openssl/sha.h>
#endif /* __crypto_headers_h__ */
|
/* $FreeBSD$ */
#ifndef __crypto_headers_h__
#define __crypto_headers_h__
#define OPENSSL_DES_LIBDES_COMPATIBILITY
#include <openssl/des.h>
#include <openssl/rc4.h>
#include <openssl/md4.h>
#include <openssl/md5.h>
#include <openssl/sha.h>
#endif /* __crypto_headers_h__ */
|
Define OPENSSL_DES_LIBDES_COMPATIBILITY so that Heimdal will build with OpenSSL 0.9.7 when it is imported. (This currently has no effect.)
|
Define OPENSSL_DES_LIBDES_COMPATIBILITY so that Heimdal will build with
OpenSSL 0.9.7 when it is imported. (This currently has no effect.)
|
C
|
bsd-3-clause
|
jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase
|
c
|
## Code Before:
/* $FreeBSD$ */
#ifndef __crypto_headers_h__
#define __crypto_headers_h__
#include <openssl/des.h>
#include <openssl/rc4.h>
#include <openssl/md4.h>
#include <openssl/md5.h>
#include <openssl/sha.h>
#endif /* __crypto_headers_h__ */
## Instruction:
Define OPENSSL_DES_LIBDES_COMPATIBILITY so that Heimdal will build with
OpenSSL 0.9.7 when it is imported. (This currently has no effect.)
## Code After:
/* $FreeBSD$ */
#ifndef __crypto_headers_h__
#define __crypto_headers_h__
#define OPENSSL_DES_LIBDES_COMPATIBILITY
#include <openssl/des.h>
#include <openssl/rc4.h>
#include <openssl/md4.h>
#include <openssl/md5.h>
#include <openssl/sha.h>
#endif /* __crypto_headers_h__ */
|
...
/* $FreeBSD$ */
#ifndef __crypto_headers_h__
#define __crypto_headers_h__
#define OPENSSL_DES_LIBDES_COMPATIBILITY
#include <openssl/des.h>
#include <openssl/rc4.h>
#include <openssl/md4.h>
...
|
d57bf8b0995496d8cabc81410154ab64a0673e01
|
databroker/sources/dummy_sources/_metadataStore/api/analysis.py
|
databroker/sources/dummy_sources/_metadataStore/api/analysis.py
|
headers = []
beamline_configs = []
event_descriptors = []
events = []
def find2(header_id=None, scan_id=None, owner=None, start_time=None,
beamline_id=None, end_time=None):
return {'headers': headers, 'beamline_configs': beamline_configs,
'event_descriptors': event_descriptors, 'events': events}
|
from datetime import datetime as dt
class DummyEventDescriptor(object):
def __init__(self):
self.keys = {'temp': {'source': 'PV:blah'},
'picture': {'source': 'CCD:blah',
'external': 'FILESTORE!!!!'}}
class DummyEvent(object):
def __init__(self):
self.ev_desc = DummyEventDescriptor()
self.data = {'temp': {'value': 273, 'timestamp': None},
'picture': {'value': 'np.ones((10, 10))',
'timestamp': None}}
self.time = dt(2014, 01, 01, 1, 2, 3)
def find(header_id=None, scan_id=None, owner=None, start_time=None,
beamline_id=None, end_time=None):
return 3 * [DummyEvent()]
|
Update dummy MDS to use find and return a list of events.
|
Update dummy MDS to use find and return a list of events.
|
Python
|
bsd-3-clause
|
ericdill/databroker,NSLS-II/datamuxer,ericdill/datamuxer,NSLS-II/dataportal,NSLS-II/dataportal,ericdill/databroker,danielballan/datamuxer,danielballan/dataportal,danielballan/dataportal,tacaswell/dataportal,ericdill/datamuxer,tacaswell/dataportal,danielballan/datamuxer
|
python
|
## Code Before:
headers = []
beamline_configs = []
event_descriptors = []
events = []
def find2(header_id=None, scan_id=None, owner=None, start_time=None,
beamline_id=None, end_time=None):
return {'headers': headers, 'beamline_configs': beamline_configs,
'event_descriptors': event_descriptors, 'events': events}
## Instruction:
Update dummy MDS to use find and return a list of events.
## Code After:
from datetime import datetime as dt
class DummyEventDescriptor(object):
def __init__(self):
self.keys = {'temp': {'source': 'PV:blah'},
'picture': {'source': 'CCD:blah',
'external': 'FILESTORE!!!!'}}
class DummyEvent(object):
def __init__(self):
self.ev_desc = DummyEventDescriptor()
self.data = {'temp': {'value': 273, 'timestamp': None},
'picture': {'value': 'np.ones((10, 10))',
'timestamp': None}}
self.time = dt(2014, 01, 01, 1, 2, 3)
def find(header_id=None, scan_id=None, owner=None, start_time=None,
beamline_id=None, end_time=None):
return 3 * [DummyEvent()]
|
...
from datetime import datetime as dt
class DummyEventDescriptor(object):
def __init__(self):
self.keys = {'temp': {'source': 'PV:blah'},
'picture': {'source': 'CCD:blah',
'external': 'FILESTORE!!!!'}}
class DummyEvent(object):
def __init__(self):
self.ev_desc = DummyEventDescriptor()
self.data = {'temp': {'value': 273, 'timestamp': None},
'picture': {'value': 'np.ones((10, 10))',
'timestamp': None}}
self.time = dt(2014, 01, 01, 1, 2, 3)
def find(header_id=None, scan_id=None, owner=None, start_time=None,
beamline_id=None, end_time=None):
return 3 * [DummyEvent()]
...
|
b836b2c39299cc6dbcbdbc8bcffe046f25909edc
|
test_portend.py
|
test_portend.py
|
import socket
import pytest
import portend
def socket_infos():
"""
Generate addr infos for connections to localhost
"""
host = ''
port = portend.find_available_local_port()
return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
def id_for_info(info):
af, = info[:1]
return str(af)
def build_listening_infos():
params = list(socket_infos())
ids = list(map(id_for_info, params))
return locals()
@pytest.fixture(**build_listening_infos())
def listening_addr(request):
af, socktype, proto, canonname, sa = request.param
sock = socket.socket(af, socktype, proto)
sock.bind(sa)
sock.listen(5)
try:
yield sa
finally:
sock.close()
class TestCheckPort:
def test_check_port_listening(self, listening_addr):
with pytest.raises(IOError):
portend._check_port(*listening_addr[:2])
|
import socket
import pytest
import portend
def socket_infos():
"""
Generate addr infos for connections to localhost
"""
host = ''
port = portend.find_available_local_port()
return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
def id_for_info(info):
af, = info[:1]
return str(af)
def build_addr_infos():
params = list(socket_infos())
ids = list(map(id_for_info, params))
return locals()
@pytest.fixture(**build_addr_infos())
def listening_addr(request):
af, socktype, proto, canonname, sa = request.param
sock = socket.socket(af, socktype, proto)
sock.bind(sa)
sock.listen(5)
try:
yield sa
finally:
sock.close()
@pytest.fixture(**build_addr_infos())
def nonlistening_addr(request):
af, socktype, proto, canonname, sa = request.param
return sa
class TestCheckPort:
def test_check_port_listening(self, listening_addr):
with pytest.raises(IOError):
portend._check_port(*listening_addr[:2])
def test_check_port_nonlistening(self, nonlistening_addr):
portend._check_port(*nonlistening_addr[:2])
|
Add tests for nonlistening addresses as well.
|
Add tests for nonlistening addresses as well.
|
Python
|
mit
|
jaraco/portend
|
python
|
## Code Before:
import socket
import pytest
import portend
def socket_infos():
"""
Generate addr infos for connections to localhost
"""
host = ''
port = portend.find_available_local_port()
return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
def id_for_info(info):
af, = info[:1]
return str(af)
def build_listening_infos():
params = list(socket_infos())
ids = list(map(id_for_info, params))
return locals()
@pytest.fixture(**build_listening_infos())
def listening_addr(request):
af, socktype, proto, canonname, sa = request.param
sock = socket.socket(af, socktype, proto)
sock.bind(sa)
sock.listen(5)
try:
yield sa
finally:
sock.close()
class TestCheckPort:
def test_check_port_listening(self, listening_addr):
with pytest.raises(IOError):
portend._check_port(*listening_addr[:2])
## Instruction:
Add tests for nonlistening addresses as well.
## Code After:
import socket
import pytest
import portend
def socket_infos():
"""
Generate addr infos for connections to localhost
"""
host = ''
port = portend.find_available_local_port()
return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
def id_for_info(info):
af, = info[:1]
return str(af)
def build_addr_infos():
params = list(socket_infos())
ids = list(map(id_for_info, params))
return locals()
@pytest.fixture(**build_addr_infos())
def listening_addr(request):
af, socktype, proto, canonname, sa = request.param
sock = socket.socket(af, socktype, proto)
sock.bind(sa)
sock.listen(5)
try:
yield sa
finally:
sock.close()
@pytest.fixture(**build_addr_infos())
def nonlistening_addr(request):
af, socktype, proto, canonname, sa = request.param
return sa
class TestCheckPort:
def test_check_port_listening(self, listening_addr):
with pytest.raises(IOError):
portend._check_port(*listening_addr[:2])
def test_check_port_nonlistening(self, nonlistening_addr):
portend._check_port(*nonlistening_addr[:2])
|
# ... existing code ...
af, = info[:1]
return str(af)
def build_addr_infos():
params = list(socket_infos())
ids = list(map(id_for_info, params))
return locals()
@pytest.fixture(**build_addr_infos())
def listening_addr(request):
af, socktype, proto, canonname, sa = request.param
sock = socket.socket(af, socktype, proto)
# ... modified code ...
sock.close()
@pytest.fixture(**build_addr_infos())
def nonlistening_addr(request):
af, socktype, proto, canonname, sa = request.param
return sa
class TestCheckPort:
def test_check_port_listening(self, listening_addr):
with pytest.raises(IOError):
portend._check_port(*listening_addr[:2])
def test_check_port_nonlistening(self, nonlistening_addr):
portend._check_port(*nonlistening_addr[:2])
# ... rest of the code ...
|
09668c1818ef028e10669b9652e2f0ae255cc47e
|
src/pyscaffold/extensions/no_skeleton.py
|
src/pyscaffold/extensions/no_skeleton.py
|
from ..api import Extension, helpers
class NoSkeleton(Extension):
"""Omit creation of skeleton.py and test_skeleton.py"""
def activate(self, actions):
"""Activate extension
Args:
actions (list): list of actions to perform
Returns:
list: updated list of actions
"""
return self.register(
actions,
self.remove_files,
after='define_structure')
def remove_files(self, struct, opts):
"""Remove all skeleton files from structure
Args:
struct (dict): project representation as (possibly) nested
:obj:`dict`.
opts (dict): given options, see :obj:`create_project` for
an extensive list.
Returns:
struct, opts: updated project representation and options
"""
# Namespace is not yet applied so deleting from package is enough
file = [opts['project'], 'src', opts['package'], 'skeleton.py']
struct = helpers.reject(struct, file)
file = [opts['project'], 'tests', 'test_skeleton.py']
struct = helpers.reject(struct, file)
return struct, opts
|
from pathlib import PurePath as Path
from ..api import Extension, helpers
class NoSkeleton(Extension):
"""Omit creation of skeleton.py and test_skeleton.py"""
def activate(self, actions):
"""Activate extension
Args:
actions (list): list of actions to perform
Returns:
list: updated list of actions
"""
return self.register(
actions,
self.remove_files,
after='define_structure')
def remove_files(self, struct, opts):
"""Remove all skeleton files from structure
Args:
struct (dict): project representation as (possibly) nested
:obj:`dict`.
opts (dict): given options, see :obj:`create_project` for
an extensive list.
Returns:
struct, opts: updated project representation and options
"""
# Namespace is not yet applied so deleting from package is enough
file = Path(opts['project'], 'src', opts['package'], 'skeleton.py')
struct = helpers.reject(struct, file)
file = Path(opts['project'], 'tests', 'test_skeleton.py')
struct = helpers.reject(struct, file)
return struct, opts
|
Change extensions to use pathlib in helpers instead of lists
|
Change extensions to use pathlib in helpers instead of lists
|
Python
|
mit
|
blue-yonder/pyscaffold,blue-yonder/pyscaffold
|
python
|
## Code Before:
from ..api import Extension, helpers
class NoSkeleton(Extension):
"""Omit creation of skeleton.py and test_skeleton.py"""
def activate(self, actions):
"""Activate extension
Args:
actions (list): list of actions to perform
Returns:
list: updated list of actions
"""
return self.register(
actions,
self.remove_files,
after='define_structure')
def remove_files(self, struct, opts):
"""Remove all skeleton files from structure
Args:
struct (dict): project representation as (possibly) nested
:obj:`dict`.
opts (dict): given options, see :obj:`create_project` for
an extensive list.
Returns:
struct, opts: updated project representation and options
"""
# Namespace is not yet applied so deleting from package is enough
file = [opts['project'], 'src', opts['package'], 'skeleton.py']
struct = helpers.reject(struct, file)
file = [opts['project'], 'tests', 'test_skeleton.py']
struct = helpers.reject(struct, file)
return struct, opts
## Instruction:
Change extensions to use pathlib in helpers instead of lists
## Code After:
from pathlib import PurePath as Path
from ..api import Extension, helpers
class NoSkeleton(Extension):
"""Omit creation of skeleton.py and test_skeleton.py"""
def activate(self, actions):
"""Activate extension
Args:
actions (list): list of actions to perform
Returns:
list: updated list of actions
"""
return self.register(
actions,
self.remove_files,
after='define_structure')
def remove_files(self, struct, opts):
"""Remove all skeleton files from structure
Args:
struct (dict): project representation as (possibly) nested
:obj:`dict`.
opts (dict): given options, see :obj:`create_project` for
an extensive list.
Returns:
struct, opts: updated project representation and options
"""
# Namespace is not yet applied so deleting from package is enough
file = Path(opts['project'], 'src', opts['package'], 'skeleton.py')
struct = helpers.reject(struct, file)
file = Path(opts['project'], 'tests', 'test_skeleton.py')
struct = helpers.reject(struct, file)
return struct, opts
|
...
from pathlib import PurePath as Path
from ..api import Extension, helpers
...
struct, opts: updated project representation and options
"""
# Namespace is not yet applied so deleting from package is enough
file = Path(opts['project'], 'src', opts['package'], 'skeleton.py')
struct = helpers.reject(struct, file)
file = Path(opts['project'], 'tests', 'test_skeleton.py')
struct = helpers.reject(struct, file)
return struct, opts
...
|
8307590d20f3a2bdb7efaa7679bfd37d83358475
|
setup.py
|
setup.py
|
import os
from distutils.core import setup
from evelink import __version__
__readme_path = os.path.join(os.path.dirname(__file__), "README.md")
__readme_contents = open(__readme_path).read()
setup(
name="EVELink",
version=__version__,
description="Python Bindings for the EVE Online API",
long_description=__readme_contents,
license="MIT License",
author="Valkyries of Night",
author_email="[email protected]",
maintainer="Amber Yust",
maintainer_email="[email protected]",
url="https://github.com/eve-val/evelink",
download_url="https://github.com/eve-val/evelink/downloads",
packages=[
"evelink",
"evelink.cache",
"evelink.parsing",
],
scripts=["bin/evelink"],
provides=["evelink"],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Games/Entertainment",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
# vim: set et ts=4 sts=4 sw=4:
|
import os
from distutils.core import setup
from evelink import __version__
__readme_path = os.path.join(os.path.dirname(__file__), "README.md")
__readme_contents = open(__readme_path).read()
setup(
name="EVELink",
version=__version__,
description="Python Bindings for the EVE Online API",
long_description=__readme_contents,
license="MIT License",
author="Valkyries of Night",
author_email="[email protected]",
maintainer="Amber Yust",
maintainer_email="[email protected]",
url="https://github.com/eve-val/evelink",
download_url="https://github.com/eve-val/evelink/downloads",
packages=[
"evelink",
"evelink.cache",
"evelink.parsing",
],
data_files=[
('', ['README.md', 'LICENSE']),
],
scripts=["bin/evelink"],
provides=["evelink"],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Games/Entertainment",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
# vim: set et ts=4 sts=4 sw=4:
|
Include README.md and LICENSE in the package
|
Include README.md and LICENSE in the package
|
Python
|
mit
|
bastianh/evelink,Morloth1274/EVE-Online-POCO-manager,FashtimeDotCom/evelink,ayust/evelink,zigdon/evelink
|
python
|
## Code Before:
import os
from distutils.core import setup
from evelink import __version__
__readme_path = os.path.join(os.path.dirname(__file__), "README.md")
__readme_contents = open(__readme_path).read()
setup(
name="EVELink",
version=__version__,
description="Python Bindings for the EVE Online API",
long_description=__readme_contents,
license="MIT License",
author="Valkyries of Night",
author_email="[email protected]",
maintainer="Amber Yust",
maintainer_email="[email protected]",
url="https://github.com/eve-val/evelink",
download_url="https://github.com/eve-val/evelink/downloads",
packages=[
"evelink",
"evelink.cache",
"evelink.parsing",
],
scripts=["bin/evelink"],
provides=["evelink"],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Games/Entertainment",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
# vim: set et ts=4 sts=4 sw=4:
## Instruction:
Include README.md and LICENSE in the package
## Code After:
import os
from distutils.core import setup
from evelink import __version__
__readme_path = os.path.join(os.path.dirname(__file__), "README.md")
__readme_contents = open(__readme_path).read()
setup(
name="EVELink",
version=__version__,
description="Python Bindings for the EVE Online API",
long_description=__readme_contents,
license="MIT License",
author="Valkyries of Night",
author_email="[email protected]",
maintainer="Amber Yust",
maintainer_email="[email protected]",
url="https://github.com/eve-val/evelink",
download_url="https://github.com/eve-val/evelink/downloads",
packages=[
"evelink",
"evelink.cache",
"evelink.parsing",
],
data_files=[
('', ['README.md', 'LICENSE']),
],
scripts=["bin/evelink"],
provides=["evelink"],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Games/Entertainment",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
# vim: set et ts=4 sts=4 sw=4:
|
...
"evelink.cache",
"evelink.parsing",
],
data_files=[
('', ['README.md', 'LICENSE']),
],
scripts=["bin/evelink"],
provides=["evelink"],
classifiers=[
...
|
716d967971d9ea23ab54d327231ba873b681a7c7
|
isserviceup/services/models/service.py
|
isserviceup/services/models/service.py
|
from enum import Enum
class Status(Enum):
ok = 1 # green
maintenance = 2 # blue
minor = 3 # yellow
major = 4 # orange
critical = 5 # red
unavailable = 6 # gray
class Service(object):
@property
def id(self):
return self.__class__.__name__
@property
def status_url(self):
raise NotImplemented()
@property
def name(self):
raise NotImplemented()
def get_status(self):
raise NotImplemented()
|
from enum import Enum
class Status(Enum):
ok = 1 # green
maintenance = 2 # blue
minor = 3 # yellow
major = 4 # orange
critical = 5 # red
unavailable = 6 # gray
class Service(object):
@property
def id(self):
return self.__class__.__name__
@property
def status_url(self):
raise NotImplemented()
@property
def icon_url(self):
raise NotImplemented()
@property
def name(self):
raise NotImplemented()
def get_status(self):
raise NotImplemented()
|
Add icon_url as abstract property
|
Add icon_url as abstract property
|
Python
|
apache-2.0
|
marcopaz/is-service-up,marcopaz/is-service-up,marcopaz/is-service-up
|
python
|
## Code Before:
from enum import Enum
class Status(Enum):
ok = 1 # green
maintenance = 2 # blue
minor = 3 # yellow
major = 4 # orange
critical = 5 # red
unavailable = 6 # gray
class Service(object):
@property
def id(self):
return self.__class__.__name__
@property
def status_url(self):
raise NotImplemented()
@property
def name(self):
raise NotImplemented()
def get_status(self):
raise NotImplemented()
## Instruction:
Add icon_url as abstract property
## Code After:
from enum import Enum
class Status(Enum):
ok = 1 # green
maintenance = 2 # blue
minor = 3 # yellow
major = 4 # orange
critical = 5 # red
unavailable = 6 # gray
class Service(object):
@property
def id(self):
return self.__class__.__name__
@property
def status_url(self):
raise NotImplemented()
@property
def icon_url(self):
raise NotImplemented()
@property
def name(self):
raise NotImplemented()
def get_status(self):
raise NotImplemented()
|
// ... existing code ...
raise NotImplemented()
@property
def icon_url(self):
raise NotImplemented()
@property
def name(self):
raise NotImplemented()
// ... rest of the code ...
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.