commit stringlengths 40–40 | old_file stringlengths 4–234 | new_file stringlengths 4–234 | old_contents stringlengths 10–3.01k | new_contents stringlengths 19–3.38k | subject stringlengths 16–736 | message stringlengths 17–2.63k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5–82.6k | config stringclasses 4 values | content stringlengths 134–4.41k | fuzzy_diff stringlengths 29–3.44k |
---|---|---|---|---|---|---|---|---|---|---|---|---|
c135e9ac8fead8e9e58d2f34e5aa66354bd1b996
|
tests/test_route_requester.py
|
tests/test_route_requester.py
|
import unittest
from pydirections.route_requester import DirectionsRequest
from pydirections.exceptions import InvalidModeError, InvalidAPIKeyError, InvalidAlternativeError
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA")
class TestOptionalParameters(unittest.TestCase):
def test_invalid_mode(self):
"""
Tests the is_valid_mode function for an invalid input
"""
with self.assertRaises(InvalidModeError):
requester.set_mode("flying")
def test_invalid_alternative(self):
"""
Tests for error handling when an invalid value is provided to
the set_alternative function
"""
with self.assertRaises(InvalidAlternativeError):
requester.set_alternatives('False')
def test_invalid_restrictions(self):
"""
Tests for invalid route restrictions
"""
with self.assertRaises(ValueError):
requester.set_route_restrictions("freeways", "railways")
class TestAPIKey(unittest.TestCase):
def test_invalid_api_key(self):
invalid_key = 123456
with self.assertRaises(InvalidAPIKeyError):
requester.set_api_key(invalid_key)
if __name__ == '__main__':
unittest.main()
|
import unittest
from pydirections.route_requester import DirectionsRequest
from pydirections.exceptions import InvalidModeError, InvalidAPIKeyError, InvalidAlternativeError
import os
MAPS_API_KEY = os.environ['MAPS_API_KEY']
class TestOptionalParameters(unittest.TestCase):
def test_invalid_mode(self):
"""
Tests the is_valid_mode function for an invalid input
"""
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
with self.assertRaises(InvalidModeError):
requester.set_mode("flying")
def test_invalid_alternative(self):
"""
Tests for error handling when an invalid value is provided to
the set_alternative function
"""
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
with self.assertRaises(InvalidAlternativeError):
requester.set_alternatives('False')
def test_invalid_restrictions(self):
"""
Tests for invalid route restrictions
"""
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
with self.assertRaises(ValueError):
requester.set_route_restrictions("freeways", "railways")
class TestAPIKey(unittest.TestCase):
def test_invalid_api_key(self):
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
invalid_key = 123456
with self.assertRaises(InvalidAPIKeyError):
requester.set_api_key(invalid_key)
if __name__ == '__main__':
unittest.main()
|
Refactor tests to include API KEY
|
Refactor tests to include API KEY
|
Python
|
apache-2.0
|
apranav19/pydirections
|
python
|
## Code Before:
import unittest
from pydirections.route_requester import DirectionsRequest
from pydirections.exceptions import InvalidModeError, InvalidAPIKeyError, InvalidAlternativeError
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA")
class TestOptionalParameters(unittest.TestCase):
def test_invalid_mode(self):
"""
Tests the is_valid_mode function for an invalid input
"""
with self.assertRaises(InvalidModeError):
requester.set_mode("flying")
def test_invalid_alternative(self):
"""
Tests for error handling when an invalid value is provided to
the set_alternative function
"""
with self.assertRaises(InvalidAlternativeError):
requester.set_alternatives('False')
def test_invalid_restrictions(self):
"""
Tests for invalid route restrictions
"""
with self.assertRaises(ValueError):
requester.set_route_restrictions("freeways", "railways")
class TestAPIKey(unittest.TestCase):
def test_invalid_api_key(self):
invalid_key = 123456
with self.assertRaises(InvalidAPIKeyError):
requester.set_api_key(invalid_key)
if __name__ == '__main__':
unittest.main()
## Instruction:
Refactor tests to include API KEY
## Code After:
import unittest
from pydirections.route_requester import DirectionsRequest
from pydirections.exceptions import InvalidModeError, InvalidAPIKeyError, InvalidAlternativeError
import os
MAPS_API_KEY = os.environ['MAPS_API_KEY']
class TestOptionalParameters(unittest.TestCase):
def test_invalid_mode(self):
"""
Tests the is_valid_mode function for an invalid input
"""
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
with self.assertRaises(InvalidModeError):
requester.set_mode("flying")
def test_invalid_alternative(self):
"""
Tests for error handling when an invalid value is provided to
the set_alternative function
"""
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
with self.assertRaises(InvalidAlternativeError):
requester.set_alternatives('False')
def test_invalid_restrictions(self):
"""
Tests for invalid route restrictions
"""
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
with self.assertRaises(ValueError):
requester.set_route_restrictions("freeways", "railways")
class TestAPIKey(unittest.TestCase):
def test_invalid_api_key(self):
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
invalid_key = 123456
with self.assertRaises(InvalidAPIKeyError):
requester.set_api_key(invalid_key)
if __name__ == '__main__':
unittest.main()
|
...
import unittest
from pydirections.route_requester import DirectionsRequest
from pydirections.exceptions import InvalidModeError, InvalidAPIKeyError, InvalidAlternativeError
import os
MAPS_API_KEY = os.environ['MAPS_API_KEY']
class TestOptionalParameters(unittest.TestCase):
def test_invalid_mode(self):
...
"""
Tests the is_valid_mode function for an invalid input
"""
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
with self.assertRaises(InvalidModeError):
requester.set_mode("flying")
...
Tests for error handling when an invalid value is provided to
the set_alternative function
"""
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
with self.assertRaises(InvalidAlternativeError):
requester.set_alternatives('False')
...
"""
Tests for invalid route restrictions
"""
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
with self.assertRaises(ValueError):
requester.set_route_restrictions("freeways", "railways")
class TestAPIKey(unittest.TestCase):
def test_invalid_api_key(self):
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
invalid_key = 123456
with self.assertRaises(InvalidAPIKeyError):
requester.set_api_key(invalid_key)
...
|
244b39f45693627566d262e3a9a78519f4fe71ff
|
src/main/java/com/magnet/mmx/protocol/Count.java
|
src/main/java/com/magnet/mmx/protocol/Count.java
|
package com.magnet.mmx.protocol;
/**
* Count object to represent the results of the operation
*/
public class Count {
private int requested;
private int sent;
private int unsent;
public Count(int requested, int sent, int unsent) {
this.requested = requested;
this.sent = sent;
this.unsent = unsent;
}
public Count() {
}
public int getRequested() {
return requested;
}
public void setRequested(int requested) {
this.requested = requested;
}
public int getSent() {
return sent;
}
public void setSent(int sent) {
this.sent = sent;
}
public int getUnsent() {
return unsent;
}
public void setUnsent(int unsent) {
this.unsent = unsent;
}
}
|
package com.magnet.mmx.protocol;
/**
* Count object to represent the results of the operation
*/
public class Count {
private int requested;
private int sent;
private int unsent;
public Count(int requested, int sent, int unsent) {
this.requested = requested;
this.sent = sent;
this.unsent = unsent;
}
public Count() {
}
public int getRequested() {
return requested;
}
public void setRequested(int requested) {
this.requested = requested;
}
public int getSent() {
return sent;
}
public void setSent(int sent) {
this.sent = sent;
}
public int getUnsent() {
return unsent;
}
public void setUnsent(int unsent) {
this.unsent = unsent;
}
@Override
public String toString() {
return "[requested="+requested+", sent="+sent+", unsent="+unsent+"]";
}
}
|
Add a toString() for debugging purpose.
|
Add a toString() for debugging purpose.
|
Java
|
apache-2.0
|
magnetsystems/message-common
|
java
|
## Code Before:
package com.magnet.mmx.protocol;
/**
* Count object to represent the results of the operation
*/
public class Count {
private int requested;
private int sent;
private int unsent;
public Count(int requested, int sent, int unsent) {
this.requested = requested;
this.sent = sent;
this.unsent = unsent;
}
public Count() {
}
public int getRequested() {
return requested;
}
public void setRequested(int requested) {
this.requested = requested;
}
public int getSent() {
return sent;
}
public void setSent(int sent) {
this.sent = sent;
}
public int getUnsent() {
return unsent;
}
public void setUnsent(int unsent) {
this.unsent = unsent;
}
}
## Instruction:
Add a toString() for debugging purpose.
## Code After:
package com.magnet.mmx.protocol;
/**
* Count object to represent the results of the operation
*/
public class Count {
private int requested;
private int sent;
private int unsent;
public Count(int requested, int sent, int unsent) {
this.requested = requested;
this.sent = sent;
this.unsent = unsent;
}
public Count() {
}
public int getRequested() {
return requested;
}
public void setRequested(int requested) {
this.requested = requested;
}
public int getSent() {
return sent;
}
public void setSent(int sent) {
this.sent = sent;
}
public int getUnsent() {
return unsent;
}
public void setUnsent(int unsent) {
this.unsent = unsent;
}
@Override
public String toString() {
return "[requested="+requested+", sent="+sent+", unsent="+unsent+"]";
}
}
|
// ... existing code ...
public void setUnsent(int unsent) {
this.unsent = unsent;
}
@Override
public String toString() {
return "[requested="+requested+", sent="+sent+", unsent="+unsent+"]";
}
}
// ... rest of the code ...
|
d56b16212167742937ab5b18508f75248830179b
|
src/main/java/prm4j/indexing/realtime/LowLevelBinding.java
|
src/main/java/prm4j/indexing/realtime/LowLevelBinding.java
|
/*
* Copyright (c) 2012 Mateusz Parzonka, Eric Bodden
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Mateusz Parzonka - initial API and implementation
*/
package prm4j.indexing.realtime;
import java.lang.ref.WeakReference;
import prm4j.indexing.map.MinimalMapEntry;
/**
* A binding used by optimized indexing strategies.
*/
public interface LowLevelBinding extends prm4j.api.Binding, MinimalMapEntry<Object, LowLevelBinding>{
/**
* Releases all resources used in the indexing data structure and/or notifies monitors about unreachability of the
* parameter object. Amount of released resources can vary strongly with the implementation.
*/
void release();
/**
* Register a map where this binding is used.
*
* @param mapReference
*/
void registerNode(WeakReference<Node> nodeReference); // TODO resource registration
boolean isDisabled();
void setDisabled(boolean disable);
long getTimestamp();
void setTimestamp(long timestamp);
}
|
/*
* Copyright (c) 2012 Mateusz Parzonka, Eric Bodden
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Mateusz Parzonka - initial API and implementation
*/
package prm4j.indexing.realtime;
import java.lang.ref.WeakReference;
import prm4j.indexing.map.MinimalMapEntry;
/**
* A binding used by optimized indexing strategies.
*/
public interface LowLevelBinding extends prm4j.api.Binding, MinimalMapEntry<Object, LowLevelBinding>{
/**
* Releases all resources used in the indexing data structure and/or notifies monitors about unreachability of the
* parameter object. Amount of released resources can vary strongly with the implementation.
*/
void release();
/**
* Register a map which uses this binding as key.
*
* @param nodeRef
*/
void registerNode(WeakReference<Node> nodeRef);
boolean isDisabled();
void setDisabled(boolean disable);
long getTimestamp();
void setTimestamp(long timestamp);
}
|
Remove todo and add comment
|
Remove todo and add comment
|
Java
|
epl-1.0
|
parzonka/prm4j
|
java
|
## Code Before:
/*
* Copyright (c) 2012 Mateusz Parzonka, Eric Bodden
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Mateusz Parzonka - initial API and implementation
*/
package prm4j.indexing.realtime;
import java.lang.ref.WeakReference;
import prm4j.indexing.map.MinimalMapEntry;
/**
* A binding used by optimized indexing strategies.
*/
public interface LowLevelBinding extends prm4j.api.Binding, MinimalMapEntry<Object, LowLevelBinding>{
/**
* Releases all resources used in the indexing data structure and/or notifies monitors about unreachability of the
* parameter object. Amount of released resources can vary strongly with the implementation.
*/
void release();
/**
* Register a map where this binding is used.
*
* @param mapReference
*/
void registerNode(WeakReference<Node> nodeReference); // TODO resource registration
boolean isDisabled();
void setDisabled(boolean disable);
long getTimestamp();
void setTimestamp(long timestamp);
}
## Instruction:
Remove todo and add comment
## Code After:
/*
* Copyright (c) 2012 Mateusz Parzonka, Eric Bodden
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Mateusz Parzonka - initial API and implementation
*/
package prm4j.indexing.realtime;
import java.lang.ref.WeakReference;
import prm4j.indexing.map.MinimalMapEntry;
/**
* A binding used by optimized indexing strategies.
*/
public interface LowLevelBinding extends prm4j.api.Binding, MinimalMapEntry<Object, LowLevelBinding>{
/**
* Releases all resources used in the indexing data structure and/or notifies monitors about unreachability of the
* parameter object. Amount of released resources can vary strongly with the implementation.
*/
void release();
/**
* Register a map which uses this binding as key.
*
* @param nodeRef
*/
void registerNode(WeakReference<Node> nodeRef);
boolean isDisabled();
void setDisabled(boolean disable);
long getTimestamp();
void setTimestamp(long timestamp);
}
|
...
void release();
/**
* Register a map which uses this binding as key.
*
* @param nodeRef
*/
void registerNode(WeakReference<Node> nodeRef);
boolean isDisabled();
...
|
133889c632db26636690c7b464af8cb567465563
|
android/src/main/java/com/bottomsheetbehavior/CoordinatorLayoutManager.java
|
android/src/main/java/com/bottomsheetbehavior/CoordinatorLayoutManager.java
|
package com.bottomsheetbehavior;
import android.support.design.widget.CoordinatorLayout;
import android.view.View;
import com.facebook.react.uimanager.ViewGroupManager;
import com.facebook.react.uimanager.annotations.ReactProp;
import com.facebook.react.uimanager.ThemedReactContext;
public class CoordinatorLayoutManager extends ViewGroupManager<CoordinatorLayoutView> {
private final static String REACT_CLASS = "RCTCoordinatorLayoutAndroid";
@Override
public String getName() {
return REACT_CLASS;
}
@Override
public CoordinatorLayoutView createViewInstance(ThemedReactContext context) {
return new CoordinatorLayoutView(context);
}
@Override
public void addView(CoordinatorLayoutView parent, View child, int index) {
super.addView(parent, child, index);
// Sets FloatingActionButton anchor automatically
if (child instanceof FloatingActionButtonView) {
boolean autoAnchor = ((FloatingActionButtonView) child).getAutoAnchor();
for (int i = 0; i < parent.getChildCount(); i++) {
View childView = parent.getChildAt(i);
if (childView instanceof BottomSheetBehaviorView && autoAnchor) {
int bottomSheetId = childView.getId();
((CoordinatorLayout.LayoutParams) child.getLayoutParams()).setAnchorId(bottomSheetId);
}
}
}
}
@Override
public boolean needsCustomLayoutForChildren() {
return true;
}
}
|
package com.bottomsheetbehavior;
import android.support.design.widget.CoordinatorLayout;
import android.view.View;
import com.facebook.react.uimanager.ViewGroupManager;
import com.facebook.react.uimanager.annotations.ReactProp;
import com.facebook.react.uimanager.ThemedReactContext;
public class CoordinatorLayoutManager extends ViewGroupManager<CoordinatorLayoutView> {
private final static String REACT_CLASS = "RCTCoordinatorLayoutAndroid";
@Override
public String getName() {
return REACT_CLASS;
}
@Override
public CoordinatorLayoutView createViewInstance(ThemedReactContext context) {
return new CoordinatorLayoutView(context);
}
@Override
public void addView(CoordinatorLayoutView parent, View child, int index) {
super.addView(parent, child, index);
// Sets FloatingActionButton anchor automatically
if (child instanceof FloatingActionButtonView) {
if (((FloatingActionButtonView) child).getAutoAnchor()) {
for (int i = 0; i < parent.getChildCount(); i++) {
View childView = parent.getChildAt(i);
if (childView instanceof BottomSheetBehaviorView) {
int bottomSheetId = childView.getId();
((CoordinatorLayout.LayoutParams) child.getLayoutParams()).setAnchorId(bottomSheetId);
}
}
}
}
}
@Override
public boolean needsCustomLayoutForChildren() {
return true;
}
}
|
Check autoAnchor before child iteration
|
Check autoAnchor before child iteration
|
Java
|
mit
|
cesardeazevedo/react-native-bottom-sheet-behavior,cesardeazevedo/react-native-bottom-sheet-behavior,cesardeazevedo/react-native-bottom-sheet-behavior
|
java
|
## Code Before:
package com.bottomsheetbehavior;
import android.support.design.widget.CoordinatorLayout;
import android.view.View;
import com.facebook.react.uimanager.ViewGroupManager;
import com.facebook.react.uimanager.annotations.ReactProp;
import com.facebook.react.uimanager.ThemedReactContext;
public class CoordinatorLayoutManager extends ViewGroupManager<CoordinatorLayoutView> {
private final static String REACT_CLASS = "RCTCoordinatorLayoutAndroid";
@Override
public String getName() {
return REACT_CLASS;
}
@Override
public CoordinatorLayoutView createViewInstance(ThemedReactContext context) {
return new CoordinatorLayoutView(context);
}
@Override
public void addView(CoordinatorLayoutView parent, View child, int index) {
super.addView(parent, child, index);
// Sets FloatingActionButton anchor automatically
if (child instanceof FloatingActionButtonView) {
boolean autoAnchor = ((FloatingActionButtonView) child).getAutoAnchor();
for (int i = 0; i < parent.getChildCount(); i++) {
View childView = parent.getChildAt(i);
if (childView instanceof BottomSheetBehaviorView && autoAnchor) {
int bottomSheetId = childView.getId();
((CoordinatorLayout.LayoutParams) child.getLayoutParams()).setAnchorId(bottomSheetId);
}
}
}
}
@Override
public boolean needsCustomLayoutForChildren() {
return true;
}
}
## Instruction:
Check autoAnchor before child iteration
## Code After:
package com.bottomsheetbehavior;
import android.support.design.widget.CoordinatorLayout;
import android.view.View;
import com.facebook.react.uimanager.ViewGroupManager;
import com.facebook.react.uimanager.annotations.ReactProp;
import com.facebook.react.uimanager.ThemedReactContext;
public class CoordinatorLayoutManager extends ViewGroupManager<CoordinatorLayoutView> {
private final static String REACT_CLASS = "RCTCoordinatorLayoutAndroid";
@Override
public String getName() {
return REACT_CLASS;
}
@Override
public CoordinatorLayoutView createViewInstance(ThemedReactContext context) {
return new CoordinatorLayoutView(context);
}
@Override
public void addView(CoordinatorLayoutView parent, View child, int index) {
super.addView(parent, child, index);
// Sets FloatingActionButton anchor automatically
if (child instanceof FloatingActionButtonView) {
if (((FloatingActionButtonView) child).getAutoAnchor()) {
for (int i = 0; i < parent.getChildCount(); i++) {
View childView = parent.getChildAt(i);
if (childView instanceof BottomSheetBehaviorView) {
int bottomSheetId = childView.getId();
((CoordinatorLayout.LayoutParams) child.getLayoutParams()).setAnchorId(bottomSheetId);
}
}
}
}
}
@Override
public boolean needsCustomLayoutForChildren() {
return true;
}
}
|
# ... existing code ...
// Sets FloatingActionButton anchor automatically
if (child instanceof FloatingActionButtonView) {
if (((FloatingActionButtonView) child).getAutoAnchor()) {
for (int i = 0; i < parent.getChildCount(); i++) {
View childView = parent.getChildAt(i);
if (childView instanceof BottomSheetBehaviorView) {
int bottomSheetId = childView.getId();
((CoordinatorLayout.LayoutParams) child.getLayoutParams()).setAnchorId(bottomSheetId);
}
}
}
}
# ... rest of the code ...
|
31c68ae56801377327e2cc0901222a9d961a6502
|
tests/integration/test_skytap.py
|
tests/integration/test_skytap.py
|
from xblockutils.studio_editable_test import StudioEditableBaseTest
class TestSkytap(StudioEditableBaseTest):
"""
Integration tests for the Skytap XBlock.
"""
|
from xblockutils.studio_editable_test import StudioEditableBaseTest
class TestSkytap(StudioEditableBaseTest):
"""
Integration tests for the Skytap XBlock.
"""
def test_keyboard_layouts(self):
"""
"""
pass
|
Add stub for integration test.
|
Add stub for integration test.
|
Python
|
agpl-3.0
|
open-craft/xblock-skytap,open-craft/xblock-skytap,open-craft/xblock-skytap
|
python
|
## Code Before:
from xblockutils.studio_editable_test import StudioEditableBaseTest
class TestSkytap(StudioEditableBaseTest):
"""
Integration tests for the Skytap XBlock.
"""
## Instruction:
Add stub for integration test.
## Code After:
from xblockutils.studio_editable_test import StudioEditableBaseTest
class TestSkytap(StudioEditableBaseTest):
"""
Integration tests for the Skytap XBlock.
"""
def test_keyboard_layouts(self):
"""
"""
pass
|
...
"""
Integration tests for the Skytap XBlock.
"""
def test_keyboard_layouts(self):
"""
"""
pass
...
|
78df776f31e5a23213b7f9d162a71954a667950a
|
opps/views/tests/__init__.py
|
opps/views/tests/__init__.py
|
from opps.views.tests.test_generic_detail import *
|
from opps.views.tests.test_generic_detail import *
from opps.views.tests.test_generic_list import *
|
Add test_generic_list on tests views
|
Add test_generic_list on tests views
|
Python
|
mit
|
williamroot/opps,opps/opps,jeanmask/opps,opps/opps,YACOWS/opps,williamroot/opps,opps/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,opps/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,YACOWS/opps
|
python
|
## Code Before:
from opps.views.tests.test_generic_detail import *
## Instruction:
Add test_generic_list on tests views
## Code After:
from opps.views.tests.test_generic_detail import *
from opps.views.tests.test_generic_list import *
|
# ... existing code ...
from opps.views.tests.test_generic_detail import *
from opps.views.tests.test_generic_list import *
# ... rest of the code ...
|
0afbd19a7750d5b8dd45a8aeed0e5be0df88b26e
|
src/main/java/net/shadowfacts/foodies/recipe/FRecipes.java
|
src/main/java/net/shadowfacts/foodies/recipe/FRecipes.java
|
package net.shadowfacts.foodies.recipe;
import cpw.mods.fml.common.registry.GameRegistry;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.shadowfacts.foodies.item.FItems;
/**
* A helper class for registering recipes.
* @author shadowfacts
*/
public class FRecipes {
public static void preInit() {
GameRegistry.addSmelting(Items.bread, new ItemStack(FItems.toast, 1), 0.2f);
}
public static void load() {
}
public static void postInit() {
}
}
|
package net.shadowfacts.foodies.recipe;
import cpw.mods.fml.common.registry.GameRegistry;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.shadowfacts.foodies.item.FItems;
/**
* A helper class for registering recipes.
* @author shadowfacts
*/
public class FRecipes {
public static void preInit() {
// Shapeless
GameRegistry.addShapelessRecipe(new ItemStack(FItems.hamburger), new ItemStack(FItems.toast), new ItemStack(FItems.beefPattie), new ItemStack(FItems.toast));
GameRegistry.addShapelessRecipe(new ItemStack(FItems.cheeseburger), new ItemStack(FItems.hamburger), new ItemStack(FItems.cheese));
// Shaped
registerSmelting();
}
private static void registerSmelting() {
// Smelting
GameRegistry.addSmelting(Items.bread, new ItemStack(FItems.toast), 0.2f);
GameRegistry.addSmelting(Items.cooked_beef, new ItemStack(FItems.beefPattie), 0.3f);
}
public static void load() {
}
public static void postInit() {
}
}
|
Add recipes for new things
|
Add recipes for new things
|
Java
|
lgpl-2.1
|
shadowfacts/Foodies
|
java
|
## Code Before:
package net.shadowfacts.foodies.recipe;
import cpw.mods.fml.common.registry.GameRegistry;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.shadowfacts.foodies.item.FItems;
/**
* A helper class for registering recipes.
* @author shadowfacts
*/
public class FRecipes {
public static void preInit() {
GameRegistry.addSmelting(Items.bread, new ItemStack(FItems.toast, 1), 0.2f);
}
public static void load() {
}
public static void postInit() {
}
}
## Instruction:
Add recipes for new things
## Code After:
package net.shadowfacts.foodies.recipe;
import cpw.mods.fml.common.registry.GameRegistry;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.shadowfacts.foodies.item.FItems;
/**
* A helper class for registering recipes.
* @author shadowfacts
*/
public class FRecipes {
public static void preInit() {
// Shapeless
GameRegistry.addShapelessRecipe(new ItemStack(FItems.hamburger), new ItemStack(FItems.toast), new ItemStack(FItems.beefPattie), new ItemStack(FItems.toast));
GameRegistry.addShapelessRecipe(new ItemStack(FItems.cheeseburger), new ItemStack(FItems.hamburger), new ItemStack(FItems.cheese));
// Shaped
registerSmelting();
}
private static void registerSmelting() {
// Smelting
GameRegistry.addSmelting(Items.bread, new ItemStack(FItems.toast), 0.2f);
GameRegistry.addSmelting(Items.cooked_beef, new ItemStack(FItems.beefPattie), 0.3f);
}
public static void load() {
}
public static void postInit() {
}
}
|
# ... existing code ...
*/
public class FRecipes {
public static void preInit() {
// Shapeless
GameRegistry.addShapelessRecipe(new ItemStack(FItems.hamburger), new ItemStack(FItems.toast), new ItemStack(FItems.beefPattie), new ItemStack(FItems.toast));
GameRegistry.addShapelessRecipe(new ItemStack(FItems.cheeseburger), new ItemStack(FItems.hamburger), new ItemStack(FItems.cheese));
// Shaped
registerSmelting();
}
private static void registerSmelting() {
// Smelting
GameRegistry.addSmelting(Items.bread, new ItemStack(FItems.toast), 0.2f);
GameRegistry.addSmelting(Items.cooked_beef, new ItemStack(FItems.beefPattie), 0.3f);
}
public static void load() {
# ... rest of the code ...
|
d7c8c43854f30d46f9a6d7367700e4fe5dc0b48d
|
setup.py
|
setup.py
|
from distutils.core import setup
# README = "/".join([os.path.dirname(__file__), "README.md"])
# with open(README) as file:
# long_description = file.read()
setup(
name='graphitesend',
version='0.4.0',
description='A simple interface for sending metrics to Graphite',
author='Danny Lawrence',
author_email='[email protected]',
url='https://github.com/daniellawrence/graphitesend',
# package_dir={'': ''},
packages=['graphitesend'],
long_description="https://github.com/daniellawrence/graphitesend",
entry_points={
'console_scripts': [
'graphitesend = graphitesend.graphitesend:cli',
],
}
)
|
from distutils.core import setup
# README = "/".join([os.path.dirname(__file__), "README.md"])
# with open(README) as file:
# long_description = file.read()
setup(
name='graphitesend',
version='0.4.0',
description='A simple interface for sending metrics to Graphite',
author='Danny Lawrence',
author_email='[email protected]',
url='https://github.com/daniellawrence/graphitesend',
# package_dir={'': ''},
packages=['graphitesend'],
long_description="https://github.com/daniellawrence/graphitesend",
entry_points={
'console_scripts': [
'graphitesend = graphitesend.graphitesend:cli',
],
},
extras_require = {
'asynchronous': ['gevent>=1.0.0'],
}
)
|
Add gevent as an extra requirement
|
Add gevent as an extra requirement
|
Python
|
apache-2.0
|
daniellawrence/graphitesend,numberly/graphitesend,PabloLefort/graphitesend
|
python
|
## Code Before:
from distutils.core import setup
# README = "/".join([os.path.dirname(__file__), "README.md"])
# with open(README) as file:
# long_description = file.read()
setup(
name='graphitesend',
version='0.4.0',
description='A simple interface for sending metrics to Graphite',
author='Danny Lawrence',
author_email='[email protected]',
url='https://github.com/daniellawrence/graphitesend',
# package_dir={'': ''},
packages=['graphitesend'],
long_description="https://github.com/daniellawrence/graphitesend",
entry_points={
'console_scripts': [
'graphitesend = graphitesend.graphitesend:cli',
],
}
)
## Instruction:
Add gevent as an extra requirement
## Code After:
from distutils.core import setup
# README = "/".join([os.path.dirname(__file__), "README.md"])
# with open(README) as file:
# long_description = file.read()
setup(
name='graphitesend',
version='0.4.0',
description='A simple interface for sending metrics to Graphite',
author='Danny Lawrence',
author_email='[email protected]',
url='https://github.com/daniellawrence/graphitesend',
# package_dir={'': ''},
packages=['graphitesend'],
long_description="https://github.com/daniellawrence/graphitesend",
entry_points={
'console_scripts': [
'graphitesend = graphitesend.graphitesend:cli',
],
},
extras_require = {
'asynchronous': ['gevent>=1.0.0'],
}
)
|
...
'console_scripts': [
'graphitesend = graphitesend.graphitesend:cli',
],
},
extras_require = {
'asynchronous': ['gevent>=1.0.0'],
}
)
...
|
b5098ce702d590e665ecd01034385420dc506c75
|
Signs/src/main/java/tk/martijn_heil/kingdomessentials/signs/ExecutableSignRegister.java
|
Signs/src/main/java/tk/martijn_heil/kingdomessentials/signs/ExecutableSignRegister.java
|
package tk.martijn_heil.kingdomessentials.signs;
import lombok.Getter;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
import static com.google.common.base.Preconditions.checkArgument;
public class ExecutableSignRegister
{
@Getter private List<ExecutableSign> registeredSigns = new ArrayList<>();
public void addExecutableSign(ExecutableSign sign)
{
checkArgument(!registeredSigns.contains(sign), "this sign is already registered.");
}
@Nullable
public ExecutableSign get(String action)
{
for (ExecutableSign sign : registeredSigns)
{
if(sign.getAction().equals(action)) return sign;
}
return null;
}
}
|
package tk.martijn_heil.kingdomessentials.signs;
import lombok.Getter;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
import static com.google.common.base.Preconditions.checkArgument;
public class ExecutableSignRegister
{
@Getter private List<ExecutableSign> registeredSigns = new ArrayList<>();
public void addExecutableSign(ExecutableSign sign)
{
checkArgument(!registeredSigns.contains(sign), "this sign is already registered.");
this.registeredSigns.add(sign);
}
@Nullable
public ExecutableSign get(String action)
{
for (ExecutableSign sign : registeredSigns)
{
if(sign.getAction().equals(action)) return sign;
}
return null;
}
}
|
Fix a new ExecutableSign not being added to the register
|
Fix a new ExecutableSign not being added to the register
|
Java
|
mit
|
martijn-heil/KingdomEssentials
|
java
|
## Code Before:
package tk.martijn_heil.kingdomessentials.signs;
import lombok.Getter;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
import static com.google.common.base.Preconditions.checkArgument;
public class ExecutableSignRegister
{
@Getter private List<ExecutableSign> registeredSigns = new ArrayList<>();
public void addExecutableSign(ExecutableSign sign)
{
checkArgument(!registeredSigns.contains(sign), "this sign is already registered.");
}
@Nullable
public ExecutableSign get(String action)
{
for (ExecutableSign sign : registeredSigns)
{
if(sign.getAction().equals(action)) return sign;
}
return null;
}
}
## Instruction:
Fix a new ExecutableSign not being added to the register
## Code After:
package tk.martijn_heil.kingdomessentials.signs;
import lombok.Getter;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
import static com.google.common.base.Preconditions.checkArgument;
public class ExecutableSignRegister
{
@Getter private List<ExecutableSign> registeredSigns = new ArrayList<>();
public void addExecutableSign(ExecutableSign sign)
{
checkArgument(!registeredSigns.contains(sign), "this sign is already registered.");
this.registeredSigns.add(sign);
}
@Nullable
public ExecutableSign get(String action)
{
for (ExecutableSign sign : registeredSigns)
{
if(sign.getAction().equals(action)) return sign;
}
return null;
}
}
|
# ... existing code ...
public void addExecutableSign(ExecutableSign sign)
{
checkArgument(!registeredSigns.contains(sign), "this sign is already registered.");
this.registeredSigns.add(sign);
}
# ... rest of the code ...
|
324941bb4946cea19800fb1102035bd32e8028db
|
apps/profiles/views.py
|
apps/profiles/views.py
|
from django.views.generic import DetailView, UpdateView
from django.contrib.auth.views import redirect_to_login
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from braces.views import LoginRequiredMixin
from .models import User
class ProfileDetailView(DetailView):
'''
Displays the user profile information
'''
model = User
slug_field = 'username'
slug_url_kwarg = 'username'
def get(self, request, *args, **kwargs):
user = request.user
username = self.kwargs.get(self.slug_url_kwarg)
if user.is_authenticated() and not username:
return redirect('profile_detail', username=user.username)
elif not user.is_authenticated() and not username:
return redirect_to_login(reverse('profile_detail_me'))
return super(ProfileDetailView, self).get(request, *args, **kwargs)
class ProfileUpdateView(LoginRequiredMixin, UpdateView):
model = User
slug_field = 'username'
slug_url_kwarg = 'username'
|
from django.views.generic import DetailView, UpdateView
from django.contrib.auth.views import redirect_to_login
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from braces.views import LoginRequiredMixin
from .models import User
class ProfileDetailView(DetailView):
'''
Displays the user profile information
'''
queryset = User.objects.select_related('location', 'location__country')
slug_field = 'username'
slug_url_kwarg = 'username'
def get(self, request, *args, **kwargs):
user = request.user
username = self.kwargs.get(self.slug_url_kwarg)
if user.is_authenticated() and not username:
return redirect('profile_detail', username=user.username)
elif not user.is_authenticated() and not username:
return redirect_to_login(reverse('profile_detail_me'))
return super(ProfileDetailView, self).get(request, *args, **kwargs)
class ProfileUpdateView(LoginRequiredMixin, UpdateView):
model = User
slug_field = 'username'
slug_url_kwarg = 'username'
|
Use select_related in user profile detail view
|
Use select_related in user profile detail view
|
Python
|
mit
|
SoPR/horas,SoPR/horas,SoPR/horas,SoPR/horas
|
python
|
## Code Before:
from django.views.generic import DetailView, UpdateView
from django.contrib.auth.views import redirect_to_login
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from braces.views import LoginRequiredMixin
from .models import User
class ProfileDetailView(DetailView):
'''
Displays the user profile information
'''
model = User
slug_field = 'username'
slug_url_kwarg = 'username'
def get(self, request, *args, **kwargs):
user = request.user
username = self.kwargs.get(self.slug_url_kwarg)
if user.is_authenticated() and not username:
return redirect('profile_detail', username=user.username)
elif not user.is_authenticated() and not username:
return redirect_to_login(reverse('profile_detail_me'))
return super(ProfileDetailView, self).get(request, *args, **kwargs)
class ProfileUpdateView(LoginRequiredMixin, UpdateView):
model = User
slug_field = 'username'
slug_url_kwarg = 'username'
## Instruction:
Use select_related in user profile detail view
## Code After:
from django.views.generic import DetailView, UpdateView
from django.contrib.auth.views import redirect_to_login
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from braces.views import LoginRequiredMixin
from .models import User
class ProfileDetailView(DetailView):
'''
Displays the user profile information
'''
queryset = User.objects.select_related('location', 'location__country')
slug_field = 'username'
slug_url_kwarg = 'username'
def get(self, request, *args, **kwargs):
user = request.user
username = self.kwargs.get(self.slug_url_kwarg)
if user.is_authenticated() and not username:
return redirect('profile_detail', username=user.username)
elif not user.is_authenticated() and not username:
return redirect_to_login(reverse('profile_detail_me'))
return super(ProfileDetailView, self).get(request, *args, **kwargs)
class ProfileUpdateView(LoginRequiredMixin, UpdateView):
model = User
slug_field = 'username'
slug_url_kwarg = 'username'
|
// ... existing code ...
'''
Displays the user profile information
'''
queryset = User.objects.select_related('location', 'location__country')
slug_field = 'username'
slug_url_kwarg = 'username'
// ... rest of the code ...
|
599ded62c8183c2fc4e5ecbd9ea399154c28c8a1
|
android/src/se/hyperlab/tigcm/ListenerService.java
|
android/src/se/hyperlab/tigcm/ListenerService.java
|
package se.hyperlab.tigcm;
import android.content.Intent;
import android.os.Bundle;
import org.appcelerator.kroll.common.Log;
import org.appcelerator.kroll.KrollRuntime;
import org.appcelerator.titanium.TiApplication;
import java.util.HashMap;
import com.google.android.gms.gcm.GcmListenerService;
public class ListenerService extends GcmListenerService {
private static final String TAG = "ListenerService";
@Override
public void onMessageReceived(String from, Bundle rawData) {
Log.d(TAG, "Received message from: " + from);
HashMap<String, Object> data = TiGCMModule.bundleToHashMap(rawData);
TiGCMModule module = TiGCMModule.getInstance();
if(module != null) {
if(KrollRuntime.isInitialized() && TiApplication.isCurrentActivityInForeground()) {
module.fireMessage(data, true);
return;
}
}
NotificationPublisher.createNotification(this, data);
}
}
|
package se.hyperlab.tigcm;
import android.content.Intent;
import android.os.Bundle;
import org.appcelerator.kroll.common.Log;
import org.appcelerator.kroll.KrollRuntime;
import org.appcelerator.titanium.TiApplication;
import java.util.HashMap;
import com.google.android.gms.gcm.GcmListenerService;
public class ListenerService extends GcmListenerService {
private static final String TAG = "ListenerService";
@Override
public void onMessageReceived(String from, Bundle rawData) {
Log.d(TAG, "Received message from: " + from);
HashMap<String, Object> data = TiGCMModule.bundleToHashMap(rawData);
boolean forceCreateNotification = false;
if (data.containsKey("forceCreateNotification")) {
if (data.get("forceCreateNotification").equals("true")) {
forceCreateNotification = true;
}
}
TiGCMModule module = TiGCMModule.getInstance();
if(module != null && !forceCreateNotification) {
if(KrollRuntime.isInitialized() && TiApplication.isCurrentActivityInForeground()) {
module.fireMessage(data, true);
return;
}
}
NotificationPublisher.createNotification(this, data);
}
}
|
Add forceCreateNotification flag to create notification even if the app is in foreground.
|
Add forceCreateNotification flag to create notification even if the app is in foreground.
|
Java
|
mit
|
hyperlab/TiAndroidNotifications,hyperlab/TiAndroidNotifications
|
java
|
## Code Before:
package se.hyperlab.tigcm;
import android.content.Intent;
import android.os.Bundle;
import org.appcelerator.kroll.common.Log;
import org.appcelerator.kroll.KrollRuntime;
import org.appcelerator.titanium.TiApplication;
import java.util.HashMap;
import com.google.android.gms.gcm.GcmListenerService;
public class ListenerService extends GcmListenerService {
private static final String TAG = "ListenerService";
@Override
public void onMessageReceived(String from, Bundle rawData) {
Log.d(TAG, "Received message from: " + from);
HashMap<String, Object> data = TiGCMModule.bundleToHashMap(rawData);
TiGCMModule module = TiGCMModule.getInstance();
if(module != null) {
if(KrollRuntime.isInitialized() && TiApplication.isCurrentActivityInForeground()) {
module.fireMessage(data, true);
return;
}
}
NotificationPublisher.createNotification(this, data);
}
}
## Instruction:
Add forceCreateNotification flag to create notification even if the app is in foreground.
## Code After:
package se.hyperlab.tigcm;
import android.content.Intent;
import android.os.Bundle;
import org.appcelerator.kroll.common.Log;
import org.appcelerator.kroll.KrollRuntime;
import org.appcelerator.titanium.TiApplication;
import java.util.HashMap;
import com.google.android.gms.gcm.GcmListenerService;
public class ListenerService extends GcmListenerService {
private static final String TAG = "ListenerService";
@Override
public void onMessageReceived(String from, Bundle rawData) {
Log.d(TAG, "Received message from: " + from);
HashMap<String, Object> data = TiGCMModule.bundleToHashMap(rawData);
boolean forceCreateNotification = false;
if (data.containsKey("forceCreateNotification")) {
if (data.get("forceCreateNotification").equals("true")) {
forceCreateNotification = true;
}
}
TiGCMModule module = TiGCMModule.getInstance();
if(module != null && !forceCreateNotification) {
if(KrollRuntime.isInitialized() && TiApplication.isCurrentActivityInForeground()) {
module.fireMessage(data, true);
return;
}
}
NotificationPublisher.createNotification(this, data);
}
}
|
...
Log.d(TAG, "Received message from: " + from);
HashMap<String, Object> data = TiGCMModule.bundleToHashMap(rawData);
boolean forceCreateNotification = false;
if (data.containsKey("forceCreateNotification")) {
if (data.get("forceCreateNotification").equals("true")) {
forceCreateNotification = true;
}
}
TiGCMModule module = TiGCMModule.getInstance();
if(module != null && !forceCreateNotification) {
if(KrollRuntime.isInitialized() && TiApplication.isCurrentActivityInForeground()) {
module.fireMessage(data, true);
return;
...
|
8d235a76120aadcd555da3d641f509541f525eb8
|
csunplugged/utils/retrieve_query_parameter.py
|
csunplugged/utils/retrieve_query_parameter.py
|
"""Module for retrieving a GET request query parameter."""
from django.http import Http404
def retrieve_query_parameter(request, parameter, valid_options=None):
"""Retrieve the query parameter.
If the parameter cannot be found, or is not found in the list of
valid options, then a 404 error is raised.
Args:
request: Request object (Request).
parameter: Parameter to retrieve (str).
valid_options: If provided, a list of valid options (list of str).
Returns:
String value of parameter.
"""
value = request.get(parameter, None)
if value is None:
raise Http404("{} parameter not specified.".format(parameter))
if valid_options and value not in valid_options:
raise Http404("{} parameter not valid.".format(parameter))
return value
|
"""Module for retrieving a GET request query parameter."""
from django.http import Http404
def retrieve_query_parameter(request, parameter, valid_options=None):
"""Retrieve the query parameter.
If the parameter cannot be found, or is not found in the list of
valid options, then a 404 error is raised.
Args:
request: Request object (Request).
parameter: Parameter to retrieve (str).
valid_options: If provided, a list of valid options (list of str).
Returns:
String value of parameter.
"""
value = request.get(parameter, None)
if value is None:
raise Http404("{} parameter not specified.".format(parameter))
if valid_options and value not in valid_options:
raise Http404("{} parameter not valid.".format(parameter))
return value
def retrieve_query_parameter_list(request, parameter, valid_options=None):
"""Retrieve the query parameter list.
If the parameter cannot be found, or is not found in the list of
valid options, then a 404 error is raised.
Args:
request: Request object (Request).
parameter: Parameter to retrieve (str).
valid_options: If provided, a list of valid options (list of str).
Returns:
List of strings of values of parameter.
"""
values = request.getlist(parameter, None)
if values is None:
raise Http404("{} parameter not specified.".format(parameter))
if valid_options:
for value in values:
if value not in valid_options:
raise Http404("{} parameter not valid.".format(parameter))
return values
|
Add function to get list of parameters
|
Add function to get list of parameters
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
python
|
## Code Before:
"""Module for retrieving a GET request query parameter."""
from django.http import Http404
def retrieve_query_parameter(request, parameter, valid_options=None):
"""Retrieve the query parameter.
If the parameter cannot be found, or is not found in the list of
valid options, then a 404 error is raised.
Args:
request: Request object (Request).
parameter: Parameter to retrieve (str).
valid_options: If provided, a list of valid options (list of str).
Returns:
String value of parameter.
"""
value = request.get(parameter, None)
if value is None:
raise Http404("{} parameter not specified.".format(parameter))
if valid_options and value not in valid_options:
raise Http404("{} parameter not valid.".format(parameter))
return value
## Instruction:
Add function to get list of parameters
## Code After:
"""Module for retrieving a GET request query parameter."""
from django.http import Http404
def retrieve_query_parameter(request, parameter, valid_options=None):
"""Retrieve the query parameter.
If the parameter cannot be found, or is not found in the list of
valid options, then a 404 error is raised.
Args:
request: Request object (Request).
parameter: Parameter to retrieve (str).
valid_options: If provided, a list of valid options (list of str).
Returns:
String value of parameter.
"""
value = request.get(parameter, None)
if value is None:
raise Http404("{} parameter not specified.".format(parameter))
if valid_options and value not in valid_options:
raise Http404("{} parameter not valid.".format(parameter))
return value
def retrieve_query_parameter_list(request, parameter, valid_options=None):
"""Retrieve the query parameter list.
If the parameter cannot be found, or is not found in the list of
valid options, then a 404 error is raised.
Args:
request: Request object (Request).
parameter: Parameter to retrieve (str).
valid_options: If provided, a list of valid options (list of str).
Returns:
List of strings of values of parameter.
"""
values = request.getlist(parameter, None)
if values is None:
raise Http404("{} parameter not specified.".format(parameter))
if valid_options:
for value in values:
if value not in valid_options:
raise Http404("{} parameter not valid.".format(parameter))
return values
|
...
if valid_options and value not in valid_options:
raise Http404("{} parameter not valid.".format(parameter))
return value
def retrieve_query_parameter_list(request, parameter, valid_options=None):
"""Retrieve the query parameter list.
If the parameter cannot be found, or is not found in the list of
valid options, then a 404 error is raised.
Args:
request: Request object (Request).
parameter: Parameter to retrieve (str).
valid_options: If provided, a list of valid options (list of str).
Returns:
List of strings of values of parameter.
"""
values = request.getlist(parameter, None)
if values is None:
raise Http404("{} parameter not specified.".format(parameter))
if valid_options:
for value in values:
if value not in valid_options:
raise Http404("{} parameter not valid.".format(parameter))
return values
...
|
6af828b3f541adf0c42e73d76d7998bc21a072cb
|
sample_proj/polls/filters.py
|
sample_proj/polls/filters.py
|
from datafilters.filterform import FilterForm
from datafilters.specs import (GenericSpec, DateFieldFilterSpec,
GreaterThanFilterSpec, ContainsFilterSpec,
GreaterThanZeroFilterSpec)
class PollsFilterForm(FilterForm):
has_exact_votes = GenericSpec('choice__votes')
has_choice_with_votes = GreaterThanZeroFilterSpec('choice__votes')
pub_date = DateFieldFilterSpec('pub_date', label='Date of publishing')
has_major_choice = GreaterThanFilterSpec('choice__votes', value=50)
question_contains = ContainsFilterSpec('question')
choice_contains = ContainsFilterSpec('choice__choice_text')
|
from datafilters.filterform import FilterForm
from datafilters.filterspec import FilterSpec
from datafilters.specs import (DateFieldFilterSpec,
GreaterThanFilterSpec, ContainsFilterSpec,
GreaterThanZeroFilterSpec)
class PollsFilterForm(FilterForm):
has_exact_votes = FilterSpec('choice__votes')
has_choice_with_votes = GreaterThanZeroFilterSpec('choice__votes')
pub_date = DateFieldFilterSpec('pub_date', label='Date of publishing')
has_major_choice = GreaterThanFilterSpec('choice__votes', value=50)
question_contains = ContainsFilterSpec('question')
choice_contains = ContainsFilterSpec('choice__choice_text')
|
Use FilterSpec instead of GenericSpec in sample_proj
|
Use FilterSpec instead of GenericSpec in sample_proj
|
Python
|
mit
|
zorainc/django-datafilters,freevoid/django-datafilters,zorainc/django-datafilters
|
python
|
## Code Before:
from datafilters.filterform import FilterForm
from datafilters.specs import (GenericSpec, DateFieldFilterSpec,
GreaterThanFilterSpec, ContainsFilterSpec,
GreaterThanZeroFilterSpec)
class PollsFilterForm(FilterForm):
has_exact_votes = GenericSpec('choice__votes')
has_choice_with_votes = GreaterThanZeroFilterSpec('choice__votes')
pub_date = DateFieldFilterSpec('pub_date', label='Date of publishing')
has_major_choice = GreaterThanFilterSpec('choice__votes', value=50)
question_contains = ContainsFilterSpec('question')
choice_contains = ContainsFilterSpec('choice__choice_text')
## Instruction:
Use FilterSpec instead of GenericSpec in sample_proj
## Code After:
from datafilters.filterform import FilterForm
from datafilters.filterspec import FilterSpec
from datafilters.specs import (DateFieldFilterSpec,
GreaterThanFilterSpec, ContainsFilterSpec,
GreaterThanZeroFilterSpec)
class PollsFilterForm(FilterForm):
has_exact_votes = FilterSpec('choice__votes')
has_choice_with_votes = GreaterThanZeroFilterSpec('choice__votes')
pub_date = DateFieldFilterSpec('pub_date', label='Date of publishing')
has_major_choice = GreaterThanFilterSpec('choice__votes', value=50)
question_contains = ContainsFilterSpec('question')
choice_contains = ContainsFilterSpec('choice__choice_text')
|
...
from datafilters.filterform import FilterForm
from datafilters.filterspec import FilterSpec
from datafilters.specs import (DateFieldFilterSpec,
GreaterThanFilterSpec, ContainsFilterSpec,
GreaterThanZeroFilterSpec)
class PollsFilterForm(FilterForm):
has_exact_votes = FilterSpec('choice__votes')
has_choice_with_votes = GreaterThanZeroFilterSpec('choice__votes')
pub_date = DateFieldFilterSpec('pub_date', label='Date of publishing')
has_major_choice = GreaterThanFilterSpec('choice__votes', value=50)
...
|
f63c37597a51f738bbd478afaf2d21b10741dc91
|
kid_readout/utils/easync.py
|
kid_readout/utils/easync.py
|
import netCDF4
class EasyGroup(object):
def __repr__(self):
return "EasyNC: %s %s" % (self._filename,self.group.path)
def __str__(self):
return self.__repr__()
def __init__(self,group,filename):
self._filename = filename
self.group = group
self.groups = group.groups
self.variables = group.variables
self.dimensions = group.dimensions
for gname in group.groups.keys():
if hasattr(self,gname):
print self,"already has an attribute",gname,"skipping"
continue
self.__setattr__(gname,EasyGroup(group.groups[gname],self._filename))
for vname in group.variables.keys():
if hasattr(self,vname):
print self,"already has an attribute",vname,"skipping"
continue
self.__setattr__(vname,group.variables[vname])
for dname in group.dimensions.keys():
dimname = "dim_" + dname
if hasattr(self,dimname):
print self,"already has an attribute",dimname,"skipping"
continue
self.__setattr__(dimname,group.dimensions[dname])
def EasyNetCDF4(*args,**kwargs):
nc = netCDF4.Dataset(*args,**kwargs)
if len(args) > 0:
fn = args[0]
else:
fn = kwargs['filename']
return EasyGroup(nc,fn)
|
import netCDF4
class EasyGroup(object):
def __repr__(self):
return "EasyNC: %s %s" % (self._filename,self.group.path)
def __str__(self):
return self.__repr__()
def __init__(self,group,filename):
self._filename = filename
self.group = group
self.groups = group.groups
self.variables = group.variables
self.dimensions = group.dimensions
for gname in group.groups.keys():
if hasattr(self,gname):
print self,"already has an attribute",gname,"skipping"
continue
self.__setattr__(gname,EasyGroup(group.groups[gname],self._filename))
for vname in group.variables.keys():
if hasattr(self,vname):
print self,"already has an attribute",vname,"skipping"
continue
self.__setattr__(vname,group.variables[vname])
for dname in group.dimensions.keys():
dimname = "dim_" + dname
if hasattr(self,dimname):
print self,"already has an attribute",dimname,"skipping"
continue
self.__setattr__(dimname,group.dimensions[dname])
def EasyNetCDF4(*args,**kwargs):
nc = netCDF4.Dataset(*args,**kwargs)
if len(args) > 0:
fn = args[0]
else:
fn = kwargs['filename']
enc = EasyGroup(nc,fn)
enc.close = nc.close
enc.sync = nc.sync
return enc
|
Add easy access to close and sync methods of nc files
|
Add easy access to close and sync methods of nc files
|
Python
|
bsd-2-clause
|
ColumbiaCMB/kid_readout,ColumbiaCMB/kid_readout
|
python
|
## Code Before:
import netCDF4
class EasyGroup(object):
def __repr__(self):
return "EasyNC: %s %s" % (self._filename,self.group.path)
def __str__(self):
return self.__repr__()
def __init__(self,group,filename):
self._filename = filename
self.group = group
self.groups = group.groups
self.variables = group.variables
self.dimensions = group.dimensions
for gname in group.groups.keys():
if hasattr(self,gname):
print self,"already has an attribute",gname,"skipping"
continue
self.__setattr__(gname,EasyGroup(group.groups[gname],self._filename))
for vname in group.variables.keys():
if hasattr(self,vname):
print self,"already has an attribute",vname,"skipping"
continue
self.__setattr__(vname,group.variables[vname])
for dname in group.dimensions.keys():
dimname = "dim_" + dname
if hasattr(self,dimname):
print self,"already has an attribute",dimname,"skipping"
continue
self.__setattr__(dimname,group.dimensions[dname])
def EasyNetCDF4(*args,**kwargs):
nc = netCDF4.Dataset(*args,**kwargs)
if len(args) > 0:
fn = args[0]
else:
fn = kwargs['filename']
return EasyGroup(nc,fn)
## Instruction:
Add easy access to close and sync methods of nc files
## Code After:
import netCDF4
class EasyGroup(object):
def __repr__(self):
return "EasyNC: %s %s" % (self._filename,self.group.path)
def __str__(self):
return self.__repr__()
def __init__(self,group,filename):
self._filename = filename
self.group = group
self.groups = group.groups
self.variables = group.variables
self.dimensions = group.dimensions
for gname in group.groups.keys():
if hasattr(self,gname):
print self,"already has an attribute",gname,"skipping"
continue
self.__setattr__(gname,EasyGroup(group.groups[gname],self._filename))
for vname in group.variables.keys():
if hasattr(self,vname):
print self,"already has an attribute",vname,"skipping"
continue
self.__setattr__(vname,group.variables[vname])
for dname in group.dimensions.keys():
dimname = "dim_" + dname
if hasattr(self,dimname):
print self,"already has an attribute",dimname,"skipping"
continue
self.__setattr__(dimname,group.dimensions[dname])
def EasyNetCDF4(*args,**kwargs):
nc = netCDF4.Dataset(*args,**kwargs)
if len(args) > 0:
fn = args[0]
else:
fn = kwargs['filename']
enc = EasyGroup(nc,fn)
enc.close = nc.close
enc.sync = nc.sync
return enc
|
// ... existing code ...
fn = args[0]
else:
fn = kwargs['filename']
enc = EasyGroup(nc,fn)
enc.close = nc.close
enc.sync = nc.sync
return enc
// ... rest of the code ...
|
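For orientation, a minimal usage sketch of the wrapper defined in this record; the file path and mode are hypothetical and not part of the commit.

# Usage sketch (hypothetical file path); extra arguments are forwarded to netCDF4.Dataset.
nc = EasyNetCDF4('/data/sweep.nc', mode='a')
# Groups, variables and dimensions hang off the EasyGroup wrapper as attributes,
# and after this commit the underlying Dataset's sync and close are reachable too:
nc.sync()
nc.close()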
ede7158c611bf618ee03989d33c5fe6a091b7d66
|
tests/testapp/models.py
|
tests/testapp/models.py
|
from __future__ import absolute_import
import sys
from django.conf import settings
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
import rules
@python_2_unicode_compatible
class Book(models.Model):
isbn = models.CharField(max_length=50, unique=True)
title = models.CharField(max_length=100)
author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
def __str__(self):
return self.title
if sys.version_info.major >= 3:
from rules.contrib.models import RulesModel
class TestModel(RulesModel):
class Meta:
rules_permissions = {"add": rules.always_true, "view": rules.always_true}
@classmethod
def preprocess_rules_permissions(cls, perms):
perms["custom"] = rules.always_true
|
from __future__ import absolute_import
import sys
from django.conf import settings
from django.db import models
try:
from django.utils.encoding import python_2_unicode_compatible
except ImportError:
def python_2_unicode_compatible(c):
return c
import rules
@python_2_unicode_compatible
class Book(models.Model):
isbn = models.CharField(max_length=50, unique=True)
title = models.CharField(max_length=100)
author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
def __str__(self):
return self.title
if sys.version_info.major >= 3:
from rules.contrib.models import RulesModel
class TestModel(RulesModel):
class Meta:
rules_permissions = {"add": rules.always_true, "view": rules.always_true}
@classmethod
def preprocess_rules_permissions(cls, perms):
perms["custom"] = rules.always_true
|
Add shim for python_2_unicode_compatible in tests
|
Add shim for python_2_unicode_compatible in tests
|
Python
|
mit
|
dfunckt/django-rules,dfunckt/django-rules,ticosax/django-rules,ticosax/django-rules,dfunckt/django-rules,ticosax/django-rules
|
python
|
## Code Before:
from __future__ import absolute_import
import sys
from django.conf import settings
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
import rules
@python_2_unicode_compatible
class Book(models.Model):
isbn = models.CharField(max_length=50, unique=True)
title = models.CharField(max_length=100)
author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
def __str__(self):
return self.title
if sys.version_info.major >= 3:
from rules.contrib.models import RulesModel
class TestModel(RulesModel):
class Meta:
rules_permissions = {"add": rules.always_true, "view": rules.always_true}
@classmethod
def preprocess_rules_permissions(cls, perms):
perms["custom"] = rules.always_true
## Instruction:
Add shim for python_2_unicode_compatible in tests
## Code After:
from __future__ import absolute_import
import sys
from django.conf import settings
from django.db import models
try:
from django.utils.encoding import python_2_unicode_compatible
except ImportError:
def python_2_unicode_compatible(c):
return c
import rules
@python_2_unicode_compatible
class Book(models.Model):
isbn = models.CharField(max_length=50, unique=True)
title = models.CharField(max_length=100)
author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
def __str__(self):
return self.title
if sys.version_info.major >= 3:
from rules.contrib.models import RulesModel
class TestModel(RulesModel):
class Meta:
rules_permissions = {"add": rules.always_true, "view": rules.always_true}
@classmethod
def preprocess_rules_permissions(cls, perms):
perms["custom"] = rules.always_true
|
# ... existing code ...
from django.conf import settings
from django.db import models
try:
from django.utils.encoding import python_2_unicode_compatible
except ImportError:
def python_2_unicode_compatible(c):
return c
import rules
# ... rest of the code ...
|
143b74a2c6f99d2d92ac85310351327ffb630c1e
|
uscampgrounds/admin.py
|
uscampgrounds/admin.py
|
from django.contrib.gis import admin
from uscampgrounds.models import *
class CampgroundAdmin(admin.OSMGeoAdmin):
list_display = ('name', 'campground_code', 'campground_type', 'phone', 'sites', 'elevation', 'hookups', 'amenities')
list_filter = ('campground_type',)
admin.site.register(Campground, CampgroundAdmin)
|
from django.contrib.gis import admin
from uscampgrounds.models import *
class CampgroundAdmin(admin.OSMGeoAdmin):
list_display = ('name', 'campground_code', 'campground_type', 'phone', 'sites', 'elevation', 'hookups', 'amenities')
list_filter = ('campground_type',)
search_fields = ('name',)
admin.site.register(Campground, CampgroundAdmin)
|
Allow searching campgrounds by name for convenience.
|
Allow searching campgrounds by name for convenience.
|
Python
|
bsd-3-clause
|
adamfast/geodjango-uscampgrounds,adamfast/geodjango-uscampgrounds
|
python
|
## Code Before:
from django.contrib.gis import admin
from uscampgrounds.models import *
class CampgroundAdmin(admin.OSMGeoAdmin):
list_display = ('name', 'campground_code', 'campground_type', 'phone', 'sites', 'elevation', 'hookups', 'amenities')
list_filter = ('campground_type',)
admin.site.register(Campground, CampgroundAdmin)
## Instruction:
Allow searching campgrounds by name for convenience.
## Code After:
from django.contrib.gis import admin
from uscampgrounds.models import *
class CampgroundAdmin(admin.OSMGeoAdmin):
list_display = ('name', 'campground_code', 'campground_type', 'phone', 'sites', 'elevation', 'hookups', 'amenities')
list_filter = ('campground_type',)
search_fields = ('name',)
admin.site.register(Campground, CampgroundAdmin)
|
# ... existing code ...
class CampgroundAdmin(admin.OSMGeoAdmin):
list_display = ('name', 'campground_code', 'campground_type', 'phone', 'sites', 'elevation', 'hookups', 'amenities')
list_filter = ('campground_type',)
search_fields = ('name',)
admin.site.register(Campground, CampgroundAdmin)
# ... rest of the code ...
|
0575b4345fc21ca537a95866ff2a24d25128c698
|
readthedocs/config/find.py
|
readthedocs/config/find.py
|
"""Helper functions to search files."""
from __future__ import division, print_function, unicode_literals
import os
import re
def find_all(path, filename_regex):
"""Find all files in ``path`` that match ``filename_regex`` regex."""
path = os.path.abspath(path)
for root, dirs, files in os.walk(path, topdown=True):
dirs.sort()
for filename in files:
if re.match(filename_regex, filename):
yield os.path.abspath(os.path.join(root, filename))
def find_one(path, filename_regex):
"""Find the first file in ``path`` that match ``filename_regex`` regex."""
for _path in find_all(path, filename_regex):
return _path
return ''
|
"""Helper functions to search files."""
from __future__ import division, print_function, unicode_literals
import os
import re
def find_one(path, filename_regex):
"""Find the first file in ``path`` that match ``filename_regex`` regex."""
_path = os.path.abspath(path)
for filename in os.listdir(_path):
if re.match(filename_regex, filename):
return os.path.join(_path, filename)
return ''
|
Remove logic for iterating directories to search for config file
|
Remove logic for iterating directories to search for config file
|
Python
|
mit
|
rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org
|
python
|
## Code Before:
"""Helper functions to search files."""
from __future__ import division, print_function, unicode_literals
import os
import re
def find_all(path, filename_regex):
"""Find all files in ``path`` that match ``filename_regex`` regex."""
path = os.path.abspath(path)
for root, dirs, files in os.walk(path, topdown=True):
dirs.sort()
for filename in files:
if re.match(filename_regex, filename):
yield os.path.abspath(os.path.join(root, filename))
def find_one(path, filename_regex):
"""Find the first file in ``path`` that match ``filename_regex`` regex."""
for _path in find_all(path, filename_regex):
return _path
return ''
## Instruction:
Remove logic for iterating directories to search for config file
## Code After:
"""Helper functions to search files."""
from __future__ import division, print_function, unicode_literals
import os
import re
def find_one(path, filename_regex):
"""Find the first file in ``path`` that match ``filename_regex`` regex."""
_path = os.path.abspath(path)
for filename in os.listdir(_path):
if re.match(filename_regex, filename):
return os.path.join(_path, filename)
return ''
|
// ... existing code ...
import re
def find_one(path, filename_regex):
"""Find the first file in ``path`` that match ``filename_regex`` regex."""
_path = os.path.abspath(path)
for filename in os.listdir(_path):
if re.match(filename_regex, filename):
return os.path.join(_path, filename)
return ''
// ... rest of the code ...
|
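A quick illustration of the simplified helper; the directory and filename pattern below are invented for the example.

# Usage sketch (hypothetical path and regex); find_one now only checks the top
# level of ``path`` instead of walking the whole tree.
config_path = find_one('/home/docs/project', r'\.?readthedocs\.ya?ml$')
if config_path:
    print('config found at ' + config_path)
else:
    print('no config file in the project root')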
4db6dd114fde94aac12d4a2f01ca23032cc8cc61
|
ARAnalyticalProvider.h
|
ARAnalyticalProvider.h
|
@class UINavigationController, UIViewController;
@interface ARAnalyticalProvider : NSObject
/// Init
- (id)initWithIdentifier:(NSString *)identifier;
/// Set a per user property
- (void)identifyUserWithID:(NSString *)userID andEmailAddress:(NSString *)email;
- (void)setUserProperty:(NSString *)property toValue:(NSString *)value;
/// Submit user events
- (void)event:(NSString *)event withProperties:(NSDictionary *)properties;
- (void)incrementUserProperty:(NSString *)counterName byInt:(NSNumber *)amount;
/// Submit errors
- (void)error:(NSError *)error withMessage:(NSString *)message;
/// Monitor Navigation changes as page view
- (void)monitorNavigationViewController:(UINavigationController *)controller;
/// Submit an event with a time interval
- (void)logTimingEvent:(NSString *)event withInterval:(NSNumber *)interval;
/// Submit an event with a time interval and extra properties
/// @warning the properites must not contain the key string `length`.
- (void)logTimingEvent:(NSString *)event withInterval:(NSNumber *)interval properties:(NSDictionary *)properties;
/// Pass a specific event for showing a page
- (void)didShowNewPageView:(NSString *)pageTitle;
/// Submit a string to the provider's logging system
- (void)remoteLog:(NSString *)parsedString;
- (void)localLog:(NSString *)message;
- (NSArray *)messagesForProcessID:(NSUInteger)processID;
@end
|
@class UINavigationController, UIViewController;
@interface ARAnalyticalProvider : NSObject
/// Init
- (id)initWithIdentifier:(NSString *)identifier;
/// Set a per user property
- (void)identifyUserWithID:(NSString *)userID andEmailAddress:(NSString *)email;
- (void)setUserProperty:(NSString *)property toValue:(NSString *)value;
/// Submit user events
- (void)event:(NSString *)event withProperties:(NSDictionary *)properties;
- (void)incrementUserProperty:(NSString *)counterName byInt:(NSNumber *)amount;
/// Submit errors
- (void)error:(NSError *)error withMessage:(NSString *)message;
/// Monitor Navigation changes as page view
- (void)monitorNavigationViewController:(UINavigationController *)controller;
/// Submit an event with a time interval
- (void)logTimingEvent:(NSString *)event withInterval:(NSNumber *)interval;
/// Submit an event with a time interval and extra properties
/// @warning the properites must not contain the key string `length`.
- (void)logTimingEvent:(NSString *)event withInterval:(NSNumber *)interval properties:(NSDictionary *)properties;
/// Pass a specific event for showing a page
- (void)didShowNewPageView:(NSString *)pageTitle;
/// Submit a string to the provider's logging system
- (void)remoteLog:(NSString *)parsedString;
/// Submit a string to the local persisted logging system
- (void)localLog:(NSString *)message;
/// Retrieve messages provided to the local persisted logging system originating from a specified process.
- (NSArray *)messagesForProcessID:(NSUInteger)processID;
@end
|
Document local persisted logging API.
|
Document local persisted logging API.
|
C
|
mit
|
ed-at-work/ARAnalytics,ashfurrow/ARAnalytics,sgtsquiggs/ARAnalytics,sp3esu/ARAnalytics,AlexanderBabchenko/ARAnalytics,orta/ARAnalytics,levigroker/ARAnalytics,arbesfeld/ARAnalytics,segiddins/ARAnalytics,sodastsai/ARAnalytics,HelloZhu/ARAnalytics,indiegogo/ARAnalytics,KBvsMJ/ARAnalytics,wzs/ARAnalytics,ftvs/ARAnalytics,imclean/ARAnalytics,diogot/ARAnalytics,BamX/ARAnalytics,skeeet/ARAnalytics,modulexcite/ARAnalytics,sunfei/ARAnalytics,natan/ARAnalytics,rinatkhanov/ARAnalytics
|
c
|
## Code Before:
@class UINavigationController, UIViewController;
@interface ARAnalyticalProvider : NSObject
/// Init
- (id)initWithIdentifier:(NSString *)identifier;
/// Set a per user property
- (void)identifyUserWithID:(NSString *)userID andEmailAddress:(NSString *)email;
- (void)setUserProperty:(NSString *)property toValue:(NSString *)value;
/// Submit user events
- (void)event:(NSString *)event withProperties:(NSDictionary *)properties;
- (void)incrementUserProperty:(NSString *)counterName byInt:(NSNumber *)amount;
/// Submit errors
- (void)error:(NSError *)error withMessage:(NSString *)message;
/// Monitor Navigation changes as page view
- (void)monitorNavigationViewController:(UINavigationController *)controller;
/// Submit an event with a time interval
- (void)logTimingEvent:(NSString *)event withInterval:(NSNumber *)interval;
/// Submit an event with a time interval and extra properties
/// @warning the properites must not contain the key string `length`.
- (void)logTimingEvent:(NSString *)event withInterval:(NSNumber *)interval properties:(NSDictionary *)properties;
/// Pass a specific event for showing a page
- (void)didShowNewPageView:(NSString *)pageTitle;
/// Submit a string to the provider's logging system
- (void)remoteLog:(NSString *)parsedString;
- (void)localLog:(NSString *)message;
- (NSArray *)messagesForProcessID:(NSUInteger)processID;
@end
## Instruction:
Document local persisted logging API.
## Code After:
@class UINavigationController, UIViewController;
@interface ARAnalyticalProvider : NSObject
/// Init
- (id)initWithIdentifier:(NSString *)identifier;
/// Set a per user property
- (void)identifyUserWithID:(NSString *)userID andEmailAddress:(NSString *)email;
- (void)setUserProperty:(NSString *)property toValue:(NSString *)value;
/// Submit user events
- (void)event:(NSString *)event withProperties:(NSDictionary *)properties;
- (void)incrementUserProperty:(NSString *)counterName byInt:(NSNumber *)amount;
/// Submit errors
- (void)error:(NSError *)error withMessage:(NSString *)message;
/// Monitor Navigation changes as page view
- (void)monitorNavigationViewController:(UINavigationController *)controller;
/// Submit an event with a time interval
- (void)logTimingEvent:(NSString *)event withInterval:(NSNumber *)interval;
/// Submit an event with a time interval and extra properties
/// @warning the properites must not contain the key string `length`.
- (void)logTimingEvent:(NSString *)event withInterval:(NSNumber *)interval properties:(NSDictionary *)properties;
/// Pass a specific event for showing a page
- (void)didShowNewPageView:(NSString *)pageTitle;
/// Submit a string to the provider's logging system
- (void)remoteLog:(NSString *)parsedString;
/// Submit a string to the local persisted logging system
- (void)localLog:(NSString *)message;
/// Retrieve messages provided to the local persisted logging system originating from a specified process.
- (NSArray *)messagesForProcessID:(NSUInteger)processID;
@end
|
...
/// Submit a string to the provider's logging system
- (void)remoteLog:(NSString *)parsedString;
/// Submit a string to the local persisted logging system
- (void)localLog:(NSString *)message;
/// Retrieve messages provided to the local persisted logging system originating from a specified process.
- (NSArray *)messagesForProcessID:(NSUInteger)processID;
@end
...
|
88a04efd1a7aa56a69b76b127908a5eca0c817bd
|
test/services/tv_remote/test_service.py
|
test/services/tv_remote/test_service.py
|
from roku import Roku
from app.core.messaging import Receiver
from app.core.servicemanager import ServiceManager
from app.services.tv_remote.service import RokuScanner, RokuTV
class TestRokuScanner(object):
@classmethod
def setup_class(cls):
cls.service_manager = ServiceManager(None)
cls.service_manager.start_services(["messaging"])
@classmethod
def teardown_class(cls):
cls.service_manager.stop()
def test_basic_discovery(self):
roku1 = Roku("abc")
scanner = RokuScanner("/devices", scan_interval=10)
scanner.discover_devices = lambda: [roku1]
scanner.get_device_id = lambda: "deviceid"
scanner.start()
receiver = Receiver("/devices")
msg = receiver.receive()
expected = {
"deviceid": {
"device_id": "deviceid",
"device_command_queue": "devices/tv/command",
"device_commands": RokuTV(None, None).read_commands()
}
}
assert msg == expected
|
import os
from roku import Roku
from app.core.messaging import Receiver
from app.core.servicemanager import ServiceManager
from app.services.tv_remote.service import RokuScanner, RokuTV
class TestRokuScanner(object):
@classmethod
def setup_class(cls):
os.environ["USE_FAKE_REDIS"] = "TRUE"
cls.service_manager = ServiceManager(None)
cls.service_manager.start_services(["messaging"])
@classmethod
def teardown_class(cls):
del os.environ["USE_FAKE_REDIS"]
cls.service_manager.stop()
def test_basic_discovery(self):
roku1 = Roku("abc")
scanner = RokuScanner("/devices", scan_interval=10)
scanner.discover_devices = lambda: [roku1]
scanner.get_device_id = lambda: "deviceid"
scanner.start()
receiver = Receiver("/devices")
msg = receiver.receive()
expected = {
"deviceid": {
"device_id": "deviceid",
"device_command_queue": "devices/tv/command",
"device_commands": RokuTV(None, None).read_commands()
}
}
assert msg == expected
|
Use Fake redis for tests.
|
Use Fake redis for tests.
|
Python
|
mit
|
supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer
|
python
|
## Code Before:
from roku import Roku
from app.core.messaging import Receiver
from app.core.servicemanager import ServiceManager
from app.services.tv_remote.service import RokuScanner, RokuTV
class TestRokuScanner(object):
@classmethod
def setup_class(cls):
cls.service_manager = ServiceManager(None)
cls.service_manager.start_services(["messaging"])
@classmethod
def teardown_class(cls):
cls.service_manager.stop()
def test_basic_discovery(self):
roku1 = Roku("abc")
scanner = RokuScanner("/devices", scan_interval=10)
scanner.discover_devices = lambda: [roku1]
scanner.get_device_id = lambda: "deviceid"
scanner.start()
receiver = Receiver("/devices")
msg = receiver.receive()
expected = {
"deviceid": {
"device_id": "deviceid",
"device_command_queue": "devices/tv/command",
"device_commands": RokuTV(None, None).read_commands()
}
}
assert msg == expected
## Instruction:
Use Fake redis for tests.
## Code After:
import os
from roku import Roku
from app.core.messaging import Receiver
from app.core.servicemanager import ServiceManager
from app.services.tv_remote.service import RokuScanner, RokuTV
class TestRokuScanner(object):
@classmethod
def setup_class(cls):
os.environ["USE_FAKE_REDIS"] = "TRUE"
cls.service_manager = ServiceManager(None)
cls.service_manager.start_services(["messaging"])
@classmethod
def teardown_class(cls):
del os.environ["USE_FAKE_REDIS"]
cls.service_manager.stop()
def test_basic_discovery(self):
roku1 = Roku("abc")
scanner = RokuScanner("/devices", scan_interval=10)
scanner.discover_devices = lambda: [roku1]
scanner.get_device_id = lambda: "deviceid"
scanner.start()
receiver = Receiver("/devices")
msg = receiver.receive()
expected = {
"deviceid": {
"device_id": "deviceid",
"device_command_queue": "devices/tv/command",
"device_commands": RokuTV(None, None).read_commands()
}
}
assert msg == expected
|
// ... existing code ...
import os
from roku import Roku
from app.core.messaging import Receiver
// ... modified code ...
class TestRokuScanner(object):
@classmethod
def setup_class(cls):
os.environ["USE_FAKE_REDIS"] = "TRUE"
cls.service_manager = ServiceManager(None)
cls.service_manager.start_services(["messaging"])
@classmethod
def teardown_class(cls):
del os.environ["USE_FAKE_REDIS"]
cls.service_manager.stop()
def test_basic_discovery(self):
// ... rest of the code ...
|
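The setup/teardown pair in this record mutates os.environ directly and cleans up by hand; a common alternative, sketched here and not part of the commit, is to scope the variable with unittest.mock so it is restored automatically.

# Sketch of an equivalent pattern using mock.patch.dict (illustration only).
import os
from unittest import mock

with mock.patch.dict(os.environ, {"USE_FAKE_REDIS": "TRUE"}):
    service_manager = ServiceManager(None)          # ServiceManager as in the test above
    service_manager.start_services(["messaging"])
    # ... run the discovery assertions here ...
    service_manager.stop()
# os.environ is restored as soon as the with-block exits.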
1d6e54c49babe1975a1b6285e8b7f039c04b3ab3
|
muduo/base/Atomic.h
|
muduo/base/Atomic.h
|
namespace muduo
{
class AtomicInt64 : boost::noncopyable
{
public:
AtomicInt64()
: value_(0)
{
}
int64_t get()
{
return value_;
}
int64_t addAndGet(int64_t x)
{
value_ += x;
return value_;
}
int64_t incrementAndGet()
{
return addAndGet(1);
}
int64_t getAndSet(int64_t newValue)
{
int64_t old = value_;
value_ = newValue;
return old;
}
private:
int64_t value_;
};
}
#endif // MUDUO_BASE_ATOMIC_H
|
namespace muduo
{
class AtomicInt64 : boost::noncopyable
{
public:
AtomicInt64()
: value_(0)
{
}
int64_t get()
{
return value_;
}
int64_t addAndGet(int64_t x)
{
return __sync_add_and_fetch(&value_, x);
}
int64_t incrementAndGet()
{
return addAndGet(1);
}
int64_t getAndSet(int64_t newValue)
{
return __sync_lock_test_and_set(&value_, newValue);
}
private:
volatile int64_t value_;
};
}
#endif // MUDUO_BASE_ATOMIC_H
|
Implement atomic integer with gcc builtins.
|
Implement atomic integer with gcc builtins.
|
C
|
bsd-3-clause
|
wangweihao/muduo,Cofyc/muduo,jxd134/muduo,shenhzou654321/muduo,floristt/muduo,floristt/muduo,SuperMXC/muduo,Cofyc/muduo,lvshiling/muduo,fc500110/muduo,danny200309/muduo,jerk1991/muduo,lizj3624/http-github.com-chenshuo-muduo-,jxd134/muduo,SourceInsight/muduo,zhuangshi23/muduo,SourceInsight/muduo,devsoulwolf/muduo,shenhzou654321/muduo,KunYi/muduo,devsoulwolf/muduo,lizj3624/muduo,KublaikhanGeek/muduo,DongweiLee/muduo,KingLebron/muduo,KublaikhanGeek/muduo,KingLebron/muduo,danny200309/muduo,yunhappy/muduo,shuang-shuang/muduo,DongweiLee/muduo,mitliucak/muduo,tsh185/muduo,danny200309/muduo,guker/muduo,huan80s/muduo,dhanzhang/muduo,lizj3624/muduo,yunhappy/muduo,floristt/muduo,SourceInsight/muduo,lvmaoxv/muduo,KingLebron/muduo,tsh185/muduo,wangweihao/muduo,lizj3624/http-github.com-chenshuo-muduo-,zhanMingming/muduo,lizj3624/muduo,Cofyc/muduo,ucfree/muduo,decimalbell/muduo,ywy2090/muduo,KingLebron/muduo,xzmagic/muduo,lizj3624/http-github.com-chenshuo-muduo-,wangweihao/muduo,dhanzhang/muduo,dhanzhang/muduo,SuperMXC/muduo,guker/muduo,SourceInsight/muduo,ucfree/muduo,decimalbell/muduo,shenhzou654321/muduo,fc500110/muduo,devsoulwolf/muduo,floristt/muduo,zhuangshi23/muduo,lizj3624/muduo,zxylvlp/muduo,huan80s/muduo,jerk1991/muduo,wangweihao/muduo,mitliucak/muduo,lizj3624/http-github.com-chenshuo-muduo-,lizhanhui/muduo,flyfeifan/muduo,mitliucak/muduo,Cofyc/muduo,Cofyc/muduo,westfly/muduo,xzmagic/muduo,mitliucak/muduo,mitliucak/muduo,decimalbell/muduo,decimalbell/muduo,daodaoliang/muduo,westfly/muduo,penyatree/muduo,devsoulwolf/muduo,danny200309/muduo,penyatree/muduo,wangweihao/muduo,pthreadself/muduo,SuperMXC/muduo,kidzyoung/muduo,pthreadself/muduo,lizj3624/http-github.com-chenshuo-muduo-,lizj3624/muduo,huan80s/muduo,ucfree/muduo,youprofit/muduo,zouzl/muduo-learning,ucfree/muduo,zhanMingming/muduo,daodaoliang/muduo,floristt/muduo,shenhzou654321/muduo,dhanzhang/muduo,KunYi/muduo,june505/muduo,lizhanhui/muduo,decimalbell/muduo,shuang-shuang/muduo,zxylvlp/muduo,dhanzhang/muduo,ywy2090/muduo,youprofit/muduo,june505/muduo,SuperMXC/muduo,zouzl/muduo-learning,danny200309/muduo,devsoulwolf/muduo,flyfeifan/muduo,zhuangshi23/muduo,lvmaoxv/muduo,ucfree/muduo,nestle1998/muduo,zxylvlp/muduo,SuperMXC/muduo,shenhzou654321/muduo,huan80s/muduo,KingLebron/muduo,kidzyoung/muduo,lvshiling/muduo,huan80s/muduo,zouzl/muduo-learning,SourceInsight/muduo,nestle1998/muduo,westfly/muduo
|
c
|
## Code Before:
namespace muduo
{
class AtomicInt64 : boost::noncopyable
{
public:
AtomicInt64()
: value_(0)
{
}
int64_t get()
{
return value_;
}
int64_t addAndGet(int64_t x)
{
value_ += x;
return value_;
}
int64_t incrementAndGet()
{
return addAndGet(1);
}
int64_t getAndSet(int64_t newValue)
{
int64_t old = value_;
value_ = newValue;
return old;
}
private:
int64_t value_;
};
}
#endif // MUDUO_BASE_ATOMIC_H
## Instruction:
Implement atomic integer with gcc builtins.
## Code After:
namespace muduo
{
class AtomicInt64 : boost::noncopyable
{
public:
AtomicInt64()
: value_(0)
{
}
int64_t get()
{
return value_;
}
int64_t addAndGet(int64_t x)
{
return __sync_add_and_fetch(&value_, x);
}
int64_t incrementAndGet()
{
return addAndGet(1);
}
int64_t getAndSet(int64_t newValue)
{
return __sync_lock_test_and_set(&value_, newValue);
}
private:
volatile int64_t value_;
};
}
#endif // MUDUO_BASE_ATOMIC_H
|
# ... existing code ...
int64_t addAndGet(int64_t x)
{
return __sync_add_and_fetch(&value_, x);
}
int64_t incrementAndGet()
# ... modified code ...
int64_t getAndSet(int64_t newValue)
{
return __sync_lock_test_and_set(&value_, newValue);
}
private:
volatile int64_t value_;
};
}
# ... rest of the code ...
|
d03b385b5d23c321ee1d4bd2020be1452e8c1cab
|
pika/__init__.py
|
pika/__init__.py
|
__version__ = '0.9.13p1'
from pika.connection import ConnectionParameters
from pika.connection import URLParameters
from pika.credentials import PlainCredentials
from pika.spec import BasicProperties
from pika.adapters.base_connection import BaseConnection
from pika.adapters.asyncore_connection import AsyncoreConnection
from pika.adapters.blocking_connection import BlockingConnection
from pika.adapters.select_connection import SelectConnection
# Python 2.4 support: add struct.unpack_from if it's missing.
try:
import struct
getattr(struct, "unpack_from")
except AttributeError:
def _unpack_from(fmt, buf, offset=0):
slice = buffer(buf, offset, struct.calcsize(fmt))
return struct.unpack(fmt, slice)
struct.unpack_from = _unpack_from
|
__version__ = '0.9.13p2'
from pika.connection import ConnectionParameters
from pika.connection import URLParameters
from pika.credentials import PlainCredentials
from pika.spec import BasicProperties
from pika.adapters.base_connection import BaseConnection
from pika.adapters.asyncore_connection import AsyncoreConnection
from pika.adapters.blocking_connection import BlockingConnection
from pika.adapters.select_connection import SelectConnection
|
Remove Python 2.4 support monkey patch and bump rev
|
Remove Python 2.4 support monkey patch and bump rev
|
Python
|
bsd-3-clause
|
reddec/pika,skftn/pika,Tarsbot/pika,shinji-s/pika,jstnlef/pika,zixiliuyue/pika,fkarb/pika-python3,renshawbay/pika-python3,vrtsystems/pika,Zephor5/pika,pika/pika,vitaly-krugl/pika,knowsis/pika,hugoxia/pika,benjamin9999/pika
|
python
|
## Code Before:
__version__ = '0.9.13p1'
from pika.connection import ConnectionParameters
from pika.connection import URLParameters
from pika.credentials import PlainCredentials
from pika.spec import BasicProperties
from pika.adapters.base_connection import BaseConnection
from pika.adapters.asyncore_connection import AsyncoreConnection
from pika.adapters.blocking_connection import BlockingConnection
from pika.adapters.select_connection import SelectConnection
# Python 2.4 support: add struct.unpack_from if it's missing.
try:
import struct
getattr(struct, "unpack_from")
except AttributeError:
def _unpack_from(fmt, buf, offset=0):
slice = buffer(buf, offset, struct.calcsize(fmt))
return struct.unpack(fmt, slice)
struct.unpack_from = _unpack_from
## Instruction:
Remove Python 2.4 support monkey patch and bump rev
## Code After:
__version__ = '0.9.13p2'
from pika.connection import ConnectionParameters
from pika.connection import URLParameters
from pika.credentials import PlainCredentials
from pika.spec import BasicProperties
from pika.adapters.base_connection import BaseConnection
from pika.adapters.asyncore_connection import AsyncoreConnection
from pika.adapters.blocking_connection import BlockingConnection
from pika.adapters.select_connection import SelectConnection
|
// ... existing code ...
__version__ = '0.9.13p2'
from pika.connection import ConnectionParameters
from pika.connection import URLParameters
// ... modified code ...
from pika.adapters.asyncore_connection import AsyncoreConnection
from pika.adapters.blocking_connection import BlockingConnection
from pika.adapters.select_connection import SelectConnection
// ... rest of the code ...
|
0476f5a59b5285fa625089167ef3b373525060b8
|
src/libreset/util/macros.h
|
src/libreset/util/macros.h
|
/**
* @file macros.h
*
* This file contains simple helper macros
*/
/**
* @addtogroup internal_util_helper_macros "(internal) helper macros"
*
* This group contains helper macros for internal use only.
*
* @{
*/
/**
* Computes the maximum value of the two passed values
*
* @note Provides compile-time type checking by using temp variables before
* doing the comparison.
*
* @note Opens own scope, so the temp variables do not show up outside of the
* macro.
*/
#define MAX(x,y) \
({ __typeof__ (x) _x = (x); \
__typeof__ (y) _y = (y); \
_x > _y ? _x : _y; })
/**
* Computes the minimum value of the two passed values
*
* @note Provides compile-time type checking by using temp variables before
* doing the comparison.
*
* @note Opens own scope, so the temp variables do not show up outside of the
* macro.
*/
#define MIN(x,y) \
({ __typeof__ (x) _x = (x); \
__typeof__ (y) _y = (y); \
_x < _y ? _x : _y; })
/** @} */
#endif //__MACROS_H__
|
/**
* @file macros.h
*
* This file contains simple helper macros
*/
/**
* @addtogroup internal_util_helper_macros "(internal) helper macros"
*
* This group contains helper macros for internal use only.
*
* @{
*/
/**
* Computes the maximum value of the two passed values
*
* @note Provides compile-time type checking by using temp variables before
* doing the comparison.
*
* @note Opens own scope, so the temp variables do not show up outside of the
* macro.
*/
#define MAX(x,y) \
((__typeof__(x)) x > (__typeof__(x)) y ? \
(__typeof__(x)) x : (__typeof__(x)) y)
/**
* Computes the minimum value of the two passed values
*
* @note Provides compile-time type checking by using temp variables before
* doing the comparison.
*
* @note Opens own scope, so the temp variables do not show up outside of the
* macro.
*/
#define MIN(x,y) \
({ __typeof__ (x) _x = (x); \
__typeof__ (y) _y = (y); \
_x < _y ? _x : _y; })
/** @} */
#endif //__MACROS_H__
|
Modify MAX(x,y) to not contain a scope
|
Modify MAX(x,y) to not contain a scope
|
C
|
lgpl-2.1
|
waysome/libreset,waysome/libreset
|
c
|
## Code Before:
/**
* @file macros.h
*
* This file contains simple helper macros
*/
/**
* @addtogroup internal_util_helper_macros "(internal) helper macros"
*
* This group contains helper macros for internal use only.
*
* @{
*/
/**
* Computes the maximum value of the two passed values
*
* @note Provides compile-time type checking by using temp variables before
* doing the comparison.
*
* @note Opens own scope, so the temp variables do not show up outside of the
* macro.
*/
#define MAX(x,y) \
({ __typeof__ (x) _x = (x); \
__typeof__ (y) _y = (y); \
_x > _y ? _x : _y; })
/**
* Computes the minimum value of the two passed values
*
* @note Provides compile-time type checking by using temp variables before
* doing the comparison.
*
* @note Opens own scope, so the temp variables do not show up outside of the
* macro.
*/
#define MIN(x,y) \
({ __typeof__ (x) _x = (x); \
__typeof__ (y) _y = (y); \
_x < _y ? _x : _y; })
/** @} */
#endif //__MACROS_H__
## Instruction:
Modify MAX(x,y) to not contain a scope
## Code After:
/**
* @file macros.h
*
* This file contains simple helper macros
*/
/**
* @addtogroup internal_util_helper_macros "(internal) helper macros"
*
* This group contains helper macros for internal use only.
*
* @{
*/
/**
* Computes the maximum value of the two passed values
*
* @note Provides compile-time type checking by using temp variables before
* doing the comparison.
*
* @note Opens own scope, so the temp variables do not show up outside of the
* macro.
*/
#define MAX(x,y) \
((__typeof__(x)) x > (__typeof__(x)) y ? \
(__typeof__(x)) x : (__typeof__(x)) y)
/**
* Computes the minimum value of the two passed values
*
* @note Provides compile-time type checking by using temp variables before
* doing the comparison.
*
* @note Opens own scope, so the temp variables do not show up outside of the
* macro.
*/
#define MIN(x,y) \
({ __typeof__ (x) _x = (x); \
__typeof__ (y) _y = (y); \
_x < _y ? _x : _y; })
/** @} */
#endif //__MACROS_H__
|
# ... existing code ...
* @note Opens own scope, so the temp variables do not show up outside of the
* macro.
*/
#define MAX(x,y) \
((__typeof__(x)) x > (__typeof__(x)) y ? \
(__typeof__(x)) x : (__typeof__(x)) y)
/**
* Computes the minimum value of the two passed values
# ... rest of the code ...
|
c512b522c0ed8c43cd304002d25f9cc0998ad048
|
parity-match/src/main/java/org/jvirtanen/parity/match/MarketListener.java
|
parity-match/src/main/java/org/jvirtanen/parity/match/MarketListener.java
|
package org.jvirtanen.parity.match;
/**
* <code>MarketListener</code> is the interface for outbound events from the
* matching engine.
*/
public interface MarketListener {
/**
* Match an incoming order to a resting order in the order book. The match
* occurs at the price of the order in the order book.
*
* @param restingOrderId the order identifier of the resting order
* @param incomingOrderId the order identifier of the incoming order
* @param incomingSide the side of the incoming order
* @param price the execution price
* @param executedQuantity the executed quantity
* @param remainingQuantity the remaining quantity of the resting order
*/
void match(long restingOrderId, long incomingOrderId, Side incomingSide, long price, long executedQuantity, long remainingQuantity);
/**
* Add an order to the order book.
*
* @param orderId the order identifier
* @param side the side
* @param price the limit price
* @param size the size
*/
void add(long orderId, Side side, long price, long size);
/**
* Cancel a quantity of an order.
*
* @param orderId the order identifier
* @param canceledQuantity the canceled quantity
* @param remainingQuantity the remaining quantity
*/
void cancel(long orderId, long canceledQuantity, long remainingQuantity);
}
|
package org.jvirtanen.parity.match;
/**
* The interface for outbound events from the matching engine.
*/
public interface MarketListener {
/**
* Match an incoming order to a resting order in the order book. The match
* occurs at the price of the order in the order book.
*
* @param restingOrderId the order identifier of the resting order
* @param incomingOrderId the order identifier of the incoming order
* @param incomingSide the side of the incoming order
* @param price the execution price
* @param executedQuantity the executed quantity
* @param remainingQuantity the remaining quantity of the resting order
*/
void match(long restingOrderId, long incomingOrderId, Side incomingSide, long price, long executedQuantity, long remainingQuantity);
/**
* Add an order to the order book.
*
* @param orderId the order identifier
* @param side the side
* @param price the limit price
* @param size the size
*/
void add(long orderId, Side side, long price, long size);
/**
* Cancel a quantity of an order.
*
* @param orderId the order identifier
* @param canceledQuantity the canceled quantity
* @param remainingQuantity the remaining quantity
*/
void cancel(long orderId, long canceledQuantity, long remainingQuantity);
}
|
Tweak documentation for market listener in matching engine
|
Tweak documentation for market listener in matching engine
|
Java
|
apache-2.0
|
paritytrading/parity,pmcs/parity,paritytrading/parity,pmcs/parity
|
java
|
## Code Before:
package org.jvirtanen.parity.match;
/**
* <code>MarketListener</code> is the interface for outbound events from the
* matching engine.
*/
public interface MarketListener {
/**
* Match an incoming order to a resting order in the order book. The match
* occurs at the price of the order in the order book.
*
* @param restingOrderId the order identifier of the resting order
* @param incomingOrderId the order identifier of the incoming order
* @param incomingSide the side of the incoming order
* @param price the execution price
* @param executedQuantity the executed quantity
* @param remainingQuantity the remaining quantity of the resting order
*/
void match(long restingOrderId, long incomingOrderId, Side incomingSide, long price, long executedQuantity, long remainingQuantity);
/**
* Add an order to the order book.
*
* @param orderId the order identifier
* @param side the side
* @param price the limit price
* @param size the size
*/
void add(long orderId, Side side, long price, long size);
/**
* Cancel a quantity of an order.
*
* @param orderId the order identifier
* @param canceledQuantity the canceled quantity
* @param remainingQuantity the remaining quantity
*/
void cancel(long orderId, long canceledQuantity, long remainingQuantity);
}
## Instruction:
Tweak documentation for market listener in matching engine
## Code After:
package org.jvirtanen.parity.match;
/**
* The interface for outbound events from the matching engine.
*/
public interface MarketListener {
/**
* Match an incoming order to a resting order in the order book. The match
* occurs at the price of the order in the order book.
*
* @param restingOrderId the order identifier of the resting order
* @param incomingOrderId the order identifier of the incoming order
* @param incomingSide the side of the incoming order
* @param price the execution price
* @param executedQuantity the executed quantity
* @param remainingQuantity the remaining quantity of the resting order
*/
void match(long restingOrderId, long incomingOrderId, Side incomingSide, long price, long executedQuantity, long remainingQuantity);
/**
* Add an order to the order book.
*
* @param orderId the order identifier
* @param side the side
* @param price the limit price
* @param size the size
*/
void add(long orderId, Side side, long price, long size);
/**
* Cancel a quantity of an order.
*
* @param orderId the order identifier
* @param canceledQuantity the canceled quantity
* @param remainingQuantity the remaining quantity
*/
void cancel(long orderId, long canceledQuantity, long remainingQuantity);
}
|
...
package org.jvirtanen.parity.match;
/**
* The interface for outbound events from the matching engine.
*/
public interface MarketListener {
...
|
f5ec7afe7039cc6c897844da274f4b454d34c646
|
src/perftest/java/com/lmax/disruptor/PerfTestContext.java
|
src/perftest/java/com/lmax/disruptor/PerfTestContext.java
|
package com.lmax.disruptor;
public class PerfTestContext {
private long disruptorOps;
private long batchesProcessedCount;
private long iterations;
public PerfTestContext() {
}
public long getDisruptorOps() {
return disruptorOps;
}
public void setDisruptorOps(long disruptorOps) {
this.disruptorOps = disruptorOps;
}
public long getBatchesProcessedCount() {
return batchesProcessedCount;
}
public double getBatchPercent() {
return 1 - (double)batchesProcessedCount / iterations;
}
public double getAverageBatchSize() {
if (batchesProcessedCount == 0) return -1;
return (double)iterations / batchesProcessedCount;
}
public void setBatchData(long batchesProcessedCount, long iterations) {
this.batchesProcessedCount = batchesProcessedCount;
this.iterations = iterations;
}
}
|
package com.lmax.disruptor;
public class PerfTestContext {
private long disruptorOps;
private long batchesProcessedCount;
private long iterations;
public PerfTestContext() {
}
public long getDisruptorOps() {
return disruptorOps;
}
public void setDisruptorOps(long disruptorOps) {
this.disruptorOps = disruptorOps;
}
public long getBatchesProcessedCount() {
return batchesProcessedCount;
}
public double getBatchPercent() {
if (batchesProcessedCount == 0) return 0;
return 1 - (double)batchesProcessedCount / iterations;
}
public double getAverageBatchSize() {
if (batchesProcessedCount == 0) return -1;
return (double)iterations / batchesProcessedCount;
}
public void setBatchData(long batchesProcessedCount, long iterations) {
this.batchesProcessedCount = batchesProcessedCount;
this.iterations = iterations;
}
}
|
Fix display when batches are not counted
|
Fix display when batches are not counted
|
Java
|
apache-2.0
|
LMAX-Exchange/disruptor,LMAX-Exchange/disruptor
|
java
|
## Code Before:
package com.lmax.disruptor;
public class PerfTestContext {
private long disruptorOps;
private long batchesProcessedCount;
private long iterations;
public PerfTestContext() {
}
public long getDisruptorOps() {
return disruptorOps;
}
public void setDisruptorOps(long disruptorOps) {
this.disruptorOps = disruptorOps;
}
public long getBatchesProcessedCount() {
return batchesProcessedCount;
}
public double getBatchPercent() {
return 1 - (double)batchesProcessedCount / iterations;
}
public double getAverageBatchSize() {
if (batchesProcessedCount == 0) return -1;
return (double)iterations / batchesProcessedCount;
}
public void setBatchData(long batchesProcessedCount, long iterations) {
this.batchesProcessedCount = batchesProcessedCount;
this.iterations = iterations;
}
}
## Instruction:
Fix display when batches are not counted
## Code After:
package com.lmax.disruptor;
public class PerfTestContext {
private long disruptorOps;
private long batchesProcessedCount;
private long iterations;
public PerfTestContext() {
}
public long getDisruptorOps() {
return disruptorOps;
}
public void setDisruptorOps(long disruptorOps) {
this.disruptorOps = disruptorOps;
}
public long getBatchesProcessedCount() {
return batchesProcessedCount;
}
public double getBatchPercent() {
if (batchesProcessedCount == 0) return 0;
return 1 - (double)batchesProcessedCount / iterations;
}
public double getAverageBatchSize() {
if (batchesProcessedCount == 0) return -1;
return (double)iterations / batchesProcessedCount;
}
public void setBatchData(long batchesProcessedCount, long iterations) {
this.batchesProcessedCount = batchesProcessedCount;
this.iterations = iterations;
}
}
|
...
}
public double getBatchPercent() {
if (batchesProcessedCount == 0) return 0;
return 1 - (double)batchesProcessedCount / iterations;
}
...
|
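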
91a7e4ba30c2c455c58b7069015680b7af511cc4
|
tests/test_get_joke.py
|
tests/test_get_joke.py
|
def test_get_joke():
from pyjokes import get_joke
for i in range(10):
assert get_joke()
languages = ['eng', 'de', 'spa']
categories = ['neutral', 'explicit', 'all']
for lang in languages:
for cat in categories:
for i in range(10):
assert get_joke(cat, lang)
|
import pytest
from pyjokes import get_joke
from pyjokes.pyjokes import LanguageNotFoundError, CategoryNotFoundError
def test_get_joke():
assert get_joke()
languages = ['en', 'de', 'es']
categories = ['neutral', 'explicit', 'all']
for lang in languages:
assert get_joke(language=lang)
for cat in categories:
assert get_joke(category=cat)
def test_get_joke_raises():
assert pytest.raises(LanguageNotFoundError, get_joke, language='eu')
assert pytest.raises(LanguageNotFoundError, get_joke, language='tr')
assert pytest.raises(CategoryNotFoundError, get_joke, category='123')
|
Simplify get_joke test, add raise checks
|
Simplify get_joke test, add raise checks
|
Python
|
bsd-3-clause
|
borjaayerdi/pyjokes,trojjer/pyjokes,martinohanlon/pyjokes,bennuttall/pyjokes,ElectronicsGeek/pyjokes,pyjokes/pyjokes,gmarkall/pyjokes
|
python
|
## Code Before:
def test_get_joke():
from pyjokes import get_joke
for i in range(10):
assert get_joke()
languages = ['eng', 'de', 'spa']
categories = ['neutral', 'explicit', 'all']
for lang in languages:
for cat in categories:
for i in range(10):
assert get_joke(cat, lang)
## Instruction:
Simplify get_joke test, add raise checks
## Code After:
import pytest
from pyjokes import get_joke
from pyjokes.pyjokes import LanguageNotFoundError, CategoryNotFoundError
def test_get_joke():
assert get_joke()
languages = ['en', 'de', 'es']
categories = ['neutral', 'explicit', 'all']
for lang in languages:
assert get_joke(language=lang)
for cat in categories:
assert get_joke(category=cat)
def test_get_joke_raises():
assert pytest.raises(LanguageNotFoundError, get_joke, language='eu')
assert pytest.raises(LanguageNotFoundError, get_joke, language='tr')
assert pytest.raises(CategoryNotFoundError, get_joke, category='123')
|
...
import pytest
from pyjokes import get_joke
from pyjokes.pyjokes import LanguageNotFoundError, CategoryNotFoundError
def test_get_joke():
assert get_joke()
languages = ['en', 'de', 'es']
categories = ['neutral', 'explicit', 'all']
for lang in languages:
assert get_joke(language=lang)
for cat in categories:
assert get_joke(category=cat)
def test_get_joke_raises():
assert pytest.raises(LanguageNotFoundError, get_joke, language='eu')
assert pytest.raises(LanguageNotFoundError, get_joke, language='tr')
assert pytest.raises(CategoryNotFoundError, get_joke, category='123')
...
|
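The rewritten test uses the callable form of pytest.raises; the same checks can also be written with the context-manager form, shown here purely as an illustration.

# Equivalent assertions using pytest.raises as a context manager (illustration only).
def test_get_joke_raises_ctx():
    with pytest.raises(LanguageNotFoundError):
        get_joke(language='eu')
    with pytest.raises(CategoryNotFoundError):
        get_joke(category='123')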
a382c448da054db4631bb9940d27d4b527d7d5ce
|
execute_all_tests.py
|
execute_all_tests.py
|
import os
import sys
import argparse
from coalib.tests.TestHelper import TestHelper
def show_help():
print("Usage: {name} [OPTIONS]".format(name=sys.argv[0]))
print()
print("--help : Show this help text")
print("--cover : Use coverage to get statement and branch coverage of tests")
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--cover", help="measure code coverage", action="store_true")
args = parser.parse_args()
files = TestHelper.get_test_files(os.path.abspath("coalib/tests"))
exit(TestHelper.execute_python3_files(files, args.cover))
|
import os
import sys
import argparse
from coalib.tests.TestHelper import TestHelper
def show_help():
print("Usage: {name} [OPTIONS]".format(name=sys.argv[0]))
print()
print("--help : Show this help text")
print("--cover : Use coverage to get statement and branch coverage of tests")
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--cover", help="measure code coverage", action="store_true")
parser.add_argument("-b", "--ignore-bear-tests", help="ignore bear tests", action="store_true")
parser.add_argument("-m", "--ignore-main-tests", help="ignore main program tests", action="store_true")
args = parser.parse_args()
files = []
if not args.ignore_main_tests:
files.extend(TestHelper.get_test_files(os.path.abspath("coalib/tests")))
if not args.ignore_bear_tests:
files.extend(TestHelper.get_test_files(os.path.abspath("bears/tests")))
exit(TestHelper.execute_python3_files(files, args.cover))
|
Test execution: Add bear tests
|
Test execution: Add bear tests
|
Python
|
agpl-3.0
|
Asalle/coala,incorrectusername/coala,AdeshAtole/coala,stevemontana1980/coala,Balaji2198/coala,NalinG/coala,abhiroyg/coala,SambitAcharya/coala,tushar-rishav/coala,RJ722/coala,arjunsinghy96/coala,netman92/coala,scriptnull/coala,damngamerz/coala,d6e/coala,FeodorFitsner/coala,stevemontana1980/coala,abhiroyg/coala,saurabhiiit/coala,karansingh1559/coala,jayvdb/coala,scriptnull/coala,Uran198/coala,djkonro/coala,kartikeys98/coala,jayvdb/coala,Asnelchristian/coala,nemaniarjun/coala,aptrishu/coala,vinc456/coala,Uran198/coala,sudheesh001/coala,andreimacavei/coala,MariosPanag/coala,arush0311/coala,lonewolf07/coala,SanketDG/coala,MattAllmendinger/coala,coala/coala,scriptnull/coala,scriptnull/coala,aptrishu/coala,NalinG/coala,coala/coala,RJ722/coala,impmihai/coala,scriptnull/coala,MariosPanag/coala,sudheesh001/coala,arafsheikh/coala,yashLadha/coala,NiklasMM/coala,MattAllmendinger/coala,CruiseDevice/coala,CruiseDevice/coala,arush0311/coala,MariosPanag/coala,SambitAcharya/coala,dagdaggo/coala,yashtrivedi96/coala,SanketDG/coala,incorrectusername/coala,Tanmay28/coala,yashtrivedi96/coala,coala-analyzer/coala,yashtrivedi96/coala,Nosferatul/coala,Balaji2198/coala,Tanmay28/coala,Shade5/coala,dagdaggo/coala,lonewolf07/coala,rimacone/testing2,AdeshAtole/coala,shreyans800755/coala,andreimacavei/coala,stevemontana1980/coala,andreimacavei/coala,yland/coala,karansingh1559/coala,mr-karan/coala,swatilodha/coala,scottbelden/coala,ayushin78/coala,AdeshAtole/coala,svsn2117/coala,ayushin78/coala,Tanmay28/coala,AbdealiJK/coala,netman92/coala,JohnS-01/coala,swatilodha/coala,coala-analyzer/coala,NalinG/coala,impmihai/coala,FeodorFitsner/coala,NiklasMM/coala,vinc456/coala,SambitAcharya/coala,tltuan/coala,saurabhiiit/coala,scottbelden/coala,SambitAcharya/coala,shreyans800755/coala,rresol/coala,Asnelchristian/coala,tltuan/coala,swatilodha/coala,scriptnull/coala,Shade5/coala,CruiseDevice/coala,Tanmay28/coala,yashLadha/coala,Asalle/coala,djkonro/coala,vinc456/coala,refeed/coala,AbdealiJK/coala,d6e/coala,mr-karan/coala,dagdaggo/coala,rimacone/testing2,rresol/coala,mr-karan/coala,Uran198/coala,Asnelchristian/coala,svsn2117/coala,lonewolf07/coala,SanketDG/coala,meetmangukiya/coala,yashLadha/coala,sophiavanvalkenburg/coala,JohnS-01/coala,kartikeys98/coala,ManjiriBirajdar/coala,yland/coala,Balaji2198/coala,Tanmay28/coala,rresol/coala,Nosferatul/coala,arjunsinghy96/coala,arjunsinghy96/coala,incorrectusername/coala,meetmangukiya/coala,coala-analyzer/coala,coala/coala,RJ722/coala,jayvdb/coala,Tanmay28/coala,FeodorFitsner/coala,refeed/coala,scottbelden/coala,sophiavanvalkenburg/coala,Nosferatul/coala,damngamerz/coala,NalinG/coala,arush0311/coala,ayushin78/coala,d6e/coala,sagark123/coala,MattAllmendinger/coala,sils1297/coala,impmihai/coala,Tanmay28/coala,Tanmay28/coala,NalinG/coala,netman92/coala,sudheesh001/coala,SambitAcharya/coala,damngamerz/coala,aptrishu/coala,JohnS-01/coala,ManjiriBirajdar/coala,scriptnull/coala,tushar-rishav/coala,saurabhiiit/coala,arafsheikh/coala,AbdealiJK/coala,NiklasMM/coala,Asalle/coala,sils1297/coala,sagark123/coala,SambitAcharya/coala,SambitAcharya/coala,shreyans800755/coala,sophiavanvalkenburg/coala,nemaniarjun/coala,Shade5/coala,sagark123/coala,NalinG/coala,rimacone/testing2,svsn2117/coala,tushar-rishav/coala,arafsheikh/coala,djkonro/coala,ManjiriBirajdar/coala,refeed/coala,NalinG/coala,sils1297/coala,nemaniarjun/coala,abhiroyg/coala,meetmangukiya/coala,kartikeys98/coala,yland/coala,karansingh1559/coala,tltuan/coala
|
python
|
## Code Before:
import os
import sys
import argparse
from coalib.tests.TestHelper import TestHelper
def show_help():
print("Usage: {name} [OPTIONS]".format(name=sys.argv[0]))
print()
print("--help : Show this help text")
print("--cover : Use coverage to get statement and branch coverage of tests")
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--cover", help="measure code coverage", action="store_true")
args = parser.parse_args()
files = TestHelper.get_test_files(os.path.abspath("coalib/tests"))
exit(TestHelper.execute_python3_files(files, args.cover))
## Instruction:
Test execution: Add bear tests
## Code After:
import os
import sys
import argparse
from coalib.tests.TestHelper import TestHelper
def show_help():
print("Usage: {name} [OPTIONS]".format(name=sys.argv[0]))
print()
print("--help : Show this help text")
print("--cover : Use coverage to get statement and branch coverage of tests")
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--cover", help="measure code coverage", action="store_true")
parser.add_argument("-b", "--ignore-bear-tests", help="ignore bear tests", action="store_true")
parser.add_argument("-m", "--ignore-main-tests", help="ignore main program tests", action="store_true")
args = parser.parse_args()
files = []
if not args.ignore_main_tests:
files.extend(TestHelper.get_test_files(os.path.abspath("coalib/tests")))
if not args.ignore_bear_tests:
files.extend(TestHelper.get_test_files(os.path.abspath("bears/tests")))
exit(TestHelper.execute_python3_files(files, args.cover))
|
// ... existing code ...
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--cover", help="measure code coverage", action="store_true")
parser.add_argument("-b", "--ignore-bear-tests", help="ignore bear tests", action="store_true")
parser.add_argument("-m", "--ignore-main-tests", help="ignore main program tests", action="store_true")
args = parser.parse_args()
files = []
if not args.ignore_main_tests:
files.extend(TestHelper.get_test_files(os.path.abspath("coalib/tests")))
if not args.ignore_bear_tests:
files.extend(TestHelper.get_test_files(os.path.abspath("bears/tests")))
exit(TestHelper.execute_python3_files(files, args.cover))
// ... rest of the code ...
|
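For clarity, the two new flags land on the parsed namespace with underscores, which is how the filtering logic above reads them; the argument list below is illustrative.

# Illustration of how the new flags map onto the parsed namespace.
args = parser.parse_args(["--cover", "--ignore-bear-tests"])
assert args.cover is True
assert args.ignore_bear_tests is True     # argparse turns the dashes into underscores
assert args.ignore_main_tests is False    # store_true flags default to False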
039a19032bebd1e6852990f8aacf05042f000070
|
args.py
|
args.py
|
import inspect
def argspec_set(func):
if not hasattr(func, 'argspec'):
func.argspec = inspect.getargspec(func)
def argspec_iscompat(func, lenargs):
spec = func.argspec
minargs = len(spec.args) - len(spec.defaults or ())
maxargs = len(spec.args) if spec.varargs is None else None
return lenargs >= minargs and (maxargs is None or lenargs <= maxargs)
class ArgCountError(Exception):
pass
|
import inspect
def argspec_set(func):
if not hasattr(func, 'argspec'):
func.argspec = inspect.getargspec(func)
def argspec_iscompat(func, lenargs):
spec = func.argspec
minargs = len(spec.args) - len(spec.defaults or ())
maxargs = len(spec.args) if spec.varargs is None else float("infinity")
return minargs <= lenargs <= maxargs
class ArgCountError(Exception):
pass
|
Simplify function arg compatibility check
|
Simplify function arg compatibility check
|
Python
|
mit
|
infogulch/pyspades-events
|
python
|
## Code Before:
import inspect
def argspec_set(func):
if not hasattr(func, 'argspec'):
func.argspec = inspect.getargspec(func)
def argspec_iscompat(func, lenargs):
spec = func.argspec
minargs = len(spec.args) - len(spec.defaults or ())
maxargs = len(spec.args) if spec.varargs is None else None
return lenargs >= minargs and (maxargs is None or lenargs <= maxargs)
class ArgCountError(Exception):
pass
## Instruction:
Simplify function arg compatibility check
## Code After:
import inspect
def argspec_set(func):
if not hasattr(func, 'argspec'):
func.argspec = inspect.getargspec(func)
def argspec_iscompat(func, lenargs):
spec = func.argspec
minargs = len(spec.args) - len(spec.defaults or ())
maxargs = len(spec.args) if spec.varargs is None else float("infinity")
return minargs <= lenargs <= maxargs
class ArgCountError(Exception):
pass
|
// ... existing code ...
def argspec_iscompat(func, lenargs):
spec = func.argspec
minargs = len(spec.args) - len(spec.defaults or ())
maxargs = len(spec.args) if spec.varargs is None else float("infinity")
return minargs <= lenargs <= maxargs
class ArgCountError(Exception):
pass
// ... rest of the code ...
|
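A small sketch of how the simplified bounds check behaves for fixed-arity and varargs callables; the sample functions are hypothetical.

# Sketch: arity checks with the simplified comparison (sample functions are made up).
def fixed(a, b, c=1):
    pass

def variadic(a, *rest):
    pass

argspec_set(fixed)
argspec_set(variadic)
assert argspec_iscompat(fixed, 2)         # 2 <= 2 <= 3
assert not argspec_iscompat(fixed, 4)     # above maxargs
assert argspec_iscompat(variadic, 5)      # maxargs is float("infinity") with *args
assert not argspec_iscompat(variadic, 0)  # below minargs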
5afd7d6884b996be0a0569541f711d479f9e6c32
|
Project2/apptbook/src/main/java/edu/pdx/cs410J/chances/TextParser.java
|
Project2/apptbook/src/main/java/edu/pdx/cs410J/chances/TextParser.java
|
package edu.pdx.cs410J.chances;
import edu.pdx.cs410J.AbstractAppointmentBook;
import edu.pdx.cs410J.AppointmentBookParser;
import edu.pdx.cs410J.ParserException;
/**
* @author chancesnow
*/
public class TextParser implements AppointmentBookParser
{
@Override
public AbstractAppointmentBook parse() throws ParserException
{
return null;
}
}
|
package edu.pdx.cs410J.chances;
import edu.pdx.cs410J.AbstractAppointmentBook;
import edu.pdx.cs410J.AppointmentBookParser;
import edu.pdx.cs410J.ParserException;
import java.io.File;
/**
* @author chancesnow
*/
public class TextParser implements AppointmentBookParser
{
private File file;
public TextParser(String filePath)
{
file = new File(filePath);
if (!file.exists() || !file.isDirectory()) {
file = null;
}
}
@Override
public AbstractAppointmentBook parse() throws ParserException
{
return null;
}
}
|
Add constructor accepting file path
|
Add constructor accepting file path
|
Java
|
mit
|
chances/cs410-adv-java,chances/cs410-adv-java,chances/cs410-adv-java
|
java
|
## Code Before:
package edu.pdx.cs410J.chances;
import edu.pdx.cs410J.AbstractAppointmentBook;
import edu.pdx.cs410J.AppointmentBookParser;
import edu.pdx.cs410J.ParserException;
/**
* @author chancesnow
*/
public class TextParser implements AppointmentBookParser
{
@Override
public AbstractAppointmentBook parse() throws ParserException
{
return null;
}
}
## Instruction:
Add constructor accepting file path
## Code After:
package edu.pdx.cs410J.chances;
import edu.pdx.cs410J.AbstractAppointmentBook;
import edu.pdx.cs410J.AppointmentBookParser;
import edu.pdx.cs410J.ParserException;
import java.io.File;
/**
* @author chancesnow
*/
public class TextParser implements AppointmentBookParser
{
private File file;
public TextParser(String filePath)
{
file = new File(filePath);
if (!file.exists() || !file.isDirectory()) {
file = null;
}
}
@Override
public AbstractAppointmentBook parse() throws ParserException
{
return null;
}
}
|
...
import edu.pdx.cs410J.AppointmentBookParser;
import edu.pdx.cs410J.ParserException;
import java.io.File;
/**
* @author chancesnow
*/
public class TextParser implements AppointmentBookParser
{
private File file;
public TextParser(String filePath)
{
file = new File(filePath);
if (!file.exists() || !file.isDirectory()) {
file = null;
}
}
@Override
public AbstractAppointmentBook parse() throws ParserException
{
...
|
81b6a138c476084f9ddd6063f31d3efd0ba6e2cf
|
start.py
|
start.py
|
import argparse
import logging
import os
import sys
from twisted.internet import reactor
from desertbot.config import Config, ConfigError
from desertbot.factory import DesertBotFactory
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='An IRC bot written in Python.')
parser.add_argument('-c', '--config',
help='the config file to read from',
type=str, required=True)
cmdArgs = parser.parse_args()
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Set up logging for stdout on the root 'desertbot' logger
# Modules can then just add more handlers to the root logger to capture all logs to files in various ways
rootLogger = logging.getLogger('desertbot')
rootLogger.setLevel(logging.INFO) # TODO change this from config value once it's loaded
logFormatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s', '%H:%M:%S')
streamHandler = logging.StreamHandler(stream=sys.stdout)
streamHandler.setFormatter(logFormatter)
rootLogger.addHandler(streamHandler)
config = Config(cmdArgs.config)
try:
config.loadConfig()
except ConfigError:
rootLogger.exception("Failed to load configuration file {}".format(cmdArgs.config))
else:
factory = DesertBotFactory(config)
reactor.run()
|
import argparse
import logging
import os
import sys
from twisted.internet import reactor
from desertbot.config import Config, ConfigError
from desertbot.factory import DesertBotFactory
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='An IRC bot written in Python.')
parser.add_argument('-c', '--config',
help='the config file to read from',
type=str, required=True)
parser.add_argument('-l', '--loglevel',
help='the logging level (default INFO)',
type=str, default='INFO')
cmdArgs = parser.parse_args()
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Set up logging for stdout on the root 'desertbot' logger
# Modules can then just add more handlers to the root logger to capture all logs to files in various ways
rootLogger = logging.getLogger('desertbot')
numericLevel = getattr(logging, cmdArgs.loglevel.upper(), None)
if isinstance(numericLevel, int):
rootLogger.setLevel(numericLevel)
else:
raise ValueError('Invalid log level {}'.format(cmdArgs.loglevel))
logFormatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s', '%H:%M:%S')
streamHandler = logging.StreamHandler(stream=sys.stdout)
streamHandler.setFormatter(logFormatter)
rootLogger.addHandler(streamHandler)
config = Config(cmdArgs.config)
try:
config.loadConfig()
except ConfigError:
rootLogger.exception("Failed to load configuration file {}".format(cmdArgs.config))
else:
factory = DesertBotFactory(config)
reactor.run()
|
Make the logging level configurable
|
Make the logging level configurable
|
Python
|
mit
|
DesertBot/DesertBot
|
python
|
## Code Before:
import argparse
import logging
import os
import sys
from twisted.internet import reactor
from desertbot.config import Config, ConfigError
from desertbot.factory import DesertBotFactory
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='An IRC bot written in Python.')
parser.add_argument('-c', '--config',
help='the config file to read from',
type=str, required=True)
cmdArgs = parser.parse_args()
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Set up logging for stdout on the root 'desertbot' logger
# Modules can then just add more handlers to the root logger to capture all logs to files in various ways
rootLogger = logging.getLogger('desertbot')
rootLogger.setLevel(logging.INFO) # TODO change this from config value once it's loaded
logFormatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s', '%H:%M:%S')
streamHandler = logging.StreamHandler(stream=sys.stdout)
streamHandler.setFormatter(logFormatter)
rootLogger.addHandler(streamHandler)
config = Config(cmdArgs.config)
try:
config.loadConfig()
except ConfigError:
rootLogger.exception("Failed to load configuration file {}".format(cmdArgs.config))
else:
factory = DesertBotFactory(config)
reactor.run()
## Instruction:
Make the logging level configurable
## Code After:
import argparse
import logging
import os
import sys
from twisted.internet import reactor
from desertbot.config import Config, ConfigError
from desertbot.factory import DesertBotFactory
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='An IRC bot written in Python.')
parser.add_argument('-c', '--config',
help='the config file to read from',
type=str, required=True)
parser.add_argument('-l', '--loglevel',
help='the logging level (default INFO)',
type=str, default='INFO')
cmdArgs = parser.parse_args()
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Set up logging for stdout on the root 'desertbot' logger
# Modules can then just add more handlers to the root logger to capture all logs to files in various ways
rootLogger = logging.getLogger('desertbot')
numericLevel = getattr(logging, cmdArgs.loglevel.upper(), None)
if isinstance(numericLevel, int):
rootLogger.setLevel(numericLevel)
else:
raise ValueError('Invalid log level {}'.format(cmdArgs.loglevel))
logFormatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s', '%H:%M:%S')
streamHandler = logging.StreamHandler(stream=sys.stdout)
streamHandler.setFormatter(logFormatter)
rootLogger.addHandler(streamHandler)
config = Config(cmdArgs.config)
try:
config.loadConfig()
except ConfigError:
rootLogger.exception("Failed to load configuration file {}".format(cmdArgs.config))
else:
factory = DesertBotFactory(config)
reactor.run()
|
// ... existing code ...
parser.add_argument('-c', '--config',
help='the config file to read from',
type=str, required=True)
parser.add_argument('-l', '--loglevel',
help='the logging level (default INFO)',
type=str, default='INFO')
cmdArgs = parser.parse_args()
os.chdir(os.path.dirname(os.path.abspath(__file__)))
// ... modified code ...
# Set up logging for stdout on the root 'desertbot' logger
# Modules can then just add more handlers to the root logger to capture all logs to files in various ways
rootLogger = logging.getLogger('desertbot')
numericLevel = getattr(logging, cmdArgs.loglevel.upper(), None)
if isinstance(numericLevel, int):
rootLogger.setLevel(numericLevel)
else:
raise ValueError('Invalid log level {}'.format(cmdArgs.loglevel))
logFormatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s', '%H:%M:%S')
// ... rest of the code ...
|
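A minimal, stand-alone sketch of how the new `--loglevel` flag resolves to a numeric level via `getattr`; the parser below is illustrative only, not the bot's real entry point:

import argparse
import logging

parser = argparse.ArgumentParser()
parser.add_argument('-l', '--loglevel', type=str, default='INFO')
args = parser.parse_args(['-l', 'debug'])               # simulate "python start.py -l debug"

level = getattr(logging, args.loglevel.upper(), None)    # 'debug' -> logging.DEBUG (10)
if not isinstance(level, int):
    raise ValueError('Invalid log level {}'.format(args.loglevel))

logging.basicConfig(level=level)
logging.debug('visible because the resolved level is %d', level)

Misspelled names fall through to None and raise early, instead of silently keeping a hard-coded level.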
6de8011f0e5301d6d3b42efde3f90e2d697be2ed
|
src/test/java/net/fabricmc/test/mixin/MixinGuiMain.java
|
src/test/java/net/fabricmc/test/mixin/MixinGuiMain.java
|
/*
* Copyright 2016 FabricMC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.fabricmc.test.mixin;
import net.minecraft.client.gui.GuiScreen;
import net.minecraft.client.gui.impl.GuiMainMenu;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
@Mixin(value = GuiMainMenu.class, remap = false)
public abstract class MixinGuiMain extends GuiScreen {
@Inject(method = "drawScreen(IIF)V", at = @At("RETURN"))
public void draw(int a1, int a2, float a3, CallbackInfo info) {
this.fontRenderer.drawString("Fabric Test Mod", 2, this.height - 30, -1);
}
}
|
/*
* Copyright 2016 FabricMC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.fabricmc.test.mixin;
import net.minecraft.client.gui.GuiScreen;
import net.minecraft.client.gui.screen.menu.GuiMainMenu;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
@Mixin(value = GuiMainMenu.class, remap = false)
public abstract class MixinGuiMain extends GuiScreen {
@Inject(method = "drawScreen(IIF)V", at = @At("RETURN"))
public void draw(int a1, int a2, float a3, CallbackInfo info) {
this.fontRenderer.drawString("Fabric Test Mod", 2, this.height - 30, -1);
}
}
|
Fix test mod for latest mappings
|
Fix test mod for latest mappings
|
Java
|
apache-2.0
|
FabricMC/fabric-base
|
java
|
## Code Before:
/*
* Copyright 2016 FabricMC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.fabricmc.test.mixin;
import net.minecraft.client.gui.GuiScreen;
import net.minecraft.client.gui.impl.GuiMainMenu;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
@Mixin(value = GuiMainMenu.class, remap = false)
public abstract class MixinGuiMain extends GuiScreen {
@Inject(method = "drawScreen(IIF)V", at = @At("RETURN"))
public void draw(int a1, int a2, float a3, CallbackInfo info) {
this.fontRenderer.drawString("Fabric Test Mod", 2, this.height - 30, -1);
}
}
## Instruction:
Fix test mod for latest mappings
## Code After:
/*
* Copyright 2016 FabricMC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.fabricmc.test.mixin;
import net.minecraft.client.gui.GuiScreen;
import net.minecraft.client.gui.screen.menu.GuiMainMenu;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
@Mixin(value = GuiMainMenu.class, remap = false)
public abstract class MixinGuiMain extends GuiScreen {
@Inject(method = "drawScreen(IIF)V", at = @At("RETURN"))
public void draw(int a1, int a2, float a3, CallbackInfo info) {
this.fontRenderer.drawString("Fabric Test Mod", 2, this.height - 30, -1);
}
}
|
# ... existing code ...
package net.fabricmc.test.mixin;
import net.minecraft.client.gui.GuiScreen;
import net.minecraft.client.gui.screen.menu.GuiMainMenu;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
# ... rest of the code ...
|
a527a21a77165b113d615fe4b1984cc7a9e3aa12
|
src/main/java/com/hp/autonomy/types/requests/idol/actions/answer/params/GetResourcesSortParam.java
|
src/main/java/com/hp/autonomy/types/requests/idol/actions/answer/params/GetResourcesSortParam.java
|
/*
* Copyright 2015 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.types.requests.idol.actions.answer.params;
@SuppressWarnings({"WeakerAccess", "unused"})
public enum GetResourcesSortParam {
ID("id"),
CREATED_TIME("createdTime"),
METADATA("metadata"),
MODIFIED_TIME("modifiedTime"),
POPULARITY("popularity"),
RANDOM("random"),
STATUS("status");
private final String value;
GetResourcesSortParam(final String value) {
this.value = value;
}
public String value() {
return value;
}
}
|
/*
* Copyright 2015 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.types.requests.idol.actions.answer.params;
@SuppressWarnings({"WeakerAccess", "unused"})
public enum GetResourcesSortParam {
ID("id"),
CREATED_TIME("createdTime"),
LIKELIHOOD("likelihood"),
METADATA("metadata"),
MODIFIED_TIME("modifiedTime"),
POPULARITY("popularity"),
RANDOM("random"),
STATUS("status");
private final String value;
GetResourcesSortParam(final String value) {
this.value = value;
}
public String value() {
return value;
}
}
|
Add support for AnswerServer likelihood
|
Add support for AnswerServer likelihood
[rev. matthew.gordon]
|
Java
|
mit
|
hpe-idol/java-aci-types,hpautonomy/java-aci-types,hpe-idol/java-aci-types,hpautonomy/java-aci-types
|
java
|
## Code Before:
/*
* Copyright 2015 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.types.requests.idol.actions.answer.params;
@SuppressWarnings({"WeakerAccess", "unused"})
public enum GetResourcesSortParam {
ID("id"),
CREATED_TIME("createdTime"),
METADATA("metadata"),
MODIFIED_TIME("modifiedTime"),
POPULARITY("popularity"),
RANDOM("random"),
STATUS("status");
private final String value;
GetResourcesSortParam(final String value) {
this.value = value;
}
public String value() {
return value;
}
}
## Instruction:
Add support for AnswerServer likelihood
[rev. matthew.gordon]
## Code After:
/*
* Copyright 2015 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.types.requests.idol.actions.answer.params;
@SuppressWarnings({"WeakerAccess", "unused"})
public enum GetResourcesSortParam {
ID("id"),
CREATED_TIME("createdTime"),
LIKELIHOOD("likelihood"),
METADATA("metadata"),
MODIFIED_TIME("modifiedTime"),
POPULARITY("popularity"),
RANDOM("random"),
STATUS("status");
private final String value;
GetResourcesSortParam(final String value) {
this.value = value;
}
public String value() {
return value;
}
}
|
# ... existing code ...
public enum GetResourcesSortParam {
ID("id"),
CREATED_TIME("createdTime"),
LIKELIHOOD("likelihood"),
METADATA("metadata"),
MODIFIED_TIME("modifiedTime"),
POPULARITY("popularity"),
# ... rest of the code ...
|
9660fb734ecf2ad2c181eba790cdd2ddc9ed423e
|
cyder/core/system/forms.py
|
cyder/core/system/forms.py
|
from django import forms
from cyder.base.eav.forms import get_eav_form
from cyder.base.mixins import UsabilityFormMixin
from cyder.core.system.models import System, SystemAV
class SystemForm(forms.ModelForm):
class Meta:
model = System
class ExtendedSystemForm(forms.ModelForm, UsabilityFormMixin):
interface_type = forms.ChoiceField(
widget=forms.RadioSelect, choices=(
('Static', 'Static Interface'),
('Dynamic', 'Dynamic Interface')))
class Meta:
model = System
SystemAVForm = get_eav_form(SystemAV, System)
|
from django import forms
from cyder.base.eav.forms import get_eav_form
from cyder.base.mixins import UsabilityFormMixin
from cyder.core.system.models import System, SystemAV
class SystemForm(forms.ModelForm):
class Meta:
model = System
class ExtendedSystemForm(forms.ModelForm, UsabilityFormMixin):
interface_type = forms.ChoiceField(
widget=forms.RadioSelect, choices=(
('static_interface', 'Static Interface'),
('dynamic_interface', 'Dynamic Interface')))
class Meta:
model = System
SystemAVForm = get_eav_form(SystemAV, System)
|
Fix system form interface_type choices
|
Fix system form interface_type choices
|
Python
|
bsd-3-clause
|
murrown/cyder,drkitty/cyder,OSU-Net/cyder,akeym/cyder,murrown/cyder,OSU-Net/cyder,murrown/cyder,akeym/cyder,murrown/cyder,drkitty/cyder,zeeman/cyder,zeeman/cyder,OSU-Net/cyder,akeym/cyder,zeeman/cyder,OSU-Net/cyder,drkitty/cyder,akeym/cyder,drkitty/cyder,zeeman/cyder
|
python
|
## Code Before:
from django import forms
from cyder.base.eav.forms import get_eav_form
from cyder.base.mixins import UsabilityFormMixin
from cyder.core.system.models import System, SystemAV
class SystemForm(forms.ModelForm):
class Meta:
model = System
class ExtendedSystemForm(forms.ModelForm, UsabilityFormMixin):
interface_type = forms.ChoiceField(
widget=forms.RadioSelect, choices=(
('Static', 'Static Interface'),
('Dynamic', 'Dynamic Interface')))
class Meta:
model = System
SystemAVForm = get_eav_form(SystemAV, System)
## Instruction:
Fix system form interface_type choices
## Code After:
from django import forms
from cyder.base.eav.forms import get_eav_form
from cyder.base.mixins import UsabilityFormMixin
from cyder.core.system.models import System, SystemAV
class SystemForm(forms.ModelForm):
class Meta:
model = System
class ExtendedSystemForm(forms.ModelForm, UsabilityFormMixin):
interface_type = forms.ChoiceField(
widget=forms.RadioSelect, choices=(
('static_interface', 'Static Interface'),
('dynamic_interface', 'Dynamic Interface')))
class Meta:
model = System
SystemAVForm = get_eav_form(SystemAV, System)
|
// ... existing code ...
class ExtendedSystemForm(forms.ModelForm, UsabilityFormMixin):
interface_type = forms.ChoiceField(
widget=forms.RadioSelect, choices=(
('static_interface', 'Static Interface'),
('dynamic_interface', 'Dynamic Interface')))
class Meta:
model = System
// ... rest of the code ...
|
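The fix replaces display labels with the machine values Django stores and validates against; a framework-free sketch of that (value, label) convention, with a hypothetical helper standing in for ChoiceField validation:

INTERFACE_CHOICES = (
    ('static_interface', 'Static Interface'),
    ('dynamic_interface', 'Dynamic Interface'),
)

def is_valid_choice(submitted, choices=INTERFACE_CHOICES):
    # Django validates submitted data against the stored value, never the label
    return any(submitted == stored for stored, _label in choices)

print(is_valid_choice('static_interface'))  # True  -- matches a stored value
print(is_valid_choice('Static'))            # False -- 'Static' was only ever a label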
a2af3446bbb9ff2cc46fdde4a96c539f57a972f9
|
tests/integration/directconnect/test_directconnect.py
|
tests/integration/directconnect/test_directconnect.py
|
import boto
from unittest import TestCase
class DirectConnectTest(TestCase):
"""
A very basic test to make sure signatures and
basic calls work.
"""
def test_basic(self):
conn = boto.connect_directconnect()
response = conn.describe_connections()
self.assertTrue(response)
self.assertTrue('connections' in response)
self.assertIsInstance(response['connections'], list)
|
import boto
from boto.compat import unittest
class DirectConnectTest(unittest.TestCase):
"""
A very basic test to make sure signatures and
basic calls work.
"""
def test_basic(self):
conn = boto.connect_directconnect()
response = conn.describe_connections()
self.assertTrue(response)
self.assertTrue('connections' in response)
self.assertIsInstance(response['connections'], list)
|
Fix integration test for Python 2.6
|
Fix integration test for Python 2.6
|
Python
|
mit
|
Asana/boto,vijaylbais/boto,felix-d/boto,zachmullen/boto,vishnugonela/boto,weka-io/boto,revmischa/boto,weebygames/boto,nexusz99/boto,TiVoMaker/boto,garnaat/boto,alex/boto,ocadotechnology/boto,campenberger/boto,alex/boto,ddzialak/boto,awatts/boto,appneta/boto,clouddocx/boto,disruptek/boto,j-carl/boto,kouk/boto,darjus-amzn/boto,nikhilraog/boto,janslow/boto,lra/boto,acourtney2015/boto,pfhayes/boto,dimdung/boto,varunarya10/boto,s0enke/boto,zzzirk/boto,podhmo/boto,serviceagility/boto,tpodowd/boto,alfredodeza/boto,jindongh/boto,khagler/boto,drbild/boto,abridgett/boto,trademob/boto,ramitsurana/boto,ryansb/boto,nishigori/boto,rosmo/boto,shipci/boto,rayluo/boto,Pretio/boto,kouk/boto,SaranyaKarthikeyan/boto,drbild/boto,stevenbrichards/boto,appneta/boto,tpodowd/boto,disruptek/boto,bleib1dj/boto,ekalosak/boto,shaunbrady/boto,israelbenatar/boto,elainexmas/boto,jotes/boto,bryx-inc/boto,yangchaogit/boto
|
python
|
## Code Before:
import boto
from unittest import TestCase
class DirectConnectTest(TestCase):
"""
A very basic test to make sure signatures and
basic calls work.
"""
def test_basic(self):
conn = boto.connect_directconnect()
response = conn.describe_connections()
self.assertTrue(response)
self.assertTrue('connections' in response)
self.assertIsInstance(response['connections'], list)
## Instruction:
Fix integration test for Python 2.6
## Code After:
import boto
from boto.compat import unittest
class DirectConnectTest(unittest.TestCase):
"""
A very basic test to make sure signatures and
basic calls work.
"""
def test_basic(self):
conn = boto.connect_directconnect()
response = conn.describe_connections()
self.assertTrue(response)
self.assertTrue('connections' in response)
self.assertIsInstance(response['connections'], list)
|
...
import boto
from boto.compat import unittest
class DirectConnectTest(unittest.TestCase):
"""
A very basic test to make sure signatures and
basic calls work.
...
|
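The import swap matters because `assertIsInstance` only arrived in Python 2.7's unittest, so a compat module can fall back to the unittest2 backport on 2.6. The shim below is only a guess at the general shape of such a module -- the real contents of `boto.compat` are not shown in this record:

# hypothetical compat.py -- one common shape for such a shim
try:
    import unittest2 as unittest     # Python 2.6: backport carrying assertIsInstance
except ImportError:
    import unittest                  # Python 2.7+: the stdlib already has it

class SmokeTest(unittest.TestCase):
    def test_list_type(self):
        self.assertIsInstance([], list)

if __name__ == '__main__':
    unittest.main()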
10f931ab6831f9fb403912a0d0d357d35561d099
|
subliminal/__init__.py
|
subliminal/__init__.py
|
from .api import list_subtitles, download_subtitles
from .async import Pool
from .infos import __version__
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
__all__ = ['list_subtitles', 'download_subtitles', 'Pool']
logging.getLogger(__name__).addHandler(NullHandler())
|
from .api import list_subtitles, download_subtitles
from .async import Pool
from .core import (SERVICES, LANGUAGE_INDEX, SERVICE_INDEX, SERVICE_CONFIDENCE,
MATCHING_CONFIDENCE)
from .infos import __version__
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
__all__ = ['SERVICES', 'LANGUAGE_INDEX', 'SERVICE_INDEX', 'SERVICE_CONFIDENCE',
'MATCHING_CONFIDENCE', 'list_subtitles', 'download_subtitles', 'Pool']
logging.getLogger(__name__).addHandler(NullHandler())
|
Add some core components to subliminal
|
Add some core components to subliminal
|
Python
|
mit
|
ofir123/subliminal,fernandog/subliminal,neo1691/subliminal,nvbn/subliminal,bogdal/subliminal,ravselj/subliminal,SickRage/subliminal,oxan/subliminal,t4lwh/subliminal,Elettronik/subliminal,kbkailashbagaria/subliminal,juanmhidalgo/subliminal,hpsbranco/subliminal,h3llrais3r/subliminal,ratoaq2/subliminal,getzze/subliminal,Diaoul/subliminal,pums974/subliminal,goll/subliminal
|
python
|
## Code Before:
from .api import list_subtitles, download_subtitles
from .async import Pool
from .infos import __version__
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
__all__ = ['list_subtitles', 'download_subtitles', 'Pool']
logging.getLogger(__name__).addHandler(NullHandler())
## Instruction:
Add some core components to subliminal
## Code After:
from .api import list_subtitles, download_subtitles
from .async import Pool
from .core import (SERVICES, LANGUAGE_INDEX, SERVICE_INDEX, SERVICE_CONFIDENCE,
MATCHING_CONFIDENCE)
from .infos import __version__
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
__all__ = ['SERVICES', 'LANGUAGE_INDEX', 'SERVICE_INDEX', 'SERVICE_CONFIDENCE',
'MATCHING_CONFIDENCE', 'list_subtitles', 'download_subtitles', 'Pool']
logging.getLogger(__name__).addHandler(NullHandler())
|
# ... existing code ...
from .api import list_subtitles, download_subtitles
from .async import Pool
from .core import (SERVICES, LANGUAGE_INDEX, SERVICE_INDEX, SERVICE_CONFIDENCE,
MATCHING_CONFIDENCE)
from .infos import __version__
import logging
try:
# ... modified code ...
pass
__all__ = ['SERVICES', 'LANGUAGE_INDEX', 'SERVICE_INDEX', 'SERVICE_CONFIDENCE',
'MATCHING_CONFIDENCE', 'list_subtitles', 'download_subtitles', 'Pool']
logging.getLogger(__name__).addHandler(NullHandler())
# ... rest of the code ...
|
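`from subliminal import *` only re-exports the names listed in `__all__`, which is why the new core constants have to be added to both the import and the list. A stand-alone illustration of that rule, using a made-up module object:

import types

mod = types.ModuleType('fake_subliminal')      # stand-in module, the name is invented
mod.SERVICES = {'opensubtitles': object()}
mod._internal = 'hidden'
mod.__all__ = ['SERVICES']

# emulate "from fake_subliminal import *"
star_imported = {name: getattr(mod, name) for name in mod.__all__}
print(sorted(star_imported))                   # ['SERVICES'] -- only what __all__ names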
c0b9c9712e464f304bee7c63bfd6b197a1c5fb0f
|
cmsplugin_bootstrap_carousel/cms_plugins.py
|
cmsplugin_bootstrap_carousel/cms_plugins.py
|
import re
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cmsplugin_bootstrap_carousel.models import *
from django.utils.translation import ugettext as _
from django.contrib import admin
from django.forms import ModelForm, ValidationError
class CarouselForm(ModelForm):
class Meta:
model = Carousel
def clean_domid(self):
data = self.cleaned_data['domid']
if not re.match(r'^[a-zA-Z_]\w*$', data):
raise ValidationError(_("The name must be a single word beginning with a letter"))
return data
class CarouselItemInline(admin.StackedInline):
model = CarouselItem
class CarouselPlugin(CMSPluginBase):
model = Carousel
form = CarouselForm
name = _("Carousel")
render_template = "cmsplugin_bootstrap_carousel/carousel.html"
inlines = [
CarouselItemInline,
]
def render(self, context, instance, placeholder):
context.update({'instance' : instance})
return context
plugin_pool.register_plugin(CarouselPlugin)
|
import re
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cmsplugin_bootstrap_carousel.models import *
from django.utils.translation import ugettext as _
from django.contrib import admin
from django.forms import ModelForm, ValidationError
class CarouselForm(ModelForm):
class Meta:
model = Carousel
def clean_domid(self):
data = self.cleaned_data['domid']
if not re.match(r'^[a-zA-Z_]\w*$', data):
raise ValidationError(_("The name must be a single word beginning with a letter"))
return data
class CarouselItemInline(admin.StackedInline):
model = CarouselItem
extra = 0
class CarouselPlugin(CMSPluginBase):
model = Carousel
form = CarouselForm
name = _("Carousel")
render_template = "cmsplugin_bootstrap_carousel/carousel.html"
inlines = [
CarouselItemInline,
]
def render(self, context, instance, placeholder):
context.update({'instance' : instance})
return context
plugin_pool.register_plugin(CarouselPlugin)
|
Change extra from 3 to 0.
|
Change extra from 3 to 0.
|
Python
|
bsd-3-clause
|
360youlun/cmsplugin-bootstrap-carousel,360youlun/cmsplugin-bootstrap-carousel
|
python
|
## Code Before:
import re
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cmsplugin_bootstrap_carousel.models import *
from django.utils.translation import ugettext as _
from django.contrib import admin
from django.forms import ModelForm, ValidationError
class CarouselForm(ModelForm):
class Meta:
model = Carousel
def clean_domid(self):
data = self.cleaned_data['domid']
if not re.match(r'^[a-zA-Z_]\w*$', data):
raise ValidationError(_("The name must be a single word beginning with a letter"))
return data
class CarouselItemInline(admin.StackedInline):
model = CarouselItem
class CarouselPlugin(CMSPluginBase):
model = Carousel
form = CarouselForm
name = _("Carousel")
render_template = "cmsplugin_bootstrap_carousel/carousel.html"
inlines = [
CarouselItemInline,
]
def render(self, context, instance, placeholder):
context.update({'instance' : instance})
return context
plugin_pool.register_plugin(CarouselPlugin)
## Instruction:
Change extra from 3 to 0.
## Code After:
import re
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cmsplugin_bootstrap_carousel.models import *
from django.utils.translation import ugettext as _
from django.contrib import admin
from django.forms import ModelForm, ValidationError
class CarouselForm(ModelForm):
class Meta:
model = Carousel
def clean_domid(self):
data = self.cleaned_data['domid']
if not re.match(r'^[a-zA-Z_]\w*$', data):
raise ValidationError(_("The name must be a single word beginning with a letter"))
return data
class CarouselItemInline(admin.StackedInline):
model = CarouselItem
extra = 0
class CarouselPlugin(CMSPluginBase):
model = Carousel
form = CarouselForm
name = _("Carousel")
render_template = "cmsplugin_bootstrap_carousel/carousel.html"
inlines = [
CarouselItemInline,
]
def render(self, context, instance, placeholder):
context.update({'instance' : instance})
return context
plugin_pool.register_plugin(CarouselPlugin)
|
...
class CarouselItemInline(admin.StackedInline):
model = CarouselItem
extra = 0
class CarouselPlugin(CMSPluginBase):
...
|
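`extra` is the number of blank inline forms Django renders in addition to the bound ones; a toy sketch of that count (the helper is hypothetical, the real formset math lives in Django):

def inline_form_rows(existing, extra):
    # the admin renders one bound form per existing object plus `extra` empty ones
    return ['bound'] * existing + ['empty'] * extra

print(inline_form_rows(existing=2, extra=3))  # ['bound', 'bound', 'empty', 'empty', 'empty']
print(inline_form_rows(existing=2, extra=0))  # ['bound', 'bound'] -- no blank rows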
99f862b6c123b8c6d81e931254c061e64431bccc
|
pysingcells/logger.py
|
pysingcells/logger.py
|
""" Function related to logging """
# stp import
import sys
import logging
# fief import
from fief import filter_effective_parameters as fief
log = logging.getLogger()
@fief
def setup_logging(options):
log_level = 10 if "logging_level" in options else options["logging_level"]
log.setLevel(log_level)
handler = logging.StreamHandler(sys.stderr)
formatter = logging.Formatter('%(levelname)s :: %(message)s')
handler.setFormatter(formatter)
handler.setLevel(log_level)
log.addHandler(handler)
|
""" Function related to logging """
# stp import
import sys
import logging
# fief import
from fief import filter_effective_parameters as fief
log = logging.getLogger()
@fief
def setup_logging(options):
log_level = 10 if "logging_level" in options else options["logging_level"]
log.setLevel(log_level)
handler = logging.StreamHandler(sys.stderr)
formatter = logging.Formatter('%(asctime)s :: %(levelname)s :: %(message)s')
handler.setFormatter(formatter)
handler.setLevel(log_level)
log.addHandler(handler)
|
Add time in logging trace
|
Add time in logging trace
|
Python
|
mit
|
Fougere87/pysingcells
|
python
|
## Code Before:
""" Function related to logging """
# stp import
import sys
import logging
# fief import
from fief import filter_effective_parameters as fief
log = logging.getLogger()
@fief
def setup_logging(options):
log_level = 10 if "logging_level" in options else options["logging_level"]
log.setLevel(log_level)
handler = logging.StreamHandler(sys.stderr)
formatter = logging.Formatter('%(levelname)s :: %(message)s')
handler.setFormatter(formatter)
handler.setLevel(log_level)
log.addHandler(handler)
## Instruction:
Add time in logging trace
## Code After:
""" Function related to logging """
# stp import
import sys
import logging
# fief import
from fief import filter_effective_parameters as fief
log = logging.getLogger()
@fief
def setup_logging(options):
log_level = 10 if "logging_level" in options else options["logging_level"]
log.setLevel(log_level)
handler = logging.StreamHandler(sys.stderr)
formatter = logging.Formatter('%(asctime)s :: %(levelname)s :: %(message)s')
handler.setFormatter(formatter)
handler.setLevel(log_level)
log.addHandler(handler)
|
...
log.setLevel(log_level)
handler = logging.StreamHandler(sys.stderr)
formatter = logging.Formatter('%(asctime)s :: %(levelname)s :: %(message)s')
handler.setFormatter(formatter)
handler.setLevel(log_level)
...
|
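Adding `%(asctime)s` to the format string is the whole change; a runnable sketch with the same formatter pieces, using an arbitrary logger name and message:

import logging
import sys

log = logging.getLogger('demo')
log.setLevel(logging.INFO)

handler = logging.StreamHandler(stream=sys.stderr)
handler.setFormatter(logging.Formatter('%(asctime)s :: %(levelname)s :: %(message)s'))
log.addHandler(handler)

log.info('timestamped line')   # e.g. "2016-05-01 12:00:00,123 :: INFO :: timestamped line"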
898028dea2e04d52c32854752bda34d331c7696f
|
ynr/apps/candidatebot/management/commands/candidatebot_import_email_from_csv.py
|
ynr/apps/candidatebot/management/commands/candidatebot_import_email_from_csv.py
|
from __future__ import unicode_literals
import csv
from django.core.management.base import BaseCommand
from candidatebot.helpers import CandidateBot
from popolo.models import Person
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'filename',
help='Path to the file with the email addresses'
)
parser.add_argument(
'--source',
help='Source of the data. The source CSV column takes precedence'
)
def handle(self, **options):
with open(options['filename'], 'r') as fh:
reader = csv.DictReader(fh)
for row in reader:
source = row.get('source', options.get('source'))
if not row['democlub_id']:
continue
if not source:
raise ValueError("A source is required")
try:
bot = CandidateBot(row['democlub_id'])
bot.add_email(row['email'])
bot.save(source)
# print(person)
except Person.DoesNotExist:
print("Person ID {} not found".format(
row['democlub_id']))
# print(row)
|
from __future__ import unicode_literals
import csv
from django.core.management.base import BaseCommand
from candidatebot.helpers import CandidateBot
from popolo.models import Person
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'filename',
help='Path to the file with the email addresses'
)
parser.add_argument(
'--source',
help='Source of the data. The source CSV column takes precedence'
)
def handle(self, **options):
with open(options['filename'], 'r') as fh:
reader = csv.DictReader(fh)
for row in reader:
source = row.get('source', options.get('source'))
if not row['democlub_id']:
continue
if not source:
raise ValueError("A source is required")
try:
bot = CandidateBot(row['democlub_id'])
try:
bot.add_email(row['email'])
bot.save(source)
except ValueError:
#Email exists, move on
pass
except Person.DoesNotExist:
print("Person ID {} not found".format(
row['democlub_id']))
# print(row)
|
Move on if email exists
|
Move on if email exists
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
python
|
## Code Before:
from __future__ import unicode_literals
import csv
from django.core.management.base import BaseCommand
from candidatebot.helpers import CandidateBot
from popolo.models import Person
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'filename',
help='Path to the file with the email addresses'
)
parser.add_argument(
'--source',
help='Source of the data. The source CSV column takes precedence'
)
def handle(self, **options):
with open(options['filename'], 'r') as fh:
reader = csv.DictReader(fh)
for row in reader:
source = row.get('source', options.get('source'))
if not row['democlub_id']:
continue
if not source:
raise ValueError("A source is required")
try:
bot = CandidateBot(row['democlub_id'])
bot.add_email(row['email'])
bot.save(source)
# print(person)
except Person.DoesNotExist:
print("Person ID {} not found".format(
row['democlub_id']))
# print(row)
## Instruction:
Move on if email exists
## Code After:
from __future__ import unicode_literals
import csv
from django.core.management.base import BaseCommand
from candidatebot.helpers import CandidateBot
from popolo.models import Person
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'filename',
help='Path to the file with the email addresses'
)
parser.add_argument(
'--source',
help='Source of the data. The source CSV column takes precedence'
)
def handle(self, **options):
with open(options['filename'], 'r') as fh:
reader = csv.DictReader(fh)
for row in reader:
source = row.get('source', options.get('source'))
if not row['democlub_id']:
continue
if not source:
raise ValueError("A source is required")
try:
bot = CandidateBot(row['democlub_id'])
try:
bot.add_email(row['email'])
bot.save(source)
except ValueError:
#Email exists, move on
pass
except Person.DoesNotExist:
print("Person ID {} not found".format(
row['democlub_id']))
# print(row)
|
# ... existing code ...
try:
bot = CandidateBot(row['democlub_id'])
try:
bot.add_email(row['email'])
bot.save(source)
except ValueError:
#Email exists, move on
pass
except Person.DoesNotExist:
print("Person ID {} not found".format(
row['democlub_id']))
# ... rest of the code ...
|
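The nested try/except narrows the failure mode so one duplicate address no longer aborts the whole CSV import. A stripped-down sketch of the same pattern with stand-in classes -- CandidateBot's real duplicate handling is only assumed here to raise ValueError, as the comment in the record implies:

class DuplicateEmail(ValueError):
    pass

class FakeBot(object):
    seen = set()                     # shared across instances, like a database table

    def __init__(self, person_id):
        self.person_id = person_id

    def add_email(self, email):
        if email in FakeBot.seen:
            raise DuplicateEmail(email)
        FakeBot.seen.add(email)

rows = [{'democlub_id': 1, 'email': '[email protected]'},
        {'democlub_id': 2, 'email': '[email protected]'}]

for row in rows:
    bot = FakeBot(row['democlub_id'])
    try:
        bot.add_email(row['email'])
    except ValueError:
        # the address is already stored -- skip this row instead of aborting the run
        continue

print(sorted(FakeBot.seen))          # ['[email protected]'], and the loop finished cleanly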
caddef7500241135965e6d91ca94a38224bfd0cd
|
ui2/path_helpers.py
|
ui2/path_helpers.py
|
import ui
def get_path_image(path):
""" Get an image of a path """
bounds = path.bounds
with ui.ImageContext(bounds.max_x, bounds.max_y) as ctx:
path.fill()
return ctx.get_image()
def copy_path(path):
""" Make a copy of a ui.Path and return it. Preserves all data. """
new = ui.Path()
new.append_path(path)
# Copy over the attributes
new.line_cap_style = path.line_cap_style
new.line_join_style = path.line_join_style
new.line_width = path.line_width
return new
|
import ui
import objc_util
def get_path_image(path):
""" Get an image of a path """
bounds = path.bounds
with ui.ImageContext(bounds.max_x, bounds.max_y) as ctx:
path.fill()
return ctx.get_image()
def copy_path(path):
""" Make a copy of a ui.Path and return it. Preserves all data. """
new = ui.Path()
new.append_path(path)
# Copy over the attributes
new.line_cap_style = path.line_cap_style
new.line_join_style = path.line_join_style
new.line_width = path.line_width
return new
def scale_path(path, scale):
""" Stretch or scale a path. Pass either a scale or a tuple of scales """
if not hasattr(scale, "__iter__"):
scale = (scale, scale)
sx, sy = scale
newpath = copy_path(path)
# Construct an affine transformation matrix
transform = objc_util.CGAffineTransform(sx, 0, 0, sy, 0, 0)
# Apply it to the path
objcpath = objc_util.ObjCInstance(newpath)
objcpath.applyTransform_(transform)
return newpath
|
Add method for scaling path
|
Add method for scaling path
|
Python
|
mit
|
controversial/ui2
|
python
|
## Code Before:
import ui
def get_path_image(path):
""" Get an image of a path """
bounds = path.bounds
with ui.ImageContext(bounds.max_x, bounds.max_y) as ctx:
path.fill()
return ctx.get_image()
def copy_path(path):
""" Make a copy of a ui.Path and return it. Preserves all data. """
new = ui.Path()
new.append_path(path)
# Copy over the attributes
new.line_cap_style = path.line_cap_style
new.line_join_style = path.line_join_style
new.line_width = path.line_width
return new
## Instruction:
Add method for scaling path
## Code After:
import ui
import objc_util
def get_path_image(path):
""" Get an image of a path """
bounds = path.bounds
with ui.ImageContext(bounds.max_x, bounds.max_y) as ctx:
path.fill()
return ctx.get_image()
def copy_path(path):
""" Make a copy of a ui.Path and return it. Preserves all data. """
new = ui.Path()
new.append_path(path)
# Copy over the attributes
new.line_cap_style = path.line_cap_style
new.line_join_style = path.line_join_style
new.line_width = path.line_width
return new
def scale_path(path, scale):
""" Stretch or scale a path. Pass either a scale or a tuple of scales """
if not hasattr(scale, "__iter__"):
scale = (scale, scale)
sx, sy = scale
newpath = copy_path(path)
# Construct an affine transformation matrix
transform = objc_util.CGAffineTransform(sx, 0, 0, sy, 0, 0)
# Apply it to the path
objcpath = objc_util.ObjCInstance(newpath)
objcpath.applyTransform_(transform)
return newpath
|
// ... existing code ...
import ui
import objc_util
def get_path_image(path):
// ... modified code ...
new.line_width = path.line_width
return new
def scale_path(path, scale):
""" Stretch or scale a path. Pass either a scale or a tuple of scales """
if not hasattr(scale, "__iter__"):
scale = (scale, scale)
sx, sy = scale
newpath = copy_path(path)
# Construct an affine transformation matrix
transform = objc_util.CGAffineTransform(sx, 0, 0, sy, 0, 0)
# Apply it to the path
objcpath = objc_util.ObjCInstance(newpath)
objcpath.applyTransform_(transform)
return newpath
// ... rest of the code ...
|
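`CGAffineTransform(sx, 0, 0, sy, 0, 0)` is a pure scaling matrix, and `ui`/`objc_util` only exist inside Pythonista; the same effect on bare coordinates is just element-wise multiplication, sketched here with a hypothetical helper:

def scale_points(points, scale):
    # accept a single number or an (sx, sy) pair, mirroring scale_path's argument
    if not hasattr(scale, "__iter__"):
        scale = (scale, scale)
    sx, sy = scale
    return [(x * sx, y * sy) for x, y in points]

print(scale_points([(1, 2), (3, 4)], 2))         # [(2, 4), (6, 8)]
print(scale_points([(1, 2), (3, 4)], (2, 0.5)))  # [(2, 1.0), (6, 2.0)] -- stretch x, squash y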
b3979a46a7bcd71aa9b40892167910fdeed5ad97
|
frigg/projects/admin.py
|
frigg/projects/admin.py
|
from django.contrib import admin
from django.template.defaultfilters import pluralize
from .forms import EnvironmentVariableForm
from .models import EnvironmentVariable, Project
class EnvironmentVariableMixin:
form = EnvironmentVariableForm
@staticmethod
def get_readonly_fields(request, obj=None):
if obj:
return 'key', 'value', 'is_secret'
class EnvironmentVariableInline(EnvironmentVariableMixin, admin.TabularInline):
model = EnvironmentVariable
extra = 0
@admin.register(Project)
class ProjectAdmin(admin.ModelAdmin):
list_display = ('__str__', 'queue_name', 'approved', 'number_of_members', 'average_time',
'last_build_number', 'can_deploy')
list_filter = ['owner', 'queue_name', 'approved', 'can_deploy']
actions = ['sync_members']
inlines = [EnvironmentVariableInline]
def sync_members(self, request, queryset):
for project in queryset:
project.update_members()
self.message_user(
request,
'{} project{} was synced'.format(len(queryset), pluralize(len(queryset)))
)
sync_members.short_description = 'Sync members of selected projects'
@admin.register(EnvironmentVariable)
class EnvironmentVariableAdmin(EnvironmentVariableMixin, admin.ModelAdmin):
list_display = (
'__str__',
'is_secret',
)
|
from django.contrib import admin
from django.template.defaultfilters import pluralize
from .forms import EnvironmentVariableForm
from .models import EnvironmentVariable, Project
class EnvironmentVariableMixin:
form = EnvironmentVariableForm
@staticmethod
def get_readonly_fields(request, obj=None):
if obj:
return 'key', 'value', 'is_secret'
return tuple()
class EnvironmentVariableInline(EnvironmentVariableMixin, admin.TabularInline):
model = EnvironmentVariable
extra = 0
@admin.register(Project)
class ProjectAdmin(admin.ModelAdmin):
list_display = ('__str__', 'queue_name', 'approved', 'number_of_members', 'average_time',
'last_build_number', 'can_deploy')
list_filter = ['owner', 'queue_name', 'approved', 'can_deploy']
actions = ['sync_members']
inlines = [EnvironmentVariableInline]
def sync_members(self, request, queryset):
for project in queryset:
project.update_members()
self.message_user(
request,
'{} project{} was synced'.format(len(queryset), pluralize(len(queryset)))
)
sync_members.short_description = 'Sync members of selected projects'
@admin.register(EnvironmentVariable)
class EnvironmentVariableAdmin(EnvironmentVariableMixin, admin.ModelAdmin):
list_display = (
'__str__',
'is_secret',
)
|
Return empty tuple in get_readonly_fields
|
fix: Return empty tuple in get_readonly_fields
|
Python
|
mit
|
frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq
|
python
|
## Code Before:
from django.contrib import admin
from django.template.defaultfilters import pluralize
from .forms import EnvironmentVariableForm
from .models import EnvironmentVariable, Project
class EnvironmentVariableMixin:
form = EnvironmentVariableForm
@staticmethod
def get_readonly_fields(request, obj=None):
if obj:
return 'key', 'value', 'is_secret'
class EnvironmentVariableInline(EnvironmentVariableMixin, admin.TabularInline):
model = EnvironmentVariable
extra = 0
@admin.register(Project)
class ProjectAdmin(admin.ModelAdmin):
list_display = ('__str__', 'queue_name', 'approved', 'number_of_members', 'average_time',
'last_build_number', 'can_deploy')
list_filter = ['owner', 'queue_name', 'approved', 'can_deploy']
actions = ['sync_members']
inlines = [EnvironmentVariableInline]
def sync_members(self, request, queryset):
for project in queryset:
project.update_members()
self.message_user(
request,
'{} project{} was synced'.format(len(queryset), pluralize(len(queryset)))
)
sync_members.short_description = 'Sync members of selected projects'
@admin.register(EnvironmentVariable)
class EnvironmentVariableAdmin(EnvironmentVariableMixin, admin.ModelAdmin):
list_display = (
'__str__',
'is_secret',
)
## Instruction:
fix: Return empty tuple in get_readonly_fields
## Code After:
from django.contrib import admin
from django.template.defaultfilters import pluralize
from .forms import EnvironmentVariableForm
from .models import EnvironmentVariable, Project
class EnvironmentVariableMixin:
form = EnvironmentVariableForm
@staticmethod
def get_readonly_fields(request, obj=None):
if obj:
return 'key', 'value', 'is_secret'
return tuple()
class EnvironmentVariableInline(EnvironmentVariableMixin, admin.TabularInline):
model = EnvironmentVariable
extra = 0
@admin.register(Project)
class ProjectAdmin(admin.ModelAdmin):
list_display = ('__str__', 'queue_name', 'approved', 'number_of_members', 'average_time',
'last_build_number', 'can_deploy')
list_filter = ['owner', 'queue_name', 'approved', 'can_deploy']
actions = ['sync_members']
inlines = [EnvironmentVariableInline]
def sync_members(self, request, queryset):
for project in queryset:
project.update_members()
self.message_user(
request,
'{} project{} was synced'.format(len(queryset), pluralize(len(queryset)))
)
sync_members.short_description = 'Sync members of selected projects'
@admin.register(EnvironmentVariable)
class EnvironmentVariableAdmin(EnvironmentVariableMixin, admin.ModelAdmin):
list_display = (
'__str__',
'is_secret',
)
|
...
def get_readonly_fields(request, obj=None):
if obj:
return 'key', 'value', 'is_secret'
return tuple()
class EnvironmentVariableInline(EnvironmentVariableMixin, admin.TabularInline):
...
|
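The bug was the implicit None returned for the add form (obj is None), which Django then tries to iterate; a minimal sketch of why the explicit empty tuple matters, with `render_fields` standing in for the admin machinery:

def get_readonly_fields(obj=None):
    if obj:
        return 'key', 'value', 'is_secret'
    return tuple()

def render_fields(readonly):
    # Django iterates over whatever the hook returns
    return [name for name in readonly]

print(render_fields(get_readonly_fields(obj={'key': 'x'})))  # ['key', 'value', 'is_secret']
print(render_fields(get_readonly_fields()))                  # [] -- no TypeError on the add form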
46e9db6167a9c4f7f778381da888537c00d35bfd
|
emailsupport/admin.py
|
emailsupport/admin.py
|
from __future__ import unicode_literals
from django.contrib import admin
from models import Email, Resolution
class ResolutionInline(admin.StackedInline):
model = Resolution
max_num = 1
class EmailAdmin(admin.ModelAdmin):
list_display = ('subject', 'submitter', 'get_state_display')
inlines = [ResolutionInline]
ordering = ('-state', '-created')
change_form_template = 'admin/email_change_form.html'
readonly_fields = ('submitter', 'subject', 'body', 'body_html')
fieldsets = (
('Question', {
'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
}),
)
class Media:
css = {
"all": ("admin/css/admin.css",)
}
def render_change_form(self, *args, **kwargs):
response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
email = response.context_data['original']
response.context_data['previous_email'] = self.get_previous_email(email)
response.context_data['next_email'] = self.get_next_email(email)
return response
def get_previous_email(self, email):
return Email.objects.get_previous_email(email)
def get_next_email(self, email):
return Email.objects.get_next_email(email)
admin.site.register(Email, EmailAdmin)
|
from __future__ import unicode_literals
from django.contrib import admin
from models import Email, Resolution
class ResolutionInline(admin.StackedInline):
model = Resolution
max_num = 1
class EmailAdmin(admin.ModelAdmin):
list_display = ('subject', 'submitter', 'get_state_display')
inlines = [ResolutionInline]
ordering = ('-state', '-created')
change_form_template = 'admin/email_change_form.html'
readonly_fields = ('submitter', 'subject', 'body', 'body_html')
fieldsets = (
('Question', {
'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
}),
)
class Media:
css = {
"all": ("admin/css/admin.css",)
}
def render_change_form(self, *args, **kwargs):
response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
email = response.context_data['original']
if email:
response.context_data['previous_email'] = self.get_previous_email(email)
response.context_data['next_email'] = self.get_next_email(email)
return response
def get_previous_email(self, email):
return Email.objects.get_previous_email(email)
def get_next_email(self, email):
return Email.objects.get_next_email(email)
admin.site.register(Email, EmailAdmin)
|
Add prev. and next email to context only if the original (current) exists
|
Add prev. and next email to context only if the original (current) exists
|
Python
|
mit
|
rosti-cz/django-emailsupport
|
python
|
## Code Before:
from __future__ import unicode_literals
from django.contrib import admin
from models import Email, Resolution
class ResolutionInline(admin.StackedInline):
model = Resolution
max_num = 1
class EmailAdmin(admin.ModelAdmin):
list_display = ('subject', 'submitter', 'get_state_display')
inlines = [ResolutionInline]
ordering = ('-state', '-created')
change_form_template = 'admin/email_change_form.html'
readonly_fields = ('submitter', 'subject', 'body', 'body_html')
fieldsets = (
('Question', {
'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
}),
)
class Media:
css = {
"all": ("admin/css/admin.css",)
}
def render_change_form(self, *args, **kwargs):
response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
email = response.context_data['original']
response.context_data['previous_email'] = self.get_previous_email(email)
response.context_data['next_email'] = self.get_next_email(email)
return response
def get_previous_email(self, email):
return Email.objects.get_previous_email(email)
def get_next_email(self, email):
return Email.objects.get_next_email(email)
admin.site.register(Email, EmailAdmin)
## Instruction:
Add prev. and next email to context only if the original (current) exists
## Code After:
from __future__ import unicode_literals
from django.contrib import admin
from models import Email, Resolution
class ResolutionInline(admin.StackedInline):
model = Resolution
max_num = 1
class EmailAdmin(admin.ModelAdmin):
list_display = ('subject', 'submitter', 'get_state_display')
inlines = [ResolutionInline]
ordering = ('-state', '-created')
change_form_template = 'admin/email_change_form.html'
readonly_fields = ('submitter', 'subject', 'body', 'body_html')
fieldsets = (
('Question', {
'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
}),
)
class Media:
css = {
"all": ("admin/css/admin.css",)
}
def render_change_form(self, *args, **kwargs):
response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
email = response.context_data['original']
if email:
response.context_data['previous_email'] = self.get_previous_email(email)
response.context_data['next_email'] = self.get_next_email(email)
return response
def get_previous_email(self, email):
return Email.objects.get_previous_email(email)
def get_next_email(self, email):
return Email.objects.get_next_email(email)
admin.site.register(Email, EmailAdmin)
|
// ... existing code ...
def render_change_form(self, *args, **kwargs):
response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
email = response.context_data['original']
if email:
response.context_data['previous_email'] = self.get_previous_email(email)
response.context_data['next_email'] = self.get_next_email(email)
return response
def get_previous_email(self, email):
// ... rest of the code ...
|
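`context_data['original']` is None on the admin add view, so the previous/next lookups are now skipped there; a reduced sketch of the same guard, using dictionaries and made-up lookup functions in place of the response and queryset helpers:

def add_navigation(context, find_previous, find_next):
    email = context.get('original')
    if email:
        context['previous_email'] = find_previous(email)
        context['next_email'] = find_next(email)
    return context

def fake_previous(email_id):
    return 'email #{}'.format(email_id - 1)

def fake_next(email_id):
    return 'email #{}'.format(email_id + 1)

print(add_navigation({'original': 7}, fake_previous, fake_next))     # gains previous/next keys
print(add_navigation({'original': None}, fake_previous, fake_next))  # left untouched, no lookups run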
ee758e12198cd82c29a387193a154e6a8f88fc99
|
src/domain.h
|
src/domain.h
|
/***************************************************************************//**
* \file domain.h
* \author Krishnan, A. ([email protected])
* \brief Definition of the class \c domain
*/
#pragma once
#include "types.h"
/**
* \class domain
* \brief Store the mesh grid information
*/
class domain
{
public:
int nx, ///< number of cells in the x-direction
ny; ///< number of cells in the y-direction
vecH x, ///< x-coordinates of the nodes
y, ///< y-coordinates of the nodes
dx, ///< cell widths in the x-direction
dy; ///< cell widths in the y-direction
vecD xD, ///< x-coordinates of the nodes stored on the device
yD, ///< y-coordinates of the nodes stored on the device
dxD, ///< x- cell widths stored on the device
dyD; ///< y- cell widths stored on the device
vecH xu, ///< x-coordinates of the locations at which the x-component of velocity is evaluated
yu, ///< y-coordinates of the locations at which the x-component of velocity is evaluated
xv, ///< x-coordinates of the locations at which the y-component of velocity is evaluated
yv; ///< y-coordinates of the locations at which the y-component of velocity is evaluated
};
|
/***************************************************************************//**
* \file domain.h
* \author Anush Krishnan ([email protected])
* \brief Definition of the class \c domain.
*/
#pragma once
#include "types.h"
/**
* \class domain
* \brief Stores information about the computational grid.
*/
class domain
{
public:
int nx, ///< number of cells in the x-direction
ny; ///< number of cells in the y-direction
vecH x, ///< x-coordinates of the nodes
y, ///< y-coordinates of the nodes
dx, ///< cell widths in the x-direction
dy; ///< cell widths in the y-direction
vecD xD, ///< x-coordinates of the nodes stored on the device
yD, ///< y-coordinates of the nodes stored on the device
dxD, ///< x- cell widths stored on the device
dyD; ///< y- cell widths stored on the device
vecH xu, ///< x-coordinates of the locations at which the x-component of velocity is evaluated
yu, ///< y-coordinates of the locations at which the x-component of velocity is evaluated
xv, ///< x-coordinates of the locations at which the y-component of velocity is evaluated
yv; ///< y-coordinates of the locations at which the y-component of velocity is evaluated
};
|
Update Doxygen documentation with conventions
|
Update Doxygen documentation with conventions
|
C
|
mit
|
barbagroup/cuIBM,barbagroup/cuIBM,barbagroup/cuIBM
|
c
|
## Code Before:
/***************************************************************************//**
* \file domain.h
* \author Krishnan, A. ([email protected])
* \brief Definition of the class \c domain
*/
#pragma once
#include "types.h"
/**
* \class domain
* \brief Store the mesh grid information
*/
class domain
{
public:
int nx, ///< number of cells in the x-direction
ny; ///< number of cells in the y-direction
vecH x, ///< x-coordinates of the nodes
y, ///< y-coordinates of the nodes
dx, ///< cell widths in the x-direction
dy; ///< cell widths in the y-direction
vecD xD, ///< x-coordinates of the nodes stored on the device
yD, ///< y-coordinates of the nodes stored on the device
dxD, ///< x- cell widths stored on the device
dyD; ///< y- cell widths stored on the device
vecH xu, ///< x-coordinates of the locations at which the x-component of velocity is evaluated
yu, ///< y-coordinates of the locations at which the x-component of velocity is evaluated
xv, ///< x-coordinates of the locations at which the y-component of velocity is evaluated
yv; ///< y-coordinates of the locations at which the y-component of velocity is evaluated
};
## Instruction:
Update Doxygen documentation with conventions
## Code After:
/***************************************************************************//**
* \file domain.h
* \author Anush Krishnan ([email protected])
* \brief Definition of the class \c domain.
*/
#pragma once
#include "types.h"
/**
* \class domain
* \brief Stores information about the computational grid.
*/
class domain
{
public:
int nx, ///< number of cells in the x-direction
ny; ///< number of cells in the y-direction
vecH x, ///< x-coordinates of the nodes
y, ///< y-coordinates of the nodes
dx, ///< cell widths in the x-direction
dy; ///< cell widths in the y-direction
vecD xD, ///< x-coordinates of the nodes stored on the device
yD, ///< y-coordinates of the nodes stored on the device
dxD, ///< x- cell widths stored on the device
dyD; ///< y- cell widths stored on the device
vecH xu, ///< x-coordinates of the locations at which the x-component of velocity is evaluated
yu, ///< y-coordinates of the locations at which the x-component of velocity is evaluated
xv, ///< x-coordinates of the locations at which the y-component of velocity is evaluated
yv; ///< y-coordinates of the locations at which the y-component of velocity is evaluated
};
|
// ... existing code ...
/***************************************************************************//**
* \file domain.h
* \author Anush Krishnan ([email protected])
* \brief Definition of the class \c domain.
*/
#pragma once
// ... modified code ...
/**
* \class domain
* \brief Stores information about the computational grid.
*/
class domain
{
public:
// ... rest of the code ...
|
d0bcfebd2f85ec0ba17812ad4e98ef738dae1163
|
menpo/shape/groupops.py
|
menpo/shape/groupops.py
|
from .pointcloud import PointCloud
import numpy as np
def mean_pointcloud(pointclouds):
r"""
Compute the mean of a `list` of :map:`PointCloud` objects.
Parameters
----------
pointclouds: `list` of :map:`PointCloud`
List of point cloud objects from which we want to compute the mean.
Returns
-------
mean_pointcloud : :map:`PointCloud`
The mean point cloud.
"""
return PointCloud(np.mean([pc.points for pc in pointclouds], axis=0))
|
from __future__ import division
from .pointcloud import PointCloud
def mean_pointcloud(pointclouds):
r"""
Compute the mean of a `list` of :map:`PointCloud` objects.
Parameters
----------
pointclouds: `list` of :map:`PointCloud`
List of point cloud objects from which we want to compute the mean.
Returns
-------
mean_pointcloud : :map:`PointCloud`
The mean point cloud.
"""
return PointCloud(sum(pc.points for pc in pointclouds) / len(pointclouds))
|
Update mean_pointcloud to be faster
|
Update mean_pointcloud to be faster
This is actually faster than using numpy. It is also MUCH
faster if it gets jitted by something like pypy or numba.
|
Python
|
bsd-3-clause
|
mozata/menpo,mozata/menpo,patricksnape/menpo,menpo/menpo,grigorisg9gr/menpo,mozata/menpo,yuxiang-zhou/menpo,menpo/menpo,mozata/menpo,grigorisg9gr/menpo,yuxiang-zhou/menpo,grigorisg9gr/menpo,patricksnape/menpo,menpo/menpo,patricksnape/menpo,yuxiang-zhou/menpo
|
python
|
## Code Before:
from .pointcloud import PointCloud
import numpy as np
def mean_pointcloud(pointclouds):
r"""
Compute the mean of a `list` of :map:`PointCloud` objects.
Parameters
----------
pointclouds: `list` of :map:`PointCloud`
List of point cloud objects from which we want to compute the mean.
Returns
-------
mean_pointcloud : :map:`PointCloud`
The mean point cloud.
"""
return PointCloud(np.mean([pc.points for pc in pointclouds], axis=0))
## Instruction:
Update mean_pointcloud to be faster
This is actually faster than using numpy. It is also MUCH
faster if it gets jitted by something like pypy or numba.
## Code After:
from __future__ import division
from .pointcloud import PointCloud
def mean_pointcloud(pointclouds):
r"""
Compute the mean of a `list` of :map:`PointCloud` objects.
Parameters
----------
pointclouds: `list` of :map:`PointCloud`
List of point cloud objects from which we want to compute the mean.
Returns
-------
mean_pointcloud : :map:`PointCloud`
The mean point cloud.
"""
return PointCloud(sum(pc.points for pc in pointclouds) / len(pointclouds))
|
...
from __future__ import division
from .pointcloud import PointCloud
def mean_pointcloud(pointclouds):
...
mean_pointcloud : :map:`PointCloud`
The mean point cloud.
"""
return PointCloud(sum(pc.points for pc in pointclouds) / len(pointclouds))
...
|
dde3ee5113fd0ae40cfb6788e4bd0a2a8c60eb72
|
t/version_t.c
|
t/version_t.c
|
int main(void)
{
const char *version = MMDB_lib_version();
ok(version != NULL, "MMDB_lib_version exists");
ok(strcmp(version, PACKAGE_VERSION) == 0, "version is " PACKAGE_VERSION);
done_testing();
}
|
int main(void)
{
const char *version = MMDB_lib_version();
ok(version != NULL, "MMDB_lib_version exists");
if ( version )
ok(strcmp(version, PACKAGE_VERSION) == 0, "version is " PACKAGE_VERSION);
done_testing();
}
|
Check version string only if MMDB_lib_version != NULL
|
Check version string only if MMDB_lib_version != NULL
|
C
|
apache-2.0
|
maxmind/libmaxminddb,maxmind/libmaxminddb,maxmind/libmaxminddb
|
c
|
## Code Before:
int main(void)
{
const char *version = MMDB_lib_version();
ok(version != NULL, "MMDB_lib_version exists");
ok(strcmp(version, PACKAGE_VERSION) == 0, "version is " PACKAGE_VERSION);
done_testing();
}
## Instruction:
Check version string only if MMDB_lib_version != NULL
## Code After:
int main(void)
{
const char *version = MMDB_lib_version();
ok(version != NULL, "MMDB_lib_version exists");
if ( version )
ok(strcmp(version, PACKAGE_VERSION) == 0, "version is " PACKAGE_VERSION);
done_testing();
}
|
// ... existing code ...
{
const char *version = MMDB_lib_version();
ok(version != NULL, "MMDB_lib_version exists");
if ( version )
ok(strcmp(version, PACKAGE_VERSION) == 0, "version is " PACKAGE_VERSION);
done_testing();
}
// ... rest of the code ...
|
37656881f6360da03ecf7c4cbcc51f7a0eae883c
|
Sensorama/Sensorama/SRDataModel.h
|
Sensorama/Sensorama/SRDataModel.h
|
//
// SRDataModel.h
// Sensorama
//
// Created by Wojciech Adam Koszek (h) on 19/04/2016.
// Copyright © 2016 Wojciech Adam Koszek. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "Realm/Realm.h"
@interface SRDataPoint : RLMObject
@property NSNumber<RLMInt> *accX;
@property NSNumber<RLMInt> *accY;
@property NSNumber<RLMInt> *accZ;
@property NSNumber<RLMInt> *magX;
@property NSNumber<RLMInt> *magY;
@property NSNumber<RLMInt> *magZ;
@property NSNumber<RLMInt> *gyroX;
@property NSNumber<RLMInt> *gyroY;
@property NSNumber<RLMInt> *gyroZ;
@property NSInteger fileId;
@property NSInteger curTime;
@end
@interface SRDataFile : RLMObject
@property NSString *username;
@property NSString *desc;
@property NSString *timezone;
/* need to do something about device_info */
@property NSInteger sampleInterval;
@property BOOL accEnabled;
@property BOOL magEnabled;
@property BOOL gyroEnabled;
@property NSDate *dateStart;
@property NSDate *dateEnd;
@property NSInteger fileId;
@end
|
//
// SRDataModel.h
// Sensorama
//
// Created by Wojciech Adam Koszek (h) on 19/04/2016.
// Copyright © 2016 Wojciech Adam Koszek. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "Realm/Realm.h"
@interface SRDataPoint : RLMObject
@property NSNumber<RLMInt> *accX;
@property NSNumber<RLMInt> *accY;
@property NSNumber<RLMInt> *accZ;
@property NSNumber<RLMInt> *magX;
@property NSNumber<RLMInt> *magY;
@property NSNumber<RLMInt> *magZ;
@property NSNumber<RLMInt> *gyroX;
@property NSNumber<RLMInt> *gyroY;
@property NSNumber<RLMInt> *gyroZ;
@property NSInteger fileId;
@property NSInteger curTime;
@end
RLM_ARRAY_TYPE(SRDataPoint)
@interface SRDataFile : RLMObject
@property NSString *username;
@property NSString *desc;
@property NSString *timezone;
/* need to do something about device_info */
@property NSInteger sampleInterval;
@property BOOL accEnabled;
@property BOOL magEnabled;
@property BOOL gyroEnabled;
@property NSDate *dateStart;
@property NSDate *dateEnd;
@property NSInteger fileId;
@property RLMArray<SRDataPoint> *dataPoints;
@end
RLM_ARRAY_TYPE(SRDataFile)
|
Add arrays to the data model.
|
Add arrays to the data model.
|
C
|
bsd-2-clause
|
wkoszek/sensorama-ios,wkoszek/sensorama-ios,wkoszek/sensorama-ios,wkoszek/sensorama-ios
|
c
|
## Code Before:
//
// SRDataModel.h
// Sensorama
//
// Created by Wojciech Adam Koszek (h) on 19/04/2016.
// Copyright © 2016 Wojciech Adam Koszek. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "Realm/Realm.h"
@interface SRDataPoint : RLMObject
@property NSNumber<RLMInt> *accX;
@property NSNumber<RLMInt> *accY;
@property NSNumber<RLMInt> *accZ;
@property NSNumber<RLMInt> *magX;
@property NSNumber<RLMInt> *magY;
@property NSNumber<RLMInt> *magZ;
@property NSNumber<RLMInt> *gyroX;
@property NSNumber<RLMInt> *gyroY;
@property NSNumber<RLMInt> *gyroZ;
@property NSInteger fileId;
@property NSInteger curTime;
@end
@interface SRDataFile : RLMObject
@property NSString *username;
@property NSString *desc;
@property NSString *timezone;
/* need to do something about device_info */
@property NSInteger sampleInterval;
@property BOOL accEnabled;
@property BOOL magEnabled;
@property BOOL gyroEnabled;
@property NSDate *dateStart;
@property NSDate *dateEnd;
@property NSInteger fileId;
@end
## Instruction:
Add arrays to the data model.
## Code After:
//
// SRDataModel.h
// Sensorama
//
// Created by Wojciech Adam Koszek (h) on 19/04/2016.
// Copyright © 2016 Wojciech Adam Koszek. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "Realm/Realm.h"
@interface SRDataPoint : RLMObject
@property NSNumber<RLMInt> *accX;
@property NSNumber<RLMInt> *accY;
@property NSNumber<RLMInt> *accZ;
@property NSNumber<RLMInt> *magX;
@property NSNumber<RLMInt> *magY;
@property NSNumber<RLMInt> *magZ;
@property NSNumber<RLMInt> *gyroX;
@property NSNumber<RLMInt> *gyroY;
@property NSNumber<RLMInt> *gyroZ;
@property NSInteger fileId;
@property NSInteger curTime;
@end
RLM_ARRAY_TYPE(SRDataPoint)
@interface SRDataFile : RLMObject
@property NSString *username;
@property NSString *desc;
@property NSString *timezone;
/* need to do something about device_info */
@property NSInteger sampleInterval;
@property BOOL accEnabled;
@property BOOL magEnabled;
@property BOOL gyroEnabled;
@property NSDate *dateStart;
@property NSDate *dateEnd;
@property NSInteger fileId;
@property RLMArray<SRDataPoint> *dataPoints;
@end
RLM_ARRAY_TYPE(SRDataFile)
|
// ... existing code ...
@property NSInteger curTime;
@end
RLM_ARRAY_TYPE(SRDataPoint)
@interface SRDataFile : RLMObject
// ... modified code ...
@property NSDate *dateEnd;
@property NSInteger fileId;
@property RLMArray<SRDataPoint> *dataPoints;
@end
RLM_ARRAY_TYPE(SRDataFile)
// ... rest of the code ...
|
6515d159ab3d09f4ac6157b0f825157c4ed1f5c9
|
botbot/checks.py
|
botbot/checks.py
|
"""Functions for checking files"""
import os
import stat
from .checker import is_link
def file_exists(path):
try:
with open(path, mode='r') as test:
pass
except FileNotFoundError:
if is_link(path):
return 'PROB_BROKEN_LINK'
except OSError:
return 'PROB_UNKNOWN_ERROR'
def is_fastq(path):
"""Check whether a given file is a fastq file."""
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(path):
"""Check if a *.SAM file should be compressed or deleted"""
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
|
"""Functions for checking files"""
import os
import stat
from .checker import is_link
def is_fastq(path):
"""Check whether a given file is a fastq file."""
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(path):
"""Check if a *.SAM file should be compressed or deleted"""
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
|
Clean up some loose ends
|
Clean up some loose ends
|
Python
|
mit
|
jackstanek/BotBot,jackstanek/BotBot
|
python
|
## Code Before:
"""Functions for checking files"""
import os
import stat
from .checker import is_link
def file_exists(path):
try:
with open(path, mode='r') as test:
pass
except FileNotFoundError:
if is_link(path):
return 'PROB_BROKEN_LINK'
except OSError:
return 'PROB_UNKNOWN_ERROR'
def is_fastq(path):
"""Check whether a given file is a fastq file."""
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(path):
"""Check if a *.SAM file should be compressed or deleted"""
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
## Instruction:
Clean up some loose ends
## Code After:
"""Functions for checking files"""
import os
import stat
from .checker import is_link
def is_fastq(path):
"""Check whether a given file is a fastq file."""
if os.path.splitext(path)[1] == ".fastq":
if not is_link(path):
return 'PROB_FILE_IS_FASTQ'
def sam_should_compress(path):
"""Check if a *.SAM file should be compressed or deleted"""
name, ext = os.path.splitext(path)
if ext == '.sam':
if os.path.isfile('.'.join((name, 'bam'))):
return 'PROB_SAM_AND_BAM_EXIST'
else:
return 'PROB_SAM_SHOULD_COMPRESS'
|
// ... existing code ...
import stat
from .checker import is_link
def is_fastq(path):
"""Check whether a given file is a fastq file."""
// ... rest of the code ...
|
994606d2641115f8af59657204d3d64f540bbfbd
|
data_structures/linked_list.py
|
data_structures/linked_list.py
|
class Node(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
def __repr__(self):
return '{val}'.format(val=self.val)
class LinkedList(object):
def __init__(self, values=None, head=None):
self.head = head
self.length = 0
def __repr__(self):
pass
def __len__(self):
pass
def __iter__(self):
pass
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def insert(self):
pass
def pop(self):
pass
|
class Node(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
def __repr__(self):
return '{val}'.format(val=self.val)
class LinkedList(object):
def __init__(self, iterable=()):
self._current = None
self.head = None
self.length = 0
for val in reversed(iterable):
self.insert(val)
def __repr__(self):
'''Print string representation of Linked List.'''
node = self.head
output = ''
for node in self:
output += '{!r}'.format(node.val)
return '({})'.format(output.rstrip(' ,'))
def __len__(self):
return self.length
def __iter__(self):
if self.head is not None:
self._current = self.head
return self
def next(self):
if self._current is None:
raise StopIteration
node = self._current
self._current = self._current.next
return node
def insert(self):
pass
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def pop(self):
pass
|
Update magic methods, and reorg args.
|
Update magic methods, and reorg args.
|
Python
|
mit
|
sjschmidt44/python_data_structures
|
python
|
## Code Before:
class Node(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
def __repr__(self):
return '{val}'.format(val=self.val)
class LinkedList(object):
def __init__(self, values=None, head=None):
self.head = head
self.length = 0
def __repr__(self):
pass
def __len__(self):
pass
def __iter__(self):
pass
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def insert(self):
pass
def pop(self):
pass
## Instruction:
Update magic methods, and reorg args.
## Code After:
class Node(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
def __repr__(self):
return '{val}'.format(val=self.val)
class LinkedList(object):
def __init__(self, iterable=()):
self._current = None
self.head = None
self.length = 0
for val in reversed(iterable):
self.insert(val)
def __repr__(self):
'''Print string representation of Linked List.'''
node = self.head
output = ''
for node in self:
output += '{!r}'.format(node.val)
return '({})'.format(output.rstrip(' ,'))
def __len__(self):
return self.length
def __iter__(self):
if self.head is not None:
self._current = self.head
return self
def next(self):
if self._current is None:
raise StopIteration
node = self._current
self._current = self._current.next
return node
def insert(self):
pass
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def pop(self):
pass
|
# ... existing code ...
class LinkedList(object):
def __init__(self, iterable=()):
self._current = None
self.head = None
self.length = 0
for val in reversed(iterable):
self.insert(val)
def __repr__(self):
'''Print string representation of Linked List.'''
node = self.head
output = ''
for node in self:
output += '{!r}'.format(node.val)
return '({})'.format(output.rstrip(' ,'))
def __len__(self):
return self.length
def __iter__(self):
if self.head is not None:
self._current = self.head
return self
def next(self):
if self._current is None:
raise StopIteration
node = self._current
self._current = self._current.next
return node
def insert(self):
pass
def size(self):
# ... modified code ...
def remove(self):
pass
def pop(self):
pass
# ... rest of the code ...
|
819f36493e1e0112c3bbe4f92f87f1771cc4af3f
|
moa/base.py
|
moa/base.py
|
'''
* when dispatching events, returning True stops it.
'''
from weakref import ref
from kivy.event import EventDispatcher
from kivy.properties import StringProperty, OptionProperty, ObjectProperty
import logging
class MoaException(Exception):
pass
class MoaBase(EventDispatcher):
named_moas = {}
''' A weakref.ref to the named moa instances.
Read only.
'''
_last_name = ''
def __init__(self, **kwargs):
super(MoaBase, self).__init__(**kwargs)
def verfiy_name(instance, value):
named_moas = MoaBase.named_moas
old_name = self._last_name
if value == old_name:
return
if old_name:
del named_moas[old_name]
if value:
if value in named_moas and named_moas[value]() is not None:
raise ValueError('Moa instance with name {} already '
'exists: {}'.format(value, named_moas[value]()))
else:
named_moas[value] = ref(self)
self._last_name = value
self.bind(name=verfiy_name)
verfiy_name(self, self.name)
name = StringProperty('')
''' Unique name across all Moa objects
'''
logger = ObjectProperty(logging.getLogger('moa'),
baseclass=logging.Logger)
source = StringProperty('')
''' E.g. a filename to load that interpreted by the subclass.
'''
|
'''
* when dispatching events, returning True stops it.
'''
__all__ = ('MoaBase', )
from weakref import ref
from kivy.event import EventDispatcher
from kivy.properties import StringProperty, OptionProperty, ObjectProperty
import logging
class MoaBase(EventDispatcher):
named_moas = {}
''' A weakref.ref to the named moa instances.
Read only.
'''
_last_name = ''
def __init__(self, **kwargs):
super(MoaBase, self).__init__(**kwargs)
def verfiy_name(instance, value):
named_moas = MoaBase.named_moas
old_name = self._last_name
if value == old_name:
return
if old_name:
del named_moas[old_name]
if value:
if value in named_moas and named_moas[value]() is not None:
raise ValueError('Moa instance with name {} already '
'exists: {}'.format(value, named_moas[value]()))
else:
named_moas[value] = ref(self)
self._last_name = value
self.bind(name=verfiy_name)
verfiy_name(self, self.name)
name = StringProperty('')
''' Unique name across all Moa objects
'''
logger = ObjectProperty(logging.getLogger('moa'),
baseclass=logging.Logger)
source = StringProperty('')
''' E.g. a filename to load that interpreted by the subclass.
'''
|
Remove unused moa exception class.
|
Remove unused moa exception class.
|
Python
|
mit
|
matham/moa
|
python
|
## Code Before:
'''
* when dispatching events, returning True stops it.
'''
from weakref import ref
from kivy.event import EventDispatcher
from kivy.properties import StringProperty, OptionProperty, ObjectProperty
import logging
class MoaException(Exception):
pass
class MoaBase(EventDispatcher):
named_moas = {}
''' A weakref.ref to the named moa instances.
Read only.
'''
_last_name = ''
def __init__(self, **kwargs):
super(MoaBase, self).__init__(**kwargs)
def verfiy_name(instance, value):
named_moas = MoaBase.named_moas
old_name = self._last_name
if value == old_name:
return
if old_name:
del named_moas[old_name]
if value:
if value in named_moas and named_moas[value]() is not None:
raise ValueError('Moa instance with name {} already '
'exists: {}'.format(value, named_moas[value]()))
else:
named_moas[value] = ref(self)
self._last_name = value
self.bind(name=verfiy_name)
verfiy_name(self, self.name)
name = StringProperty('')
''' Unique name across all Moa objects
'''
logger = ObjectProperty(logging.getLogger('moa'),
baseclass=logging.Logger)
source = StringProperty('')
''' E.g. a filename to load that interpreted by the subclass.
'''
## Instruction:
Remove unused moa exception class.
## Code After:
'''
* when dispatching events, returning True stops it.
'''
__all__ = ('MoaBase', )
from weakref import ref
from kivy.event import EventDispatcher
from kivy.properties import StringProperty, OptionProperty, ObjectProperty
import logging
class MoaBase(EventDispatcher):
named_moas = {}
''' A weakref.ref to the named moa instances.
Read only.
'''
_last_name = ''
def __init__(self, **kwargs):
super(MoaBase, self).__init__(**kwargs)
def verfiy_name(instance, value):
named_moas = MoaBase.named_moas
old_name = self._last_name
if value == old_name:
return
if old_name:
del named_moas[old_name]
if value:
if value in named_moas and named_moas[value]() is not None:
raise ValueError('Moa instance with name {} already '
'exists: {}'.format(value, named_moas[value]()))
else:
named_moas[value] = ref(self)
self._last_name = value
self.bind(name=verfiy_name)
verfiy_name(self, self.name)
name = StringProperty('')
''' Unique name across all Moa objects
'''
logger = ObjectProperty(logging.getLogger('moa'),
baseclass=logging.Logger)
source = StringProperty('')
''' E.g. a filename to load that interpreted by the subclass.
'''
|
...
'''
* when dispatching events, returning True stops it.
'''
__all__ = ('MoaBase', )
from weakref import ref
...
from kivy.event import EventDispatcher
from kivy.properties import StringProperty, OptionProperty, ObjectProperty
import logging
class MoaBase(EventDispatcher):
...
|
df131a8f482e712546555e0cb28a58edcf960bf2
|
apps/planet/management/commands/update_all_feeds.py
|
apps/planet/management/commands/update_all_feeds.py
|
from django.core.management.base import BaseCommand
from planet.management.commands import process_feed
from planet.models import Feed
from planet.signals import feeds_updated
class Command(BaseCommand):
"""
Command to add a complete blog feed to our db.
Usage:
./manage.py add_feed <feed_url>
"""
def handle(self, *args, **options):
for feed_url in Feed.site_objects.all().values_list("url", flat=True):
# process feed in create-mode
process_feed(feed_url, create=False)
feeds_updated.send(sender=self, instance=self)
|
from datetime import datetime
from django.core.management.base import BaseCommand
from planet.management.commands import process_feed
from planet.models import Feed
from planet.signals import feeds_updated
class Command(BaseCommand):
"""
Command to add a complete blog feed to our db.
Usage:
./manage.py add_feed <feed_url>
"""
def handle(self, *args, **options):
new_posts_count = 0
start = datetime.now()
for feed_url in Feed.site_objects.all().values_list("url", flat=True):
# process feed in create-mode
new_posts_count += process_feed(feed_url, create=False)
delta = datetime.now() - start
print "Added %s posts in %d seconds" % (new_posts_count, delta.seconds)
feeds_updated.send(sender=self, instance=self)
|
Print total number of posts added and total elapsed time
|
Print total number of posts added and total elapsed time
|
Python
|
bsd-3-clause
|
matagus/django-planet,matagus/django-planet,jilljenn/django-planet,jilljenn/django-planet
|
python
|
## Code Before:
from django.core.management.base import BaseCommand
from planet.management.commands import process_feed
from planet.models import Feed
from planet.signals import feeds_updated
class Command(BaseCommand):
"""
Command to add a complete blog feed to our db.
Usage:
./manage.py add_feed <feed_url>
"""
def handle(self, *args, **options):
for feed_url in Feed.site_objects.all().values_list("url", flat=True):
# process feed in create-mode
process_feed(feed_url, create=False)
feeds_updated.send(sender=self, instance=self)
## Instruction:
Print total number of posts added and total elapsed time
## Code After:
from datetime import datetime
from django.core.management.base import BaseCommand
from planet.management.commands import process_feed
from planet.models import Feed
from planet.signals import feeds_updated
class Command(BaseCommand):
"""
Command to add a complete blog feed to our db.
Usage:
./manage.py add_feed <feed_url>
"""
def handle(self, *args, **options):
new_posts_count = 0
start = datetime.now()
for feed_url in Feed.site_objects.all().values_list("url", flat=True):
# process feed in create-mode
new_posts_count += process_feed(feed_url, create=False)
delta = datetime.now() - start
print "Added %s posts in %d seconds" % (new_posts_count, delta.seconds)
feeds_updated.send(sender=self, instance=self)
|
// ... existing code ...
from datetime import datetime
from django.core.management.base import BaseCommand
from planet.management.commands import process_feed
// ... modified code ...
./manage.py add_feed <feed_url>
"""
def handle(self, *args, **options):
new_posts_count = 0
start = datetime.now()
for feed_url in Feed.site_objects.all().values_list("url", flat=True):
# process feed in create-mode
new_posts_count += process_feed(feed_url, create=False)
delta = datetime.now() - start
print "Added %s posts in %d seconds" % (new_posts_count, delta.seconds)
feeds_updated.send(sender=self, instance=self)
// ... rest of the code ...
|
36edb0e161fd3c65d2957b7b319b67975e846e7e
|
src/sentry/templatetags/sentry_assets.py
|
src/sentry/templatetags/sentry_assets.py
|
from __future__ import absolute_import
from django.template import Library
from sentry.utils.assets import get_asset_url
register = Library()
@register.simple_tag
def asset_url(module, path):
"""
Returns a versioned asset URL (located within Sentry's static files).
Example:
{% asset_url 'sentry' 'dist/sentry.css' %}
=> "/_static/74d127b78dc7daf2c51f/sentry/dist/sentry.css"
"""
return get_asset_url(module, path)
|
from __future__ import absolute_import
from django.template import Library
from sentry.utils.assets import get_asset_url
from sentry.utils.http import absolute_uri
register = Library()
@register.simple_tag
def asset_url(module, path):
"""
Returns a versioned asset URL (located within Sentry's static files).
Example:
{% asset_url 'sentry' 'dist/sentry.css' %}
=> "http://sentry.example.com/_static/74d127b78dc7daf2c51f/sentry/dist/sentry.css"
"""
return absolute_uri(get_asset_url(module, path))
|
Make all asset URLs absolute
|
Make all asset URLs absolute
|
Python
|
bsd-3-clause
|
mvaled/sentry,looker/sentry,JamesMura/sentry,zenefits/sentry,BuildingLink/sentry,gencer/sentry,ifduyue/sentry,BuildingLink/sentry,fotinakis/sentry,mitsuhiko/sentry,zenefits/sentry,fotinakis/sentry,gencer/sentry,looker/sentry,beeftornado/sentry,gencer/sentry,mitsuhiko/sentry,BayanGroup/sentry,fotinakis/sentry,daevaorn/sentry,daevaorn/sentry,mvaled/sentry,mvaled/sentry,zenefits/sentry,nicholasserra/sentry,JamesMura/sentry,beeftornado/sentry,imankulov/sentry,BayanGroup/sentry,mvaled/sentry,mvaled/sentry,looker/sentry,beeftornado/sentry,imankulov/sentry,zenefits/sentry,JackDanger/sentry,BayanGroup/sentry,ifduyue/sentry,alexm92/sentry,alexm92/sentry,jean/sentry,jean/sentry,fotinakis/sentry,zenefits/sentry,BuildingLink/sentry,nicholasserra/sentry,JamesMura/sentry,ifduyue/sentry,ifduyue/sentry,jean/sentry,daevaorn/sentry,imankulov/sentry,BuildingLink/sentry,mvaled/sentry,looker/sentry,looker/sentry,JamesMura/sentry,daevaorn/sentry,ifduyue/sentry,JackDanger/sentry,JackDanger/sentry,gencer/sentry,JamesMura/sentry,gencer/sentry,jean/sentry,BuildingLink/sentry,jean/sentry,alexm92/sentry,nicholasserra/sentry
|
python
|
## Code Before:
from __future__ import absolute_import
from django.template import Library
from sentry.utils.assets import get_asset_url
register = Library()
@register.simple_tag
def asset_url(module, path):
"""
Returns a versioned asset URL (located within Sentry's static files).
Example:
{% asset_url 'sentry' 'dist/sentry.css' %}
=> "/_static/74d127b78dc7daf2c51f/sentry/dist/sentry.css"
"""
return get_asset_url(module, path)
## Instruction:
Make all asset URLs absolute
## Code After:
from __future__ import absolute_import
from django.template import Library
from sentry.utils.assets import get_asset_url
from sentry.utils.http import absolute_uri
register = Library()
@register.simple_tag
def asset_url(module, path):
"""
Returns a versioned asset URL (located within Sentry's static files).
Example:
{% asset_url 'sentry' 'dist/sentry.css' %}
=> "http://sentry.example.com/_static/74d127b78dc7daf2c51f/sentry/dist/sentry.css"
"""
return absolute_uri(get_asset_url(module, path))
|
...
from django.template import Library
from sentry.utils.assets import get_asset_url
from sentry.utils.http import absolute_uri
register = Library()
...
Example:
{% asset_url 'sentry' 'dist/sentry.css' %}
=> "http://sentry.example.com/_static/74d127b78dc7daf2c51f/sentry/dist/sentry.css"
"""
return absolute_uri(get_asset_url(module, path))
...
|
1b502cdf399b5b9cd4593aea82750b77114fe858
|
examples/flask_hello.py
|
examples/flask_hello.py
|
from pyinstrument import Profiler
try:
from flask import Flask, g, make_response, request
except ImportError:
print('This example requires Flask.')
print('Install using `pip install flask`.')
exit(1)
app = Flask(__name__)
@app.before_request
def before_request():
if "profile" in request.args:
g.profiler = Profiler()
g.profiler.start()
@app.after_request
def after_request(response):
if not hasattr(g, "profiler"):
return response
g.profiler.stop()
output_html = g.profiler.output_html()
return make_response(output_html)
@app.route('/')
def hello_world():
return 'Hello, World!'
|
import time
from pyinstrument import Profiler
try:
from flask import Flask, g, make_response, request
except ImportError:
print('This example requires Flask.')
print('Install using `pip install flask`.')
exit(1)
app = Flask(__name__)
@app.before_request
def before_request():
if "profile" in request.args:
g.profiler = Profiler()
g.profiler.start()
@app.after_request
def after_request(response):
if not hasattr(g, "profiler"):
return response
g.profiler.stop()
output_html = g.profiler.output_html()
return make_response(output_html)
@app.route('/')
def hello_world():
return 'Hello, World!'
@app.route('/sleep')
def sleep():
time.sleep(0.1)
return 'Good morning!'
@app.route('/dosomething')
def do_something():
import requests
requests.get('http://google.com')
return 'Google says hello!'
|
Add some more endpoints to the flask example
|
Add some more endpoints to the flask example
|
Python
|
bsd-3-clause
|
joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument
|
python
|
## Code Before:
from pyinstrument import Profiler
try:
from flask import Flask, g, make_response, request
except ImportError:
print('This example requires Flask.')
print('Install using `pip install flask`.')
exit(1)
app = Flask(__name__)
@app.before_request
def before_request():
if "profile" in request.args:
g.profiler = Profiler()
g.profiler.start()
@app.after_request
def after_request(response):
if not hasattr(g, "profiler"):
return response
g.profiler.stop()
output_html = g.profiler.output_html()
return make_response(output_html)
@app.route('/')
def hello_world():
return 'Hello, World!'
## Instruction:
Add some more endpoints to the flask example
## Code After:
import time
from pyinstrument import Profiler
try:
from flask import Flask, g, make_response, request
except ImportError:
print('This example requires Flask.')
print('Install using `pip install flask`.')
exit(1)
app = Flask(__name__)
@app.before_request
def before_request():
if "profile" in request.args:
g.profiler = Profiler()
g.profiler.start()
@app.after_request
def after_request(response):
if not hasattr(g, "profiler"):
return response
g.profiler.stop()
output_html = g.profiler.output_html()
return make_response(output_html)
@app.route('/')
def hello_world():
return 'Hello, World!'
@app.route('/sleep')
def sleep():
time.sleep(0.1)
return 'Good morning!'
@app.route('/dosomething')
def do_something():
import requests
requests.get('http://google.com')
return 'Google says hello!'
|
// ... existing code ...
import time
from pyinstrument import Profiler
try:
// ... modified code ...
@app.route('/')
def hello_world():
return 'Hello, World!'
@app.route('/sleep')
def sleep():
time.sleep(0.1)
return 'Good morning!'
@app.route('/dosomething')
def do_something():
import requests
requests.get('http://google.com')
return 'Google says hello!'
// ... rest of the code ...
|
6978320135e27b7c13a90d26b2bf8b2ba990706d
|
contrib/other-builds/moses2/Search/SearchCubePruning.h
|
contrib/other-builds/moses2/Search/SearchCubePruning.h
|
/*
* SearchCubePruning.h
*
* Created on: 16 Nov 2015
* Author: hieu
*/
#pragma once
#include <vector>
#include <boost/unordered_map.hpp>
#include "Search.h"
class Bitmap;
class SearchCubePruning : public Search
{
public:
SearchCubePruning(Manager &mgr, Stacks &stacks);
virtual ~SearchCubePruning();
void Decode(size_t stackInd);
const Hypothesis *GetBestHypothesis() const;
protected:
boost::unordered_map<Bitmap*, std::vector<const Hypothesis*> > m_hyposPerBM;
};
|
/*
* SearchCubePruning.h
*
* Created on: 16 Nov 2015
* Author: hieu
*/
#ifndef SEARCH_SEARCHCUBEPRUNING_H_
#define SEARCH_SEARCHCUBEPRUNING_H_
#include "Search.h"
class SearchCubePruning : public Search
{
public:
SearchCubePruning(Manager &mgr, Stacks &stacks);
virtual ~SearchCubePruning();
void Decode(size_t stackInd);
const Hypothesis *GetBestHypothesis() const;
};
#endif /* SEARCH_SEARCHCUBEPRUNING_H_ */
|
Revert "for each input path.2"
|
Revert "for each input path.2"
This reverts commit 69e4cb0531ec93868ccc06ec54d52a4f88d62295.
|
C
|
lgpl-2.1
|
tofula/mosesdecoder,moses-smt/mosesdecoder,moses-smt/mosesdecoder,alvations/mosesdecoder,tofula/mosesdecoder,alvations/mosesdecoder,moses-smt/mosesdecoder,alvations/mosesdecoder,moses-smt/mosesdecoder,alvations/mosesdecoder,alvations/mosesdecoder,moses-smt/mosesdecoder,alvations/mosesdecoder,alvations/mosesdecoder,tofula/mosesdecoder,alvations/mosesdecoder,tofula/mosesdecoder,tofula/mosesdecoder,moses-smt/mosesdecoder,tofula/mosesdecoder,tofula/mosesdecoder,alvations/mosesdecoder,tofula/mosesdecoder,moses-smt/mosesdecoder,moses-smt/mosesdecoder,tofula/mosesdecoder,tofula/mosesdecoder,moses-smt/mosesdecoder,alvations/mosesdecoder,alvations/mosesdecoder,tofula/mosesdecoder,moses-smt/mosesdecoder,moses-smt/mosesdecoder
|
c
|
## Code Before:
/*
* SearchCubePruning.h
*
* Created on: 16 Nov 2015
* Author: hieu
*/
#pragma once
#include <vector>
#include <boost/unordered_map.hpp>
#include "Search.h"
class Bitmap;
class SearchCubePruning : public Search
{
public:
SearchCubePruning(Manager &mgr, Stacks &stacks);
virtual ~SearchCubePruning();
void Decode(size_t stackInd);
const Hypothesis *GetBestHypothesis() const;
protected:
boost::unordered_map<Bitmap*, std::vector<const Hypothesis*> > m_hyposPerBM;
};
## Instruction:
Revert "for each input path.2"
This reverts commit 69e4cb0531ec93868ccc06ec54d52a4f88d62295.
## Code After:
/*
* SearchCubePruning.h
*
* Created on: 16 Nov 2015
* Author: hieu
*/
#ifndef SEARCH_SEARCHCUBEPRUNING_H_
#define SEARCH_SEARCHCUBEPRUNING_H_
#include "Search.h"
class SearchCubePruning : public Search
{
public:
SearchCubePruning(Manager &mgr, Stacks &stacks);
virtual ~SearchCubePruning();
void Decode(size_t stackInd);
const Hypothesis *GetBestHypothesis() const;
};
#endif /* SEARCH_SEARCHCUBEPRUNING_H_ */
|
# ... existing code ...
* Author: hieu
*/
#ifndef SEARCH_SEARCHCUBEPRUNING_H_
#define SEARCH_SEARCHCUBEPRUNING_H_
#include "Search.h"
class SearchCubePruning : public Search
{
# ... modified code ...
const Hypothesis *GetBestHypothesis() const;
};
#endif /* SEARCH_SEARCHCUBEPRUNING_H_ */
# ... rest of the code ...
|
72e948719145579eb7dfb9385b921f8eb6ea1384
|
tests/v4/conftest.py
|
tests/v4/conftest.py
|
from .context import tohu
from tohu.v4.primitive_generators import *
from tohu.v4.derived_generators import *
__all__ = ['EXEMPLAR_GENERATORS', 'EXEMPLAR_PRIMITIVE_GENERATORS', 'EXEMPLAR_DERIVED_GENERATORS']
def add(x, y):
return x + y
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Integer(100, 200),
HashDigest(length=6),
FakerGenerator(method="name"),
IterateOver('abcdefghijklmnopqrstuvwxyz'),
SelectOne('abcdefghijklmnopqrstuvwxyz'),
SelectOne('abcde', p=[0.1, 0.05, 0.7, 0.03, 0.12]),
Timestamp(date='2018-01-01'),
]
EXEMPLAR_DERIVED_GENERATORS = [
Apply(add, Integer(100, 200), Integer(300, 400)),
Apply(add, Apply(add, Integer(100, 200), Integer(300, 400)), Apply(add, Integer(500, 600), Integer(700, 800))),
]
EXEMPLAR_CUSTOM_GENERATORS = []
EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS + EXEMPLAR_CUSTOM_GENERATORS
|
from .context import tohu
from tohu.v4.primitive_generators import *
from tohu.v4.derived_generators import *
__all__ = ['EXEMPLAR_GENERATORS', 'EXEMPLAR_PRIMITIVE_GENERATORS', 'EXEMPLAR_DERIVED_GENERATORS']
def add(x, y):
return x + y
EXEMPLAR_PRIMITIVE_GENERATORS = [
Boolean(p=0.3),
Constant("quux"),
FakerGenerator(method="name"),
Float(12.34, 56.78),
HashDigest(length=6),
Integer(100, 200),
IterateOver('abcdefghijklmnopqrstuvwxyz'),
SelectOne('abcdefghijklmnopqrstuvwxyz'),
SelectOne('abcde', p=[0.1, 0.05, 0.7, 0.03, 0.12]),
Timestamp(date='2018-01-01'),
]
EXEMPLAR_DERIVED_GENERATORS = [
Apply(add, Integer(100, 200), Integer(300, 400)),
Apply(add, Apply(add, Integer(100, 200), Integer(300, 400)), Apply(add, Integer(500, 600), Integer(700, 800))),
]
EXEMPLAR_CUSTOM_GENERATORS = []
EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS + EXEMPLAR_CUSTOM_GENERATORS
|
Add more exemplar primitive generators
|
Add more exemplar primitive generators
|
Python
|
mit
|
maxalbert/tohu
|
python
|
## Code Before:
from .context import tohu
from tohu.v4.primitive_generators import *
from tohu.v4.derived_generators import *
__all__ = ['EXEMPLAR_GENERATORS', 'EXEMPLAR_PRIMITIVE_GENERATORS', 'EXEMPLAR_DERIVED_GENERATORS']
def add(x, y):
return x + y
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Integer(100, 200),
HashDigest(length=6),
FakerGenerator(method="name"),
IterateOver('abcdefghijklmnopqrstuvwxyz'),
SelectOne('abcdefghijklmnopqrstuvwxyz'),
SelectOne('abcde', p=[0.1, 0.05, 0.7, 0.03, 0.12]),
Timestamp(date='2018-01-01'),
]
EXEMPLAR_DERIVED_GENERATORS = [
Apply(add, Integer(100, 200), Integer(300, 400)),
Apply(add, Apply(add, Integer(100, 200), Integer(300, 400)), Apply(add, Integer(500, 600), Integer(700, 800))),
]
EXEMPLAR_CUSTOM_GENERATORS = []
EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS + EXEMPLAR_CUSTOM_GENERATORS
## Instruction:
Add more exemplar primitive generators
## Code After:
from .context import tohu
from tohu.v4.primitive_generators import *
from tohu.v4.derived_generators import *
__all__ = ['EXEMPLAR_GENERATORS', 'EXEMPLAR_PRIMITIVE_GENERATORS', 'EXEMPLAR_DERIVED_GENERATORS']
def add(x, y):
return x + y
EXEMPLAR_PRIMITIVE_GENERATORS = [
Boolean(p=0.3),
Constant("quux"),
FakerGenerator(method="name"),
Float(12.34, 56.78),
HashDigest(length=6),
Integer(100, 200),
IterateOver('abcdefghijklmnopqrstuvwxyz'),
SelectOne('abcdefghijklmnopqrstuvwxyz'),
SelectOne('abcde', p=[0.1, 0.05, 0.7, 0.03, 0.12]),
Timestamp(date='2018-01-01'),
]
EXEMPLAR_DERIVED_GENERATORS = [
Apply(add, Integer(100, 200), Integer(300, 400)),
Apply(add, Apply(add, Integer(100, 200), Integer(300, 400)), Apply(add, Integer(500, 600), Integer(700, 800))),
]
EXEMPLAR_CUSTOM_GENERATORS = []
EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS + EXEMPLAR_CUSTOM_GENERATORS
|
...
EXEMPLAR_PRIMITIVE_GENERATORS = [
Boolean(p=0.3),
Constant("quux"),
FakerGenerator(method="name"),
Float(12.34, 56.78),
HashDigest(length=6),
Integer(100, 200),
IterateOver('abcdefghijklmnopqrstuvwxyz'),
SelectOne('abcdefghijklmnopqrstuvwxyz'),
SelectOne('abcde', p=[0.1, 0.05, 0.7, 0.03, 0.12]),
...
|
5e30bd1ae8218a6ad5a2582c15aed99258994d83
|
tests/tests/test_swappable_model.py
|
tests/tests/test_swappable_model.py
|
from django.test import TestCase
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, modify_settings
from boardinghouse.schema import get_schema_model
class TestSwappableModel(TestCase):
@modify_settings()
def test_schema_model_app_not_found(self):
settings.BOARDINGHOUSE_SCHEMA_MODEL = 'foo.bar'
with self.assertRaises(ImproperlyConfigured):
get_schema_model()
@modify_settings()
def test_schema_model_model_not_found(self):
settings.BOARDINGHOUSE_SCHEMA_MODEL = 'boardinghouse.NotSchemaModel'
with self.assertRaises(ImproperlyConfigured):
get_schema_model()
@modify_settings()
def test_invalid_schema_model_string(self):
settings.BOARDINGHOUSE_SCHEMA_MODEL = 'foo__bar'
with self.assertRaises(ImproperlyConfigured):
get_schema_model()
|
Write tests for swappable model.
|
Write tests for swappable model.
Resolves #28, #36.
--HG--
branch : fix-swappable-model
|
Python
|
bsd-3-clause
|
schinckel/django-boardinghouse,schinckel/django-boardinghouse,schinckel/django-boardinghouse
|
python
|
## Code Before:
from django.test import TestCase
## Instruction:
Write tests for swappable model.
Resolves #28, #36.
--HG--
branch : fix-swappable-model
## Code After:
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, modify_settings
from boardinghouse.schema import get_schema_model
class TestSwappableModel(TestCase):
@modify_settings()
def test_schema_model_app_not_found(self):
settings.BOARDINGHOUSE_SCHEMA_MODEL = 'foo.bar'
with self.assertRaises(ImproperlyConfigured):
get_schema_model()
@modify_settings()
def test_schema_model_model_not_found(self):
settings.BOARDINGHOUSE_SCHEMA_MODEL = 'boardinghouse.NotSchemaModel'
with self.assertRaises(ImproperlyConfigured):
get_schema_model()
@modify_settings()
def test_invalid_schema_model_string(self):
settings.BOARDINGHOUSE_SCHEMA_MODEL = 'foo__bar'
with self.assertRaises(ImproperlyConfigured):
get_schema_model()
|
...
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, modify_settings
from boardinghouse.schema import get_schema_model
class TestSwappableModel(TestCase):
@modify_settings()
def test_schema_model_app_not_found(self):
settings.BOARDINGHOUSE_SCHEMA_MODEL = 'foo.bar'
with self.assertRaises(ImproperlyConfigured):
get_schema_model()
@modify_settings()
def test_schema_model_model_not_found(self):
settings.BOARDINGHOUSE_SCHEMA_MODEL = 'boardinghouse.NotSchemaModel'
with self.assertRaises(ImproperlyConfigured):
get_schema_model()
@modify_settings()
def test_invalid_schema_model_string(self):
settings.BOARDINGHOUSE_SCHEMA_MODEL = 'foo__bar'
with self.assertRaises(ImproperlyConfigured):
get_schema_model()
...
|
55d9aba503d91466a109736544479a184b3ad95a
|
src/main/java/org/purescript/ide/inspections/PSUnresolvedReferenceInspection.kt
|
src/main/java/org/purescript/ide/inspections/PSUnresolvedReferenceInspection.kt
|
package org.purescript.ide.inspections
import com.intellij.codeInspection.LocalInspectionTool
import com.intellij.codeInspection.ProblemHighlightType
import com.intellij.codeInspection.ProblemsHolder
import com.intellij.psi.PsiElement
import com.intellij.psi.PsiElementVisitor
import org.purescript.psi.exports.PSExportedModule
import org.purescript.psi.exports.PSExportedValue
class PSUnresolvedReferenceInspection : LocalInspectionTool() {
override fun buildVisitor(holder: ProblemsHolder, isOnTheFly: Boolean): PsiElementVisitor {
return object : PsiElementVisitor() {
override fun visitElement(element: PsiElement) {
super.visitElement(element)
when (element) {
is PSExportedValue -> {
if (element.reference.multiResolve(false).isEmpty()) {
holder.registerProblem(
element,
"Unresolved reference '${element.name}'",
ProblemHighlightType.LIKE_UNKNOWN_SYMBOL
)
}
}
is PSExportedModule -> {
if (element.reference.resolve() == null) {
holder.registerProblem(
element,
"Unresolved module '${element.name}'",
ProblemHighlightType.LIKE_UNKNOWN_SYMBOL
)
}
}
}
}
}
}
}
|
package org.purescript.ide.inspections
import com.intellij.codeInspection.LocalInspectionTool
import com.intellij.codeInspection.ProblemsHolder
import com.intellij.psi.PsiElement
import com.intellij.psi.PsiElementVisitor
import com.intellij.psi.PsiReference
import org.purescript.psi.exports.PSExportedModule
import org.purescript.psi.exports.PSExportedValue
import org.purescript.psi.imports.PSImportDeclarationImpl
class PSUnresolvedReferenceInspection : LocalInspectionTool() {
override fun buildVisitor(holder: ProblemsHolder, isOnTheFly: Boolean): PsiElementVisitor {
return object : PsiElementVisitor() {
override fun visitElement(element: PsiElement) {
super.visitElement(element)
when (element) {
is PSExportedValue -> visitReference(element.reference)
is PSExportedModule -> visitReference(element.reference)
is PSImportDeclarationImpl -> visitReference(element.reference)
}
}
private fun visitReference(reference: PsiReference) {
if (reference.resolve() == null) {
holder.registerProblem(reference)
}
}
}
}
}
|
Implement inspection for unresolved module reference
|
Implement inspection for unresolved module reference
|
Kotlin
|
bsd-3-clause
|
intellij-purescript/intellij-purescript,intellij-purescript/intellij-purescript
|
kotlin
|
## Code Before:
package org.purescript.ide.inspections
import com.intellij.codeInspection.LocalInspectionTool
import com.intellij.codeInspection.ProblemHighlightType
import com.intellij.codeInspection.ProblemsHolder
import com.intellij.psi.PsiElement
import com.intellij.psi.PsiElementVisitor
import org.purescript.psi.exports.PSExportedModule
import org.purescript.psi.exports.PSExportedValue
class PSUnresolvedReferenceInspection : LocalInspectionTool() {
override fun buildVisitor(holder: ProblemsHolder, isOnTheFly: Boolean): PsiElementVisitor {
return object : PsiElementVisitor() {
override fun visitElement(element: PsiElement) {
super.visitElement(element)
when (element) {
is PSExportedValue -> {
if (element.reference.multiResolve(false).isEmpty()) {
holder.registerProblem(
element,
"Unresolved reference '${element.name}'",
ProblemHighlightType.LIKE_UNKNOWN_SYMBOL
)
}
}
is PSExportedModule -> {
if (element.reference.resolve() == null) {
holder.registerProblem(
element,
"Unresolved module '${element.name}'",
ProblemHighlightType.LIKE_UNKNOWN_SYMBOL
)
}
}
}
}
}
}
}
## Instruction:
Implement inspection for unresolved module reference
## Code After:
package org.purescript.ide.inspections
import com.intellij.codeInspection.LocalInspectionTool
import com.intellij.codeInspection.ProblemsHolder
import com.intellij.psi.PsiElement
import com.intellij.psi.PsiElementVisitor
import com.intellij.psi.PsiReference
import org.purescript.psi.exports.PSExportedModule
import org.purescript.psi.exports.PSExportedValue
import org.purescript.psi.imports.PSImportDeclarationImpl
class PSUnresolvedReferenceInspection : LocalInspectionTool() {
override fun buildVisitor(holder: ProblemsHolder, isOnTheFly: Boolean): PsiElementVisitor {
return object : PsiElementVisitor() {
override fun visitElement(element: PsiElement) {
super.visitElement(element)
when (element) {
is PSExportedValue -> visitReference(element.reference)
is PSExportedModule -> visitReference(element.reference)
is PSImportDeclarationImpl -> visitReference(element.reference)
}
}
private fun visitReference(reference: PsiReference) {
if (reference.resolve() == null) {
holder.registerProblem(reference)
}
}
}
}
}
|
// ... existing code ...
package org.purescript.ide.inspections
import com.intellij.codeInspection.LocalInspectionTool
import com.intellij.codeInspection.ProblemsHolder
import com.intellij.psi.PsiElement
import com.intellij.psi.PsiElementVisitor
import com.intellij.psi.PsiReference
import org.purescript.psi.exports.PSExportedModule
import org.purescript.psi.exports.PSExportedValue
import org.purescript.psi.imports.PSImportDeclarationImpl
class PSUnresolvedReferenceInspection : LocalInspectionTool() {
override fun buildVisitor(holder: ProblemsHolder, isOnTheFly: Boolean): PsiElementVisitor {
// ... modified code ...
override fun visitElement(element: PsiElement) {
super.visitElement(element)
when (element) {
is PSExportedValue -> visitReference(element.reference)
is PSExportedModule -> visitReference(element.reference)
is PSImportDeclarationImpl -> visitReference(element.reference)
}
}
private fun visitReference(reference: PsiReference) {
if (reference.resolve() == null) {
holder.registerProblem(reference)
}
}
}
// ... rest of the code ...
|
1ea4e06fb3dc08a27a37b379e9ba2fffd5303625
|
ca_on_school_boards_english_public/__init__.py
|
ca_on_school_boards_english_public/__init__.py
|
from utils import CanadianJurisdiction
from opencivicdata.divisions import Division
from pupa.scrape import Organization
class OntarioEnglishPublicSchoolBoards(CanadianJurisdiction):
classification = 'legislature' # just to avoid clash
division_id = 'ocd-division/country:ca/province:on'
division_name = 'Ontario English Public School Board boundary"'
name = 'Ontario English Public School Boards'
url = 'http://www.edu.gov.on.ca/eng/sbinfo/boardList.html'
def get_organizations(self):
organization = Organization(self.name, classification=self.classification)
for division in Division.get(self.division_id).children('school_district'):
organization.add_post(role='Chair', label=division.name, division_id=division.id)
for i in range (0, 15): # XXX made-up number
organization.add_post(role='Trustee (seat {})'.format(i), label=division.name, division_id=division.id)
yield organization
|
from utils import CanadianJurisdiction
from opencivicdata.divisions import Division
from pupa.scrape import Organization
class OntarioEnglishPublicSchoolBoards(CanadianJurisdiction):
classification = 'legislature' # just to avoid clash
division_id = 'ocd-division/country:ca/province:on'
division_name = 'Ontario English Public School Board boundary"'
name = 'Ontario English Public School Boards'
url = 'http://www.edu.gov.on.ca/eng/sbinfo/boardList.html'
def get_organizations(self):
organization = Organization(self.name, classification=self.classification)
for division in Division.get(self.division_id).children('school_district'):
organization.add_post(role='Chair', label=division.name, division_id=division.id)
for i in range (0, 15): # XXX made-up number
organization.add_post(role='Trustee', label='{} (seat {})'.format(division.name, i), division_id=division.id)
yield organization
|
Fix where the seat number appears
|
Fix where the seat number appears
|
Python
|
mit
|
opencivicdata/scrapers-ca,opencivicdata/scrapers-ca
|
python
|
## Code Before:
from utils import CanadianJurisdiction
from opencivicdata.divisions import Division
from pupa.scrape import Organization
class OntarioEnglishPublicSchoolBoards(CanadianJurisdiction):
classification = 'legislature' # just to avoid clash
division_id = 'ocd-division/country:ca/province:on'
division_name = 'Ontario English Public School Board boundary"'
name = 'Ontario English Public School Boards'
url = 'http://www.edu.gov.on.ca/eng/sbinfo/boardList.html'
def get_organizations(self):
organization = Organization(self.name, classification=self.classification)
for division in Division.get(self.division_id).children('school_district'):
organization.add_post(role='Chair', label=division.name, division_id=division.id)
for i in range (0, 15): # XXX made-up number
organization.add_post(role='Trustee (seat {})'.format(i), label=division.name, division_id=division.id)
yield organization
## Instruction:
Fix where the seat number appears
## Code After:
from utils import CanadianJurisdiction
from opencivicdata.divisions import Division
from pupa.scrape import Organization
class OntarioEnglishPublicSchoolBoards(CanadianJurisdiction):
classification = 'legislature' # just to avoid clash
division_id = 'ocd-division/country:ca/province:on'
division_name = 'Ontario English Public School Board boundary"'
name = 'Ontario English Public School Boards'
url = 'http://www.edu.gov.on.ca/eng/sbinfo/boardList.html'
def get_organizations(self):
organization = Organization(self.name, classification=self.classification)
for division in Division.get(self.division_id).children('school_district'):
organization.add_post(role='Chair', label=division.name, division_id=division.id)
for i in range (0, 15): # XXX made-up number
organization.add_post(role='Trustee', label='{} (seat {})'.format(division.name, i), division_id=division.id)
yield organization
|
# ... existing code ...
for division in Division.get(self.division_id).children('school_district'):
organization.add_post(role='Chair', label=division.name, division_id=division.id)
for i in range (0, 15): # XXX made-up number
organization.add_post(role='Trustee', label='{} (seat {})'.format(division.name, i), division_id=division.id)
yield organization
# ... rest of the code ...
|
50a2f78791baabcec413f3ad99130e8956cf4863
|
src/main/java/no/stelar7/api/r4j/pojo/val/matchlist/MatchReference.java
|
src/main/java/no/stelar7/api/r4j/pojo/val/matchlist/MatchReference.java
|
package no.stelar7.api.r4j.pojo.val.matchlist;
import java.io.Serializable;
import java.util.Objects;
public class MatchReference implements Serializable
{
private static final long serialVersionUID = -5301457261872587385L;
private String matchId;
private Long gameStartTime;
private String teamId;
public String getMatchId()
{
return matchId;
}
public Long getGameStartTime()
{
return gameStartTime;
}
public String getTeamId()
{
return teamId;
}
@Override
public boolean equals(Object o)
{
if (this == o)
{
return true;
}
if (o == null || getClass() != o.getClass())
{
return false;
}
MatchReference match = (MatchReference) o;
return Objects.equals(matchId, match.matchId) &&
Objects.equals(gameStartTime, match.gameStartTime) &&
Objects.equals(teamId, match.teamId);
}
@Override
public int hashCode()
{
return Objects.hash(matchId, gameStartTime, teamId);
}
@Override
public String toString()
{
return "Match{" +
"matchId='" + matchId + '\'' +
", gameStartTime=" + gameStartTime +
", teamId='" + teamId + '\'' +
'}';
}
}
|
package no.stelar7.api.r4j.pojo.val.matchlist;
import java.io.Serializable;
import java.util.Objects;
public class MatchReference implements Serializable
{
private static final long serialVersionUID = -5301457261872587385L;
private String matchId;
private Long gameStartTimeMillis;
private String queueId;
public String getMatchId()
{
return matchId;
}
public Long getGameStartTimeMillis()
{
return gameStartTimeMillis;
}
public String getQueueId()
{
return queueId;
}
@Override
public boolean equals(Object o)
{
if (this == o)
{
return true;
}
if (o == null || getClass() != o.getClass())
{
return false;
}
MatchReference match = (MatchReference) o;
return Objects.equals(matchId, match.matchId) &&
Objects.equals(gameStartTimeMillis, match.gameStartTimeMillis) &&
Objects.equals(queueId, match.queueId);
}
@Override
public int hashCode()
{
return Objects.hash(matchId, gameStartTimeMillis, queueId);
}
@Override
public String toString()
{
return "Match{" +
"matchId='" + matchId + '\'' +
", gameStartTimeMillis=" + gameStartTimeMillis +
", queueId='" + queueId + '\'' +
'}';
}
}
|
Fix property names to be related to response from riot valorant api
|
Fix property names to be related to response from riot valorant api
|
Java
|
apache-2.0
|
stelar7/L4J8
|
java
|
## Code Before:
package no.stelar7.api.r4j.pojo.val.matchlist;
import java.io.Serializable;
import java.util.Objects;
public class MatchReference implements Serializable
{
private static final long serialVersionUID = -5301457261872587385L;
private String matchId;
private Long gameStartTime;
private String teamId;
public String getMatchId()
{
return matchId;
}
public Long getGameStartTime()
{
return gameStartTime;
}
public String getTeamId()
{
return teamId;
}
@Override
public boolean equals(Object o)
{
if (this == o)
{
return true;
}
if (o == null || getClass() != o.getClass())
{
return false;
}
MatchReference match = (MatchReference) o;
return Objects.equals(matchId, match.matchId) &&
Objects.equals(gameStartTime, match.gameStartTime) &&
Objects.equals(teamId, match.teamId);
}
@Override
public int hashCode()
{
return Objects.hash(matchId, gameStartTime, teamId);
}
@Override
public String toString()
{
return "Match{" +
"matchId='" + matchId + '\'' +
", gameStartTime=" + gameStartTime +
", teamId='" + teamId + '\'' +
'}';
}
}
## Instruction:
Fix property names to be related to response from riot valorant api
## Code After:
package no.stelar7.api.r4j.pojo.val.matchlist;
import java.io.Serializable;
import java.util.Objects;
public class MatchReference implements Serializable
{
private static final long serialVersionUID = -5301457261872587385L;
private String matchId;
private Long gameStartTimeMillis;
private String queueId;
public String getMatchId()
{
return matchId;
}
public Long getGameStartTimeMillis()
{
return gameStartTimeMillis;
}
public String getQueueId()
{
return queueId;
}
@Override
public boolean equals(Object o)
{
if (this == o)
{
return true;
}
if (o == null || getClass() != o.getClass())
{
return false;
}
MatchReference match = (MatchReference) o;
return Objects.equals(matchId, match.matchId) &&
Objects.equals(gameStartTimeMillis, match.gameStartTimeMillis) &&
Objects.equals(queueId, match.queueId);
}
@Override
public int hashCode()
{
return Objects.hash(matchId, gameStartTimeMillis, queueId);
}
@Override
public String toString()
{
return "Match{" +
"matchId='" + matchId + '\'' +
", gameStartTimeMillis=" + gameStartTimeMillis +
", queueId='" + queueId + '\'' +
'}';
}
}
|
// ... existing code ...
public class MatchReference implements Serializable
{
private static final long serialVersionUID = -5301457261872587385L;
private String matchId;
private Long gameStartTimeMillis;
private String queueId;
public String getMatchId()
{
return matchId;
}
public Long getGameStartTimeMillis()
{
return gameStartTimeMillis;
}
public String getQueueId()
{
return queueId;
}
@Override
public boolean equals(Object o)
{
// ... modified code ...
}
MatchReference match = (MatchReference) o;
return Objects.equals(matchId, match.matchId) &&
Objects.equals(gameStartTimeMillis, match.gameStartTimeMillis) &&
Objects.equals(queueId, match.queueId);
}
@Override
public int hashCode()
{
return Objects.hash(matchId, gameStartTimeMillis, queueId);
}
@Override
public String toString()
{
return "Match{" +
"matchId='" + matchId + '\'' +
", gameStartTimeMillis=" + gameStartTimeMillis +
", queueId='" + queueId + '\'' +
'}';
}
}
// ... rest of the code ...
|
52c97feecb4e89e7948572c9c6e93735fb84c776
|
asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/QueryResult.java
|
asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/QueryResult.java
|
/*
* Copyright 2009-2010 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.asterix.aql.translator;
import edu.uci.ics.asterix.aql.base.Statement;
import edu.uci.ics.asterix.aql.expression.Query;
public class QueryResult {
private final Query query;
private final String resultPath;
public QueryResult(Query statement, String resultPath) {
this.query = statement;
this.resultPath = resultPath;
}
public Statement getStatement() {
return query;
}
public String getResultPath() {
return resultPath;
}
}
|
/*
* Copyright 2009-2010 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.asterix.aql.translator;
import edu.uci.ics.asterix.aql.base.Statement;
import edu.uci.ics.asterix.aql.expression.Query;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
import edu.uci.ics.hyracks.api.job.JobId;
public class QueryResult {
private final Query query;
private final ResultSetId resultSetId;
private JobId jobId;
public QueryResult(Query statement, ResultSetId resultSetId) {
this.query = statement;
this.resultSetId = resultSetId;
}
public void setJobId(JobId jobId) {
this.jobId = jobId;
}
public JobId getJobId() {
return jobId;
}
public Statement getStatement() {
return query;
}
public ResultSetId getResultId() {
return resultSetId;
}
}
|
Store and implement the necessary data and methods for storing information about the job and result set id.
|
Store and implement the necessary data and methods for storing information about the job and result set id.
git-svn-id: d02d9bbead102d27982d5b07d77674fa86f521a4@1142 eaa15691-b419-025a-1212-ee371bd00084
|
Java
|
apache-2.0
|
kisskys/incubator-asterixdb,amoudi87/asterixdb,sjaco002/incubator-asterixdb,ty1er/incubator-asterixdb,ecarm002/incubator-asterixdb,apache/incubator-asterixdb,heriram/incubator-asterixdb,parshimers/incubator-asterixdb,heriram/incubator-asterixdb,waans11/incubator-asterixdb,ty1er/incubator-asterixdb,sjaco002/incubator-asterixdb,heriram/incubator-asterixdb,kisskys/incubator-asterixdb,ty1er/incubator-asterixdb,amoudi87/asterixdb,ecarm002/incubator-asterixdb,ecarm002/incubator-asterixdb,parshimers/incubator-asterixdb,parshimers/incubator-asterixdb,amoudi87/asterixdb,waans11/incubator-asterixdb,kisskys/incubator-asterixdb,sjaco002/incubator-asterixdb,parshimers/incubator-asterixdb,waans11/incubator-asterixdb,ecarm002/incubator-asterixdb,waans11/incubator-asterixdb,heriram/incubator-asterixdb,heriram/incubator-asterixdb,kisskys/incubator-asterixdb,sjaco002/incubator-asterixdb,kisskys/incubator-asterixdb,apache/incubator-asterixdb,parshimers/incubator-asterixdb,apache/incubator-asterixdb,heriram/incubator-asterixdb,sjaco002/incubator-asterixdb,ecarm002/incubator-asterixdb,apache/incubator-asterixdb,apache/incubator-asterixdb,apache/incubator-asterixdb,heriram/incubator-asterixdb,waans11/incubator-asterixdb,waans11/incubator-asterixdb,kisskys/incubator-asterixdb,ecarm002/incubator-asterixdb,waans11/incubator-asterixdb,ty1er/incubator-asterixdb,sjaco002/incubator-asterixdb,amoudi87/asterixdb,apache/incubator-asterixdb,amoudi87/asterixdb,ty1er/incubator-asterixdb,ecarm002/incubator-asterixdb,kisskys/incubator-asterixdb,amoudi87/asterixdb,ty1er/incubator-asterixdb,parshimers/incubator-asterixdb
|
java
|
## Code Before:
/*
* Copyright 2009-2010 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.asterix.aql.translator;
import edu.uci.ics.asterix.aql.base.Statement;
import edu.uci.ics.asterix.aql.expression.Query;
public class QueryResult {
private final Query query;
private final String resultPath;
public QueryResult(Query statement, String resultPath) {
this.query = statement;
this.resultPath = resultPath;
}
public Statement getStatement() {
return query;
}
public String getResultPath() {
return resultPath;
}
}
## Instruction:
Store and implement the necessary data and methods for storing information about the job and result set id.
git-svn-id: d02d9bbead102d27982d5b07d77674fa86f521a4@1142 eaa15691-b419-025a-1212-ee371bd00084
## Code After:
/*
* Copyright 2009-2010 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.asterix.aql.translator;
import edu.uci.ics.asterix.aql.base.Statement;
import edu.uci.ics.asterix.aql.expression.Query;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
import edu.uci.ics.hyracks.api.job.JobId;
public class QueryResult {
private final Query query;
private final ResultSetId resultSetId;
private JobId jobId;
public QueryResult(Query statement, ResultSetId resultSetId) {
this.query = statement;
this.resultSetId = resultSetId;
}
public void setJobId(JobId jobId) {
this.jobId = jobId;
}
public JobId getJobId() {
return jobId;
}
public Statement getStatement() {
return query;
}
public ResultSetId getResultId() {
return resultSetId;
}
}
|
# ... existing code ...
import edu.uci.ics.asterix.aql.base.Statement;
import edu.uci.ics.asterix.aql.expression.Query;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
import edu.uci.ics.hyracks.api.job.JobId;
public class QueryResult {
private final Query query;
private final ResultSetId resultSetId;
private JobId jobId;
public QueryResult(Query statement, ResultSetId resultSetId) {
this.query = statement;
this.resultSetId = resultSetId;
}
public void setJobId(JobId jobId) {
this.jobId = jobId;
}
public JobId getJobId() {
return jobId;
}
public Statement getStatement() {
# ... modified code ...
return query;
}
public ResultSetId getResultId() {
return resultSetId;
}
}
# ... rest of the code ...
|
8f0befc2bd6e42c544e30630a82fdcec291dfe1f
|
judge/telerik_academy_auth.py
|
judge/telerik_academy_auth.py
|
from django.contrib.auth.models import User
from dmoj import settings
import json
import requests
from judge.models import Profile, Language
class RemoteUserBackend (object):
def get_login_url(self, api_key, username, password):
return 'https://telerikacademy.com/Api/Users/CheckUserLogin?apiKey=%s&usernameoremail=%s&password=%s' % (api_key, username, password)
def authenticate(self, username=None, password=None):
# Telerik Academy Authentication
r = requests.post(self.get_login_url(settings.API_KEY, username, password))
result = json.loads(r.content)
if result['IsValid']:
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
user = User(username=username)
user.save()
profile, _ = Profile.objects.get_or_create(user=user, defaults={
'language': Language.get_python2(),
'timezone': 'Europe/Sofia',
})
profile.name = username
profile.save()
if result['IsAdmin']:
user.is_staff = True
user.is_superuser = True
else:
user.is_staff = False
user.is_superuser = False
user.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
from django.contrib.auth.models import User
from dmoj import settings
import json
import requests
from judge.models import Profile, Language
class RemoteUserBackend (object):
def get_login_url(self, api_key, username, password):
return 'https://telerikacademy.com/Api/Users/CheckUserLogin?apiKey=%s&usernameoremail=%s&password=%s' % (api_key, username, password)
def authenticate(self, username=None, password=None):
# Telerik Academy Authentication
r = requests.post(self.get_login_url(settings.API_KEY, username, password))
result = json.loads(r.content)
if result['IsValid']:
real_username = result['UserName']
try:
user = User.objects.get(username=real_username)
except User.DoesNotExist:
user = User(username=real_username)
user.save()
profile, _ = Profile.objects.get_or_create(user=user, defaults={
'language': Language.get_python2(),
'timezone': 'Europe/Sofia',
})
profile.name = real_username
profile.save()
if result['IsAdmin']:
user.is_staff = True
user.is_superuser = True
else:
user.is_staff = False
user.is_superuser = False
user.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
Use username provided by telerik academy auth API
|
Use username provided by telerik academy auth API
|
Python
|
agpl-3.0
|
Minkov/site,Minkov/site,Minkov/site,Minkov/site
|
python
|
## Code Before:
from django.contrib.auth.models import User
from dmoj import settings
import json
import requests
from judge.models import Profile, Language
class RemoteUserBackend (object):
def get_login_url(self, api_key, username, password):
return 'https://telerikacademy.com/Api/Users/CheckUserLogin?apiKey=%s&usernameoremail=%s&password=%s' % (api_key, username, password)
def authenticate(self, username=None, password=None):
# Telerik Academy Authentication
r = requests.post(self.get_login_url(settings.API_KEY, username, password))
result = json.loads(r.content)
if result['IsValid']:
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
user = User(username=username)
user.save()
profile, _ = Profile.objects.get_or_create(user=user, defaults={
'language': Language.get_python2(),
'timezone': 'Europe/Sofia',
})
profile.name = username
profile.save()
if result['IsAdmin']:
user.is_staff = True
user.is_superuser = True
else:
user.is_staff = False
user.is_superuser = False
user.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
## Instruction:
Use username provided by telerik academy auth API
## Code After:
from django.contrib.auth.models import User
from dmoj import settings
import json
import requests
from judge.models import Profile, Language
class RemoteUserBackend (object):
def get_login_url(self, api_key, username, password):
return 'https://telerikacademy.com/Api/Users/CheckUserLogin?apiKey=%s&usernameoremail=%s&password=%s' % (api_key, username, password)
def authenticate(self, username=None, password=None):
# Telerik Academy Authentication
r = requests.post(self.get_login_url(settings.API_KEY, username, password))
result = json.loads(r.content)
if result['IsValid']:
real_username = result['UserName']
try:
user = User.objects.get(username=real_username)
except User.DoesNotExist:
user = User(username=real_username)
user.save()
profile, _ = Profile.objects.get_or_create(user=user, defaults={
'language': Language.get_python2(),
'timezone': 'Europe/Sofia',
})
profile.name = real_username
profile.save()
if result['IsAdmin']:
user.is_staff = True
user.is_superuser = True
else:
user.is_staff = False
user.is_superuser = False
user.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
// ... existing code ...
result = json.loads(r.content)
if result['IsValid']:
real_username = result['UserName']
try:
user = User.objects.get(username=real_username)
except User.DoesNotExist:
user = User(username=real_username)
user.save()
// ... modified code ...
'timezone': 'Europe/Sofia',
})
profile.name = real_username
profile.save()
if result['IsAdmin']:
// ... rest of the code ...
|
f34afd52aa1b89163d90a21feac7bd9e3425639d
|
gapipy/resources/booking/agency_chain.py
|
gapipy/resources/booking/agency_chain.py
|
from __future__ import unicode_literals
from gapipy.resources.base import Resource
from gapipy.resources.booking_company import BookingCompany
class AgencyChain(Resource):
_resource_name = 'agency_chains'
_as_is_fields = [
'id',
'href',
'name',
'agencies',
'agent_notifications',
'communication_preferences',
'flags',
'payment_options',
'passenger_notifications',
]
_date_time_fields_local = [
'date_created',
'date_last_modified',
]
_resource_fields = [
('booking_company', BookingCompany),
]
|
from __future__ import unicode_literals
from gapipy.resources.base import Resource
from gapipy.resources.booking_company import BookingCompany
class AgencyChain(Resource):
_resource_name = 'agency_chains'
_is_parent_resource = True
_as_is_fields = [
'id',
'href',
'name',
'agent_notifications',
'communication_preferences',
'flags',
'payment_options',
'passenger_notifications',
]
_date_time_fields_local = [
'date_created',
'date_last_modified',
]
_resource_fields = [
('booking_company', BookingCompany),
]
_resource_collection_fields = [('agencies', 'Agency')]
|
Make `AgencyChain.agencies` a "resource collection" field
|
Make `AgencyChain.agencies` a "resource collection" field
We can now get a `Query` when accessing the `agencies` attribute on an
`AgencyChain` instead of a dict with a URI to the agencies list.
|
Python
|
mit
|
gadventures/gapipy
|
python
|
## Code Before:
from __future__ import unicode_literals
from gapipy.resources.base import Resource
from gapipy.resources.booking_company import BookingCompany
class AgencyChain(Resource):
_resource_name = 'agency_chains'
_as_is_fields = [
'id',
'href',
'name',
'agencies',
'agent_notifications',
'communication_preferences',
'flags',
'payment_options',
'passenger_notifications',
]
_date_time_fields_local = [
'date_created',
'date_last_modified',
]
_resource_fields = [
('booking_company', BookingCompany),
]
## Instruction:
Make `AgencyChain.agencies` a "resource collection" field
We can now get a `Query` when accessing the `agencies` attribute on an
`AgencyChain` instead of a dict with a URI to the agencies list.
## Code After:
from __future__ import unicode_literals
from gapipy.resources.base import Resource
from gapipy.resources.booking_company import BookingCompany
class AgencyChain(Resource):
_resource_name = 'agency_chains'
_is_parent_resource = True
_as_is_fields = [
'id',
'href',
'name',
'agent_notifications',
'communication_preferences',
'flags',
'payment_options',
'passenger_notifications',
]
_date_time_fields_local = [
'date_created',
'date_last_modified',
]
_resource_fields = [
('booking_company', BookingCompany),
]
_resource_collection_fields = [('agencies', 'Agency')]
|
// ... existing code ...
class AgencyChain(Resource):
_resource_name = 'agency_chains'
_is_parent_resource = True
_as_is_fields = [
'id',
'href',
'name',
'agent_notifications',
'communication_preferences',
'flags',
// ... modified code ...
_resource_fields = [
('booking_company', BookingCompany),
]
_resource_collection_fields = [('agencies', 'Agency')]
// ... rest of the code ...
|
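Illustrative note on the record above: the commit message says that accessing `agencies` now yields a `Query` instead of a dict holding a URI. A minimal usage sketch of that difference follows; the gapipy `Client` entry point, the `application_key` argument, the `agency_chains.get(...)` accessor, the chain id and the printed field names are assumptions made for the example, not details taken from the record.

# Hedged sketch: assumes gapipy exposes a Client and an agency_chains accessor.
from gapipy import Client

api = Client(application_key='example-key')   # hypothetical credentials
chain = api.agency_chains.get(123)            # hypothetical agency chain id

# Before the change, `chain.agencies` was a plain dict such as
# {'href': '.../agencies?agency_chain=123'} that still had to be fetched by hand.
# After the change it behaves like any other resource collection field: a lazy
# Query over Agency resources that can be iterated (or filtered) directly.
for agency in chain.agencies:
    print(agency.id, agency.name)             # field names assumed for illustration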
02d67008d0f0bdc205ca9168384c4a951c106a28
|
nintendo/common/transport.py
|
nintendo/common/transport.py
|
import socket
class Socket:
TCP = 0
UDP = 1
def __init__(self, type):
if type == self.TCP:
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)
else:
self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
self.s.setblocking(False)
def connect(self, host, port): self.s.connect((host, port))
def close(self): self.s.close()
def send(self, data): self.s.sendall(data)
def recv(self, num):
try:
return self.s.recv(num)
except BlockingIOError:
pass
def get_address(self): return self.s.getsockname()[0]
def get_port(self): return self.s.getsockname()[1]
|
import socket
class Socket:
TCP = 0
UDP = 1
def __init__(self, type):
if type == self.TCP:
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)
else:
self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
self.s.setblocking(False)
def connect(self, host, port): self.s.connect((host, port))
def close(self): self.s.close()
def send(self, data): self.s.sendall(data)
def recv(self, num):
try:
return self.s.recv(num)
except BlockingIOError:
pass
def bind(self, addr=("", 0)): self.s.bind(addr)
def sendto(self, data, addr): self.s.sendto(data, addr)
def recvfrom(self, num):
try:
return self.s.recvfrom(num)
except BlockingIOError:
return None, None
def get_address(self): return self.s.getsockname()[0]
def get_port(self): return self.s.getsockname()[1]
|
Add a few functions to Socket class
|
Add a few functions to Socket class
|
Python
|
mit
|
Kinnay/NintendoClients
|
python
|
## Code Before:
import socket
class Socket:
TCP = 0
UDP = 1
def __init__(self, type):
if type == self.TCP:
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)
else:
self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
self.s.setblocking(False)
def connect(self, host, port): self.s.connect((host, port))
def close(self): self.s.close()
def send(self, data): self.s.sendall(data)
def recv(self, num):
try:
return self.s.recv(num)
except BlockingIOError:
pass
def get_address(self): return self.s.getsockname()[0]
def get_port(self): return self.s.getsockname()[1]
## Instruction:
Add a few functions to Socket class
## Code After:
import socket
class Socket:
TCP = 0
UDP = 1
def __init__(self, type):
if type == self.TCP:
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)
else:
self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
self.s.setblocking(False)
def connect(self, host, port): self.s.connect((host, port))
def close(self): self.s.close()
def send(self, data): self.s.sendall(data)
def recv(self, num):
try:
return self.s.recv(num)
except BlockingIOError:
pass
def bind(self, addr=("", 0)): self.s.bind(addr)
def sendto(self, data, addr): self.s.sendto(data, addr)
def recvfrom(self, num):
try:
return self.s.recvfrom(num)
except BlockingIOError:
return None, None
def get_address(self): return self.s.getsockname()[0]
def get_port(self): return self.s.getsockname()[1]
|
...
except BlockingIOError:
pass
def bind(self, addr=("", 0)): self.s.bind(addr)
def sendto(self, data, addr): self.s.sendto(data, addr)
def recvfrom(self, num):
try:
return self.s.recvfrom(num)
except BlockingIOError:
return None, None
def get_address(self): return self.s.getsockname()[0]
def get_port(self): return self.s.getsockname()[1]
...
|
c12b0e1c3253f17b71cbb4095c83c73d0f2ea188
|
setup.py
|
setup.py
|
from setuptools import setup
with open('reqs/production.txt') as f:
_requirements = f.read().splitlines()
setup(
name='rtrss',
version='0.3',
author='notapresent',
author_email='[email protected]',
url='https://github.com/notapresent/rtrss',
description='RSS feeds for popular bittorrent tracker',
long_description=__doc__,
license='Apache 2.0',
download_url='https://github.com/notapresent/rtrss/archive/master.zip',
install_requires=_requirements,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Framework :: Flask',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
]
)
|
from setuptools import setup
with open('reqs/production.txt') as f:
_requirements = f.read().splitlines()
setup(
name='rtrss',
version='0.3',
author='notapresent',
author_email='[email protected]',
url='https://github.com/notapresent/rtrss',
description='RSS feeds for popular bittorrent tracker',
long_description=__doc__,
license='Apache 2.0',
download_url='https://github.com/notapresent/rtrss/archive/master.zip',
install_requires=_requirements,
entry_points={
'console_scripts': [
'rtrssmgr = rtrss.worker:main',
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Framework :: Flask',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
]
)
|
Add rtrssmgr command entry point
|
Add rtrssmgr command entry point
|
Python
|
apache-2.0
|
notapresent/rtrss,notapresent/rtrss,notapresent/rtrss,notapresent/rtrss
|
python
|
## Code Before:
from setuptools import setup
with open('reqs/production.txt') as f:
_requirements = f.read().splitlines()
setup(
name='rtrss',
version='0.3',
author='notapresent',
author_email='[email protected]',
url='https://github.com/notapresent/rtrss',
description='RSS feeds for popular bittorrent tracker',
long_description=__doc__,
license='Apache 2.0',
download_url='https://github.com/notapresent/rtrss/archive/master.zip',
install_requires=_requirements,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Framework :: Flask',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
]
)
## Instruction:
Add rtrssmgr command entry point
## Code After:
from setuptools import setup
with open('reqs/production.txt') as f:
_requirements = f.read().splitlines()
setup(
name='rtrss',
version='0.3',
author='notapresent',
author_email='[email protected]',
url='https://github.com/notapresent/rtrss',
description='RSS feeds for popular bittorrent tracker',
long_description=__doc__,
license='Apache 2.0',
download_url='https://github.com/notapresent/rtrss/archive/master.zip',
install_requires=_requirements,
entry_points={
'console_scripts': [
'rtrssmgr = rtrss.worker:main',
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Framework :: Flask',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
]
)
|
// ... existing code ...
license='Apache 2.0',
download_url='https://github.com/notapresent/rtrss/archive/master.zip',
install_requires=_requirements,
entry_points={
'console_scripts': [
'rtrssmgr = rtrss.worker:main',
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
// ... rest of the code ...
|
c92d9c6da02dacdd91a21c3c5675940154c0e21a
|
cla_backend/apps/reports/db/backend/base.py
|
cla_backend/apps/reports/db/backend/base.py
|
from django.db.backends.postgresql_psycopg2.base import * # noqa
class DynamicTimezoneDatabaseWrapper(DatabaseWrapper):
'''
This exists to allow report generation SQL to set the time zone of the
connection without interference from Django, which normally tries to
ensure that all connections are UTC if `USE_TZ` is `True`.
'''
def create_cursor(self):
cursor = self.connection.cursor()
cursor.tzinfo_factory = None
return cursor
DatabaseWrapper = DynamicTimezoneDatabaseWrapper
|
from django.db.backends.postgresql_psycopg2.base import * # noqa
import pytz
def local_tzinfo_factory(offset):
'''
Create a tzinfo object using the offset of the db connection. This ensures
that the datetimes returned are timezone aware and will be printed in the
reports with timezone information.
'''
return pytz.FixedOffset(offset)
class DynamicTimezoneDatabaseWrapper(DatabaseWrapper):
'''
This exists to allow report generation SQL to set the time zone of the
connection without interference from Django, which normally tries to
ensure that all connections are UTC if `USE_TZ` is `True`.
'''
def create_cursor(self):
cursor = self.connection.cursor()
cursor.tzinfo_factory = local_tzinfo_factory
return cursor
DatabaseWrapper = DynamicTimezoneDatabaseWrapper
|
Add a tzinfo factory method to replica connection to create local tzinfos
|
Add a tzinfo factory method to replica connection to create local tzinfos
This is to ensure that the datetimes returned for report generation
are timezone aware and will thus be printed in the reports with
timezone information.
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
python
|
## Code Before:
from django.db.backends.postgresql_psycopg2.base import * # noqa
class DynamicTimezoneDatabaseWrapper(DatabaseWrapper):
'''
This exists to allow report generation SQL to set the time zone of the
connection without interference from Django, which normally tries to
ensure that all connections are UTC if `USE_TZ` is `True`.
'''
def create_cursor(self):
cursor = self.connection.cursor()
cursor.tzinfo_factory = None
return cursor
DatabaseWrapper = DynamicTimezoneDatabaseWrapper
## Instruction:
Add a tzinfo factory method to replica connection to create local tzinfos
This is to ensure that the datetimes returned for report generation
are timezone aware and will thus be printed in the reports with
timezone information.
## Code After:
from django.db.backends.postgresql_psycopg2.base import * # noqa
import pytz
def local_tzinfo_factory(offset):
'''
Create a tzinfo object using the offset of the db connection. This ensures
that the datetimes returned are timezone aware and will be printed in the
reports with timezone information.
'''
return pytz.FixedOffset(offset)
class DynamicTimezoneDatabaseWrapper(DatabaseWrapper):
'''
This exists to allow report generation SQL to set the time zone of the
connection without interference from Django, which normally tries to
ensure that all connections are UTC if `USE_TZ` is `True`.
'''
def create_cursor(self):
cursor = self.connection.cursor()
cursor.tzinfo_factory = local_tzinfo_factory
return cursor
DatabaseWrapper = DynamicTimezoneDatabaseWrapper
|
# ... existing code ...
from django.db.backends.postgresql_psycopg2.base import * # noqa
import pytz
def local_tzinfo_factory(offset):
'''
Create a tzinfo object using the offset of the db connection. This ensures
that the datetimes returned are timezone aware and will be printed in the
reports with timezone information.
'''
return pytz.FixedOffset(offset)
class DynamicTimezoneDatabaseWrapper(DatabaseWrapper):
# ... modified code ...
def create_cursor(self):
cursor = self.connection.cursor()
cursor.tzinfo_factory = local_tzinfo_factory
return cursor
# ... rest of the code ...
|
6ccc85832aeff2ca9800cd9e2af8461515ff680d
|
cartography/midi_utils.py
|
cartography/midi_utils.py
|
import mido
def open_output():
return open_steinberg_output()
def get_steinberg_device_name():
output_names = [n for n in mido.get_output_names() if 'steinberg' in n.lower()]
if len(output_names) != 1:
raise Exception(f"Found the following steinberg MIDI devices: {output_names}. Expected only one")
return output_names[0]
def open_steinberg_output():
return mido.open_output(get_steinberg_device_name())
def open_steinberg_input():
return mido.open_input(get_steinberg_device_name())
|
import mido
def open_output():
return open_steinberg_output()
def get_steinberg_device_name():
output_names = [n for n in mido.get_output_names() if 'steinberg' in n.lower()]
if len(output_names) != 1:
raise Exception(f"Found the following steinberg MIDI devices: {output_names}. Expected only one")
return output_names[0]
def open_steinberg_output():
return mido.open_output(get_steinberg_device_name(), autoreset=True)
def open_steinberg_input():
return mido.open_input(get_steinberg_device_name())
|
Add dump presets and utils
|
Add dump presets and utils
|
Python
|
mit
|
tingled/synthetic-cartography,tingled/synthetic-cartography
|
python
|
## Code Before:
import mido
def open_output():
return open_steinberg_output()
def get_steinberg_device_name():
output_names = [n for n in mido.get_output_names() if 'steinberg' in n.lower()]
if len(output_names) != 1:
raise Exception(f"Found the following steinberg MIDI devices: {output_names}. Expected only one")
return output_names[0]
def open_steinberg_output():
return mido.open_output(get_steinberg_device_name())
def open_steinberg_input():
return mido.open_input(get_steinberg_device_name())
## Instruction:
Add dump presets and utils
## Code After:
import mido
def open_output():
return open_steinberg_output()
def get_steinberg_device_name():
output_names = [n for n in mido.get_output_names() if 'steinberg' in n.lower()]
if len(output_names) != 1:
raise Exception(f"Found the following steinberg MIDI devices: {output_names}. Expected only one")
return output_names[0]
def open_steinberg_output():
return mido.open_output(get_steinberg_device_name(), autoreset=True)
def open_steinberg_input():
return mido.open_input(get_steinberg_device_name())
|
// ... existing code ...
def open_steinberg_output():
return mido.open_output(get_steinberg_device_name(), autoreset=True)
def open_steinberg_input():
// ... rest of the code ...
|
cf3ff4d78a9a64c0c0e8d274ca36f68e9290b463
|
tests/seattle_benchmark.py
|
tests/seattle_benchmark.py
|
from transit.reader import JsonUnmarshaler
import json
import time
from StringIO import StringIO
def run_tests(data):
datas = StringIO(data)
t = time.time()
JsonUnmarshaler().load(datas)
et = time.time()
datas = StringIO(data)
tt = time.time()
json.load(datas)
ett = time.time()
print "Done: " + str((et - t) * 1000.0) + " -- raw JSON in: " + str((ett - tt) * 1000.0)
fd = open("../transit/seattle-data0.tjs", 'r')
data = fd.read()
fd.close()
for x in range(100):
run_tests(data)
|
from transit.reader import JsonUnmarshaler
import json
import time
from StringIO import StringIO
def run_tests(data):
datas = StringIO(data)
t = time.time()
JsonUnmarshaler().load(datas)
et = time.time()
datas = StringIO(data)
tt = time.time()
json.load(datas)
ett = time.time()
read_delta = (et - t) * 1000.0
print "Done: " + str(read_delta) + " -- raw JSON in: " + str((ett - tt) * 1000.0)
return read_delta
fd = open("../transit/seattle-data0.tjs", 'r')
data = fd.read()
fd.close()
runs = 100
deltas = [run_tests(data) for x in range(runs)]
print "\nMean: "+str(sum(deltas)/runs)
|
Update Seattle to print the mean at the end
|
Update Seattle to print the mean at the end
|
Python
|
apache-2.0
|
cognitect/transit-python,cognitect/transit-python,dand-oss/transit-python,dand-oss/transit-python
|
python
|
## Code Before:
from transit.reader import JsonUnmarshaler
import json
import time
from StringIO import StringIO
def run_tests(data):
datas = StringIO(data)
t = time.time()
JsonUnmarshaler().load(datas)
et = time.time()
datas = StringIO(data)
tt = time.time()
json.load(datas)
ett = time.time()
print "Done: " + str((et - t) * 1000.0) + " -- raw JSON in: " + str((ett - tt) * 1000.0)
fd = open("../transit/seattle-data0.tjs", 'r')
data = fd.read()
fd.close()
for x in range(100):
run_tests(data)
## Instruction:
Update Seattle to print the mean at the end
## Code After:
from transit.reader import JsonUnmarshaler
import json
import time
from StringIO import StringIO
def run_tests(data):
datas = StringIO(data)
t = time.time()
JsonUnmarshaler().load(datas)
et = time.time()
datas = StringIO(data)
tt = time.time()
json.load(datas)
ett = time.time()
read_delta = (et - t) * 1000.0
print "Done: " + str(read_delta) + " -- raw JSON in: " + str((ett - tt) * 1000.0)
return read_delta
fd = open("../transit/seattle-data0.tjs", 'r')
data = fd.read()
fd.close()
runs = 100
deltas = [run_tests(data) for x in range(runs)]
print "\nMean: "+str(sum(deltas)/runs)
|
...
tt = time.time()
json.load(datas)
ett = time.time()
read_delta = (et - t) * 1000.0
print "Done: " + str(read_delta) + " -- raw JSON in: " + str((ett - tt) * 1000.0)
return read_delta
fd = open("../transit/seattle-data0.tjs", 'r')
data = fd.read()
fd.close()
runs = 100
deltas = [run_tests(data) for x in range(runs)]
print "\nMean: "+str(sum(deltas)/runs)
...
|
8dc991a842d10e858b014dc91ae2897d05f9f7ca
|
src/test/kotlin/mixit/integration/AbstractIntegrationTests.kt
|
src/test/kotlin/mixit/integration/AbstractIntegrationTests.kt
|
package mixit.integration
import org.junit.Before
import org.junit.runner.RunWith
import org.springframework.boot.context.embedded.LocalServerPort
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment
import org.springframework.test.context.junit4.SpringRunner
import org.springframework.web.reactive.function.client.WebClient
@RunWith(SpringRunner::class)
@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
abstract class AbstractIntegrationTests {
@LocalServerPort
var port: Int? = null
lateinit var client: WebClient
@Before
fun setup() {
client = WebClient.create("http://localhost:$port")
}
}
|
package mixit.integration
import org.junit.Before
import org.junit.runner.RunWith
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment
import org.springframework.boot.web.server.LocalServerPort
import org.springframework.test.context.junit4.SpringRunner
import org.springframework.web.reactive.function.client.WebClient
@RunWith(SpringRunner::class)
@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
abstract class AbstractIntegrationTests {
@LocalServerPort
var port: Int? = null
lateinit var client: WebClient
@Before
fun setup() {
client = WebClient.create("http://localhost:$port")
}
}
|
Update LocalServerPort against latest Boot snapshots
|
Update LocalServerPort against latest Boot snapshots
|
Kotlin
|
apache-2.0
|
mix-it/mixit,mix-it/mixit,mixitconf/mixit,sdeleuze/mixit,mixitconf/mixit,mix-it/mixit,mixitconf/mixit,mix-it/mixit,sdeleuze/mixit,sdeleuze/mixit,mixitconf/mixit,mix-it/mixit,sdeleuze/mixit
|
kotlin
|
## Code Before:
package mixit.integration
import org.junit.Before
import org.junit.runner.RunWith
import org.springframework.boot.context.embedded.LocalServerPort
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment
import org.springframework.test.context.junit4.SpringRunner
import org.springframework.web.reactive.function.client.WebClient
@RunWith(SpringRunner::class)
@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
abstract class AbstractIntegrationTests {
@LocalServerPort
var port: Int? = null
lateinit var client: WebClient
@Before
fun setup() {
client = WebClient.create("http://localhost:$port")
}
}
## Instruction:
Update LocalServerPort against latest Boot snapshots
## Code After:
package mixit.integration
import org.junit.Before
import org.junit.runner.RunWith
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment
import org.springframework.boot.web.server.LocalServerPort
import org.springframework.test.context.junit4.SpringRunner
import org.springframework.web.reactive.function.client.WebClient
@RunWith(SpringRunner::class)
@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
abstract class AbstractIntegrationTests {
@LocalServerPort
var port: Int? = null
lateinit var client: WebClient
@Before
fun setup() {
client = WebClient.create("http://localhost:$port")
}
}
|
# ... existing code ...
import org.junit.Before
import org.junit.runner.RunWith
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment
import org.springframework.boot.web.server.LocalServerPort
import org.springframework.test.context.junit4.SpringRunner
import org.springframework.web.reactive.function.client.WebClient
# ... rest of the code ...
|
6cf7217147f6a555354861df03e9e1421453fdc4
|
sample/src/main/java/com/jenzz/buildconstants/sample/MainActivity.java
|
sample/src/main/java/com/jenzz/buildconstants/sample/MainActivity.java
|
package com.jenzz.buildconstants.sample;
import android.support.v7.app.AppCompatActivity;
public class MainActivity extends AppCompatActivity {
}
|
package com.jenzz.buildconstants.sample;
import android.content.res.Resources;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
public class MainActivity extends AppCompatActivity {
@SuppressWarnings("unused")
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Java constants
String javaString = SampleBuildConstants.ASTRING;
boolean javaBoolean = SampleBuildConstants.ABOOLEAN;
int javaNumber = SampleBuildConstants.ANUMBER;
// XML constants
Resources res = getResources();
String xmlString = res.getString(R.string.astring);
boolean xmlBoolean = res.getBoolean(R.bool.aboolean);
int xmlNumber = res.getInteger(R.integer.anumber);
}
}
|
Use generated constants in sample project
|
Use generated constants in sample project
|
Java
|
mit
|
jenzz/gradle-android-buildconstants-plugin
|
java
|
## Code Before:
package com.jenzz.buildconstants.sample;
import android.support.v7.app.AppCompatActivity;
public class MainActivity extends AppCompatActivity {
}
## Instruction:
Use generated constants in sample project
## Code After:
package com.jenzz.buildconstants.sample;
import android.content.res.Resources;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
public class MainActivity extends AppCompatActivity {
@SuppressWarnings("unused")
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Java constants
String javaString = SampleBuildConstants.ASTRING;
boolean javaBoolean = SampleBuildConstants.ABOOLEAN;
int javaNumber = SampleBuildConstants.ANUMBER;
// XML constants
Resources res = getResources();
String xmlString = res.getString(R.string.astring);
boolean xmlBoolean = res.getBoolean(R.bool.aboolean);
int xmlNumber = res.getInteger(R.integer.anumber);
}
}
|
# ... existing code ...
package com.jenzz.buildconstants.sample;
import android.content.res.Resources;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
public class MainActivity extends AppCompatActivity {
@SuppressWarnings("unused")
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Java constants
String javaString = SampleBuildConstants.ASTRING;
boolean javaBoolean = SampleBuildConstants.ABOOLEAN;
int javaNumber = SampleBuildConstants.ANUMBER;
// XML constants
Resources res = getResources();
String xmlString = res.getString(R.string.astring);
boolean xmlBoolean = res.getBoolean(R.bool.aboolean);
int xmlNumber = res.getInteger(R.integer.anumber);
}
}
# ... rest of the code ...
|
fc036a2cc7bd3200d98ed833343e116f4ce32bf1
|
kitchen/text/exceptions.py
|
kitchen/text/exceptions.py
|
from kitchen import exceptions
class XmlEncodeError(exceptions.KitchenException):
'''Exception thrown by error conditions when encoding an xml string.
'''
pass
|
from kitchen import exceptions
class XmlEncodeError(exceptions.KitchenException):
'''Exception thrown by error conditions when encoding an xml string.
'''
pass
class ControlCharError(exceptions.KitchenException):
'''Exception thrown when an ascii control character is encountered.
'''
pass
|
Add ControlCharError for process_control_chars function
|
Add ControlCharError for process_control_chars function
|
Python
|
lgpl-2.1
|
fedora-infra/kitchen,fedora-infra/kitchen
|
python
|
## Code Before:
from kitchen import exceptions
class XmlEncodeError(exceptions.KitchenException):
'''Exception thrown by error conditions when encoding an xml string.
'''
pass
## Instruction:
Add ControlCharError for process_control_chars function
## Code After:
from kitchen import exceptions
class XmlEncodeError(exceptions.KitchenException):
'''Exception thrown by error conditions when encoding an xml string.
'''
pass
class ControlCharError(exceptions.KitchenException):
'''Exception thrown when an ascii control character is encountered.
'''
pass
|
...
'''Exception thrown by error conditions when encoding an xml string.
'''
pass
class ControlCharError(exceptions.KitchenException):
'''Exception thrown when an ascii control character is encountered.
'''
pass
...
|
bac7bc1bb9663adebe0c1768d67c4ed1d1f452fc
|
setup.py
|
setup.py
|
from setuptools import find_packages, setup
setup(
name='natsort',
version='6.0.0',
packages=find_packages(),
entry_points={'console_scripts': ['natsort = natsort.__main__:main']},
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
extras_require={
'fast': ["fastnumbers >= 2.0.0"],
'icu': ["PyICU >= 1.0.0"]
}
)
|
from setuptools import find_packages, setup
setup(
name='natsort',
version='6.0.0',
packages=find_packages(),
entry_points={'console_scripts': ['natsort = natsort.__main__:main']},
python_requires=">=3.4",
extras_require={
'fast': ["fastnumbers >= 2.0.0"],
'icu': ["PyICU >= 1.0.0"]
}
)
|
Remove support for Python 2
|
Remove support for Python 2
This commit will prevent pip from installing natsort on any Python
version older than 3.4.
|
Python
|
mit
|
SethMMorton/natsort
|
python
|
## Code Before:
from setuptools import find_packages, setup
setup(
name='natsort',
version='6.0.0',
packages=find_packages(),
entry_points={'console_scripts': ['natsort = natsort.__main__:main']},
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
extras_require={
'fast': ["fastnumbers >= 2.0.0"],
'icu': ["PyICU >= 1.0.0"]
}
)
## Instruction:
Remove support for Python 2
This commit will prevent pip from installing natsort on any Python
version older than 3.4.
## Code After:
from setuptools import find_packages, setup
setup(
name='natsort',
version='6.0.0',
packages=find_packages(),
entry_points={'console_scripts': ['natsort = natsort.__main__:main']},
python_requires=">=3.4",
extras_require={
'fast': ["fastnumbers >= 2.0.0"],
'icu': ["PyICU >= 1.0.0"]
}
)
|
// ... existing code ...
version='6.0.0',
packages=find_packages(),
entry_points={'console_scripts': ['natsort = natsort.__main__:main']},
python_requires=">=3.4",
extras_require={
'fast': ["fastnumbers >= 2.0.0"],
'icu': ["PyICU >= 1.0.0"]
// ... rest of the code ...
|
49154c02a7cf518675a3b0d448bd79d62dfa4b00
|
proto-actor/src/main/kotlin/actor/proto/DeferredProcess.kt
|
proto-actor/src/main/kotlin/actor/proto/DeferredProcess.kt
|
package actor.proto
import actor.proto.mailbox.SystemMessage
import kotlinx.coroutines.experimental.CompletableDeferred
import kotlinx.coroutines.experimental.withTimeout
import java.time.Duration
import java.util.concurrent.TimeUnit
class DeferredProcess<out T>(private val timeout: Duration = Duration.ofMillis(5000)) : Process() {
val pid = ProcessRegistry.put(ProcessRegistry.nextId(), this)
private val cd = CompletableDeferred<T>()
override fun sendUserMessage(pid: PID, message: Any) {
val m = when (message) {
is MessageEnvelope -> message.message
else -> message
}
@Suppress("UNCHECKED_CAST")
cd.complete(m as T)
}
override fun sendSystemMessage(pid: PID, message: SystemMessage) {}
suspend fun await(): T {
val result = withTimeout(timeout.toMillis(), TimeUnit.MILLISECONDS) { cd.await() }
ProcessRegistry.remove(pid)
return result
}
}
|
package actor.proto
import actor.proto.mailbox.SystemMessage
import kotlinx.coroutines.experimental.CompletableDeferred
import kotlinx.coroutines.experimental.withTimeout
import java.time.Duration
import java.util.concurrent.TimeUnit
class DeferredProcess<out T>(private val timeout: Duration = Duration.ofMillis(5000)) : Process() {
val pid = ProcessRegistry.put(ProcessRegistry.nextId(), this)
private val cd = CompletableDeferred<T>()
override fun sendUserMessage(pid: PID, message: Any) {
val m = when (message) {
is MessageEnvelope -> message.message
else -> message
}
@Suppress("UNCHECKED_CAST")
cd.complete(m as T)
}
override fun sendSystemMessage(pid: PID, message: SystemMessage) {}
suspend fun await(): T {
try {
val result = withTimeout(timeout.toMillis(), TimeUnit.MILLISECONDS) { cd.await() }
ProcessRegistry.remove(pid)
return result;
} catch (exception: Exception) {
ProcessRegistry.remove(pid)
throw exception;
}
}
}
|
Remove from registry when an exception (including timeout) occurs
|
Remove from registry when an exception (including timeout) occurs
|
Kotlin
|
apache-2.0
|
AsynkronIT/protoactor-kotlin,AsynkronIT/protoactor-kotlin,AsynkronIT/protoactor-kotlin
|
kotlin
|
## Code Before:
package actor.proto
import actor.proto.mailbox.SystemMessage
import kotlinx.coroutines.experimental.CompletableDeferred
import kotlinx.coroutines.experimental.withTimeout
import java.time.Duration
import java.util.concurrent.TimeUnit
class DeferredProcess<out T>(private val timeout: Duration = Duration.ofMillis(5000)) : Process() {
val pid = ProcessRegistry.put(ProcessRegistry.nextId(), this)
private val cd = CompletableDeferred<T>()
override fun sendUserMessage(pid: PID, message: Any) {
val m = when (message) {
is MessageEnvelope -> message.message
else -> message
}
@Suppress("UNCHECKED_CAST")
cd.complete(m as T)
}
override fun sendSystemMessage(pid: PID, message: SystemMessage) {}
suspend fun await(): T {
val result = withTimeout(timeout.toMillis(), TimeUnit.MILLISECONDS) { cd.await() }
ProcessRegistry.remove(pid)
return result
}
}
## Instruction:
Remove from registry when an exception (including timeout) occurs
## Code After:
package actor.proto
import actor.proto.mailbox.SystemMessage
import kotlinx.coroutines.experimental.CompletableDeferred
import kotlinx.coroutines.experimental.withTimeout
import java.time.Duration
import java.util.concurrent.TimeUnit
class DeferredProcess<out T>(private val timeout: Duration = Duration.ofMillis(5000)) : Process() {
val pid = ProcessRegistry.put(ProcessRegistry.nextId(), this)
private val cd = CompletableDeferred<T>()
override fun sendUserMessage(pid: PID, message: Any) {
val m = when (message) {
is MessageEnvelope -> message.message
else -> message
}
@Suppress("UNCHECKED_CAST")
cd.complete(m as T)
}
override fun sendSystemMessage(pid: PID, message: SystemMessage) {}
suspend fun await(): T {
try {
val result = withTimeout(timeout.toMillis(), TimeUnit.MILLISECONDS) { cd.await() }
ProcessRegistry.remove(pid)
return result;
} catch (exception: Exception) {
ProcessRegistry.remove(pid)
throw exception;
}
}
}
|
...
override fun sendSystemMessage(pid: PID, message: SystemMessage) {}
suspend fun await(): T {
try {
val result = withTimeout(timeout.toMillis(), TimeUnit.MILLISECONDS) { cd.await() }
ProcessRegistry.remove(pid)
return result;
} catch (exception: Exception) {
ProcessRegistry.remove(pid)
throw exception;
}
}
}
...
|
f2ec24fd42d52a52829070b5e31d9a17d145df30
|
arch/microblaze/include/asm/entry.h
|
arch/microblaze/include/asm/entry.h
|
/*
* Definitions used by low-level trap handlers
*
* Copyright (C) 2008 Michal Simek
* Copyright (C) 2007 - 2008 PetaLogix
* Copyright (C) 2007 John Williams <[email protected]>
*
* This file is subject to the terms and conditions of the GNU General
* Public License. See the file COPYING in the main directory of this
* archive for more details.
*/
#ifndef _ASM_MICROBLAZE_ENTRY_H
#define _ASM_MICROBLAZE_ENTRY_H
#include <asm/percpu.h>
#include <asm/ptrace.h>
/*
* These are per-cpu variables required in entry.S, among other
* places
*/
#define PER_CPU(var) per_cpu__##var
# ifndef __ASSEMBLY__
DECLARE_PER_CPU(unsigned int, KSP); /* Saved kernel stack pointer */
DECLARE_PER_CPU(unsigned int, KM); /* Kernel/user mode */
DECLARE_PER_CPU(unsigned int, ENTRY_SP); /* Saved SP on kernel entry */
DECLARE_PER_CPU(unsigned int, R11_SAVE); /* Temp variable for entry */
DECLARE_PER_CPU(unsigned int, CURRENT_SAVE); /* Saved current pointer */
DECLARE_PER_CPU(unsigned int, SYSCALL_SAVE); /* Saved syscall number */
# endif /* __ASSEMBLY__ */
#endif /* _ASM_MICROBLAZE_ENTRY_H */
|
/*
* Definitions used by low-level trap handlers
*
* Copyright (C) 2008 Michal Simek
* Copyright (C) 2007 - 2008 PetaLogix
* Copyright (C) 2007 John Williams <[email protected]>
*
* This file is subject to the terms and conditions of the GNU General
* Public License. See the file COPYING in the main directory of this
* archive for more details.
*/
#ifndef _ASM_MICROBLAZE_ENTRY_H
#define _ASM_MICROBLAZE_ENTRY_H
#include <asm/percpu.h>
#include <asm/ptrace.h>
/*
* These are per-cpu variables required in entry.S, among other
* places
*/
#define PER_CPU(var) per_cpu__##var
# ifndef __ASSEMBLY__
DECLARE_PER_CPU(unsigned int, KSP); /* Saved kernel stack pointer */
DECLARE_PER_CPU(unsigned int, KM); /* Kernel/user mode */
DECLARE_PER_CPU(unsigned int, ENTRY_SP); /* Saved SP on kernel entry */
DECLARE_PER_CPU(unsigned int, R11_SAVE); /* Temp variable for entry */
DECLARE_PER_CPU(unsigned int, CURRENT_SAVE); /* Saved current pointer */
# endif /* __ASSEMBLY__ */
#endif /* _ASM_MICROBLAZE_ENTRY_H */
|
Remove unneeded per cpu SYSCALL_SAVE variable
|
microblaze: Remove unneeded per cpu SYSCALL_SAVE variable
Signed-off-by: Michal Simek <[email protected]>
|
C
|
mit
|
KristFoundation/Programs,TeamVee-Kanas/android_kernel_samsung_kanas,KristFoundation/Programs,TeamVee-Kanas/android_kernel_samsung_kanas,KristFoundation/Programs,TeamVee-Kanas/android_kernel_samsung_kanas,KristFoundation/Programs,TeamVee-Kanas/android_kernel_samsung_kanas,KristFoundation/Programs,KristFoundation/Programs,TeamVee-Kanas/android_kernel_samsung_kanas
|
c
|
## Code Before:
/*
* Definitions used by low-level trap handlers
*
* Copyright (C) 2008 Michal Simek
* Copyright (C) 2007 - 2008 PetaLogix
* Copyright (C) 2007 John Williams <[email protected]>
*
* This file is subject to the terms and conditions of the GNU General
* Public License. See the file COPYING in the main directory of this
* archive for more details.
*/
#ifndef _ASM_MICROBLAZE_ENTRY_H
#define _ASM_MICROBLAZE_ENTRY_H
#include <asm/percpu.h>
#include <asm/ptrace.h>
/*
* These are per-cpu variables required in entry.S, among other
* places
*/
#define PER_CPU(var) per_cpu__##var
# ifndef __ASSEMBLY__
DECLARE_PER_CPU(unsigned int, KSP); /* Saved kernel stack pointer */
DECLARE_PER_CPU(unsigned int, KM); /* Kernel/user mode */
DECLARE_PER_CPU(unsigned int, ENTRY_SP); /* Saved SP on kernel entry */
DECLARE_PER_CPU(unsigned int, R11_SAVE); /* Temp variable for entry */
DECLARE_PER_CPU(unsigned int, CURRENT_SAVE); /* Saved current pointer */
DECLARE_PER_CPU(unsigned int, SYSCALL_SAVE); /* Saved syscall number */
# endif /* __ASSEMBLY__ */
#endif /* _ASM_MICROBLAZE_ENTRY_H */
## Instruction:
microblaze: Remove unneeded per cpu SYSCALL_SAVE variable
Signed-off-by: Michal Simek <[email protected]>
## Code After:
/*
* Definitions used by low-level trap handlers
*
* Copyright (C) 2008 Michal Simek
* Copyright (C) 2007 - 2008 PetaLogix
* Copyright (C) 2007 John Williams <[email protected]>
*
* This file is subject to the terms and conditions of the GNU General
* Public License. See the file COPYING in the main directory of this
* archive for more details.
*/
#ifndef _ASM_MICROBLAZE_ENTRY_H
#define _ASM_MICROBLAZE_ENTRY_H
#include <asm/percpu.h>
#include <asm/ptrace.h>
/*
* These are per-cpu variables required in entry.S, among other
* places
*/
#define PER_CPU(var) per_cpu__##var
# ifndef __ASSEMBLY__
DECLARE_PER_CPU(unsigned int, KSP); /* Saved kernel stack pointer */
DECLARE_PER_CPU(unsigned int, KM); /* Kernel/user mode */
DECLARE_PER_CPU(unsigned int, ENTRY_SP); /* Saved SP on kernel entry */
DECLARE_PER_CPU(unsigned int, R11_SAVE); /* Temp variable for entry */
DECLARE_PER_CPU(unsigned int, CURRENT_SAVE); /* Saved current pointer */
# endif /* __ASSEMBLY__ */
#endif /* _ASM_MICROBLAZE_ENTRY_H */
|
# ... existing code ...
DECLARE_PER_CPU(unsigned int, ENTRY_SP); /* Saved SP on kernel entry */
DECLARE_PER_CPU(unsigned int, R11_SAVE); /* Temp variable for entry */
DECLARE_PER_CPU(unsigned int, CURRENT_SAVE); /* Saved current pointer */
# endif /* __ASSEMBLY__ */
#endif /* _ASM_MICROBLAZE_ENTRY_H */
# ... rest of the code ...
|
93358a04380f427f3ac1cea84689a430bcb0c883
|
jenkins/management/commands/import_jenkinsserver.py
|
jenkins/management/commands/import_jenkinsserver.py
|
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from jenkins.management.helpers import import_jenkinsserver
class Command(BaseCommand):
help = "Import or update a JenkinsServer"
args = "[name] [url] [username] [password] [remote]"
option_list = BaseCommand.option_list + (
make_option(
"--update", action="store_true", dest="update",
default=False, help="Update if server already exists."),
)
def handle(self, *args, **options):
if len(args) != 5:
raise CommandError("must provide all parameters")
name, url, username, password, remote = args
import_jenkinsserver(
update=options["update"], stdout=self.stdout)
transaction.commit_unless_managed()
|
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from jenkins.management.helpers import import_jenkinsserver
class Command(BaseCommand):
help = "Import or update a JenkinsServer"
args = "[name] [url] [username] [password] [remote]"
option_list = BaseCommand.option_list + (
make_option(
"--update", action="store_true", dest="update",
default=False, help="Update if server already exists."),
)
def handle(self, *args, **options):
if len(args) != 5:
raise CommandError("must provide all parameters")
name, url, username, password, remote = args
import_jenkinsserver(
name, url, username, password, remote,
update=options["update"], stdout=self.stdout)
transaction.commit_unless_managed()
|
Make the command-line tool pass the right parameters.
|
Make the command-line tool pass the right parameters.
|
Python
|
mit
|
timrchavez/capomastro,caio1982/capomastro,caio1982/capomastro,caio1982/capomastro,timrchavez/capomastro
|
python
|
## Code Before:
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from jenkins.management.helpers import import_jenkinsserver
class Command(BaseCommand):
help = "Import or update a JenkinsServer"
args = "[name] [url] [username] [password] [remote]"
option_list = BaseCommand.option_list + (
make_option(
"--update", action="store_true", dest="update",
default=False, help="Update if server already exists."),
)
def handle(self, *args, **options):
if len(args) != 5:
raise CommandError("must provide all parameters")
name, url, username, password, remote = args
import_jenkinsserver(
update=options["update"], stdout=self.stdout)
transaction.commit_unless_managed()
## Instruction:
Make the command-line tool pass the right parameters.
## Code After:
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from jenkins.management.helpers import import_jenkinsserver
class Command(BaseCommand):
help = "Import or update a JenkinsServer"
args = "[name] [url] [username] [password] [remote]"
option_list = BaseCommand.option_list + (
make_option(
"--update", action="store_true", dest="update",
default=False, help="Update if server already exists."),
)
def handle(self, *args, **options):
if len(args) != 5:
raise CommandError("must provide all parameters")
name, url, username, password, remote = args
import_jenkinsserver(
name, url, username, password, remote,
update=options["update"], stdout=self.stdout)
transaction.commit_unless_managed()
|
// ... existing code ...
name, url, username, password, remote = args
import_jenkinsserver(
name, url, username, password, remote,
update=options["update"], stdout=self.stdout)
transaction.commit_unless_managed()
// ... rest of the code ...
|
f0c1c078e7edd76b418940f3cbddef405440b5d4
|
GPflowOpt/_version.py
|
GPflowOpt/_version.py
|
__version__ = "pre-release"
|
__version__ = "pre-release" # pragma: no cover
|
Exclude version file from test coverage
|
Exclude version file from test coverage
|
Python
|
apache-2.0
|
GPflow/GPflowOpt
|
python
|
## Code Before:
__version__ = "pre-release"
## Instruction:
Exclude version file from test coverage
## Code After:
__version__ = "pre-release" # pragma: no cover
|
# ... existing code ...
__version__ = "pre-release" # pragma: no cover
# ... rest of the code ...
|
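For context on the marker used above: coverage.py excludes any line carrying the `# pragma: no cover` comment from its report, and when the pragma sits on a `def` or `class` line the whole body is excluded. A small illustrative sketch, with invented function names:

```python
def hardware_only_path():  # pragma: no cover
    # The entire body of this function is omitted from coverage reports,
    # which suits code that cannot run on the CI machines.
    return "excluded from the report"


def measured_path():
    # This function is measured as usual.
    return "included in the report"
```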
73f4c29d47e23b26483733ab25ea33367657f758
|
test/selenium/src/lib/page/modal/create_new_object.py
|
test/selenium/src/lib/page/modal/create_new_object.py
|
"""Modals for creating new objects"""
from lib.page.modal import base
class NewProgramModal(base.ProgramModal, base.CreateNewObjectModal):
"""Class representing a program modal visible after creating a new
program from LHN"""
class NewControlModal(base.ControlModal, base.CreateNewObjectModal):
"""Class representing a control modal visible after creating a new
control from LHN"""
class NewOrgGroupModal(base.OrgGroupModal, base.CreateNewObjectModal):
"""Class representing an org group modal visible after creating a new
org group from LHN"""
class NewRiskModal(base.RiskModal, base.CreateNewObjectModal):
"""Class representing a risk modal visible after creating a new
risk from LHN"""
|
"""Modals for creating new objects"""
from lib.page.modal import base
class NewProgramModal(base.ProgramModal, base.CreateNewObjectModal):
"""Class representing a program modal visible after creating a new
program from LHN"""
class NewControlModal(base.ControlModal, base.CreateNewObjectModal):
"""Class representing a control modal visible after creating a new
control from LHN"""
class NewOrgGroupModal(base.OrgGroupModal, base.CreateNewObjectModal):
"""Class representing an org group modal visible after creating a new
org group from LHN"""
class NewRiskModal(base.RiskModal, base.CreateNewObjectModal):
"""Class representing a risk modal visible after creating a new
risk from LHN"""
class NewRequestModal(base.RequestModal, base.CreateNewObjectModal):
"""Class representing an request modal visible after creating a new
request from LHN"""
class NewIssueModal(base.IssueModal, base.CreateNewObjectModal):
"""Class representing an issue visible after creating a new
issue from LHN"""
|
Add modals for creating objects
|
Add modals for creating objects
|
Python
|
apache-2.0
|
plamut/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,edofic/ggrc-core,j0gurt/ggrc-core,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,kr41/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,NejcZupec/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,prasannav7/ggrc-core,NejcZupec/ggrc-core,AleksNeStu/ggrc-core,josthkko/ggrc-core,prasannav7/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,kr41/ggrc-core,prasannav7/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core
|
python
|
## Code Before:
"""Modals for creating new objects"""
from lib.page.modal import base
class NewProgramModal(base.ProgramModal, base.CreateNewObjectModal):
"""Class representing a program modal visible after creating a new
program from LHN"""
class NewControlModal(base.ControlModal, base.CreateNewObjectModal):
"""Class representing a control modal visible after creating a new
control from LHN"""
class NewOrgGroupModal(base.OrgGroupModal, base.CreateNewObjectModal):
"""Class representing an org group modal visible after creating a new
org group from LHN"""
class NewRiskModal(base.RiskModal, base.CreateNewObjectModal):
"""Class representing a risk modal visible after creating a new
risk from LHN"""
## Instruction:
Add modals for creating objects
## Code After:
"""Modals for creating new objects"""
from lib.page.modal import base
class NewProgramModal(base.ProgramModal, base.CreateNewObjectModal):
"""Class representing a program modal visible after creating a new
program from LHN"""
class NewControlModal(base.ControlModal, base.CreateNewObjectModal):
"""Class representing a control modal visible after creating a new
control from LHN"""
class NewOrgGroupModal(base.OrgGroupModal, base.CreateNewObjectModal):
"""Class representing an org group modal visible after creating a new
org group from LHN"""
class NewRiskModal(base.RiskModal, base.CreateNewObjectModal):
"""Class representing a risk modal visible after creating a new
risk from LHN"""
class NewRequestModal(base.RequestModal, base.CreateNewObjectModal):
"""Class representing an request modal visible after creating a new
request from LHN"""
class NewIssueModal(base.IssueModal, base.CreateNewObjectModal):
"""Class representing an issue visible after creating a new
issue from LHN"""
|
...
class NewRiskModal(base.RiskModal, base.CreateNewObjectModal):
"""Class representing a risk modal visible after creating a new
risk from LHN"""
class NewRequestModal(base.RequestModal, base.CreateNewObjectModal):
"""Class representing an request modal visible after creating a new
request from LHN"""
class NewIssueModal(base.IssueModal, base.CreateNewObjectModal):
"""Class representing an issue visible after creating a new
issue from LHN"""
...
|
f3eee368e13ee37048d52bde0d067efea057fef8
|
monkeylearn/extraction.py
|
monkeylearn/extraction.py
|
from __future__ import (
print_function, unicode_literals, division, absolute_import)
from six.moves import range
from monkeylearn.utils import SleepRequestsMixin, MonkeyLearnResponse, HandleErrorsMixin
from monkeylearn.settings import DEFAULT_BASE_ENDPOINT, DEFAULT_BATCH_SIZE
class Extraction(SleepRequestsMixin, HandleErrorsMixin):
def __init__(self, token, base_endpoint=DEFAULT_BASE_ENDPOINT):
self.token = token
self.endpoint = base_endpoint + 'extractors/'
def extract(self, module_id, text_list, batch_size=DEFAULT_BATCH_SIZE,
sleep_if_throttled=True):
text_list = list(text_list)
self.check_batch_limits(text_list, batch_size)
url = self.endpoint + module_id + '/extract/'
res = []
responses = []
for i in range(0, len(text_list), batch_size):
data = {
'text_list': text_list[i:i+batch_size]
}
response = self.make_request(url, 'POST', data, sleep_if_throttled)
self.handle_errors(response)
responses.append(response)
res.extend(response.json()['result'])
return MonkeyLearnResponse(res, responses)
|
from __future__ import (
print_function, unicode_literals, division, absolute_import)
from six.moves import range
from monkeylearn.utils import SleepRequestsMixin, MonkeyLearnResponse, HandleErrorsMixin
from monkeylearn.settings import DEFAULT_BASE_ENDPOINT, DEFAULT_BATCH_SIZE
class Extraction(SleepRequestsMixin, HandleErrorsMixin):
def __init__(self, token, base_endpoint=DEFAULT_BASE_ENDPOINT):
self.token = token
self.endpoint = base_endpoint + 'extractors/'
def extract(self, module_id, text_list, batch_size=DEFAULT_BATCH_SIZE,
sleep_if_throttled=True, **kwargs):
text_list = list(text_list)
self.check_batch_limits(text_list, batch_size)
url = self.endpoint + module_id + '/extract/'
res = []
responses = []
for i in range(0, len(text_list), batch_size):
data = {
'text_list': text_list[i:i+batch_size]
}
if kwargs is not None:
for key, value in kwargs.iteritems():
data[key] = value
response = self.make_request(url, 'POST', data, sleep_if_throttled)
self.handle_errors(response)
responses.append(response)
res.extend(response.json()['result'])
return MonkeyLearnResponse(res, responses)
|
Support for extra parameters in extractors
|
Support for extra parameters in extractors
|
Python
|
mit
|
monkeylearn/monkeylearn-python
|
python
|
## Code Before:
from __future__ import (
print_function, unicode_literals, division, absolute_import)
from six.moves import range
from monkeylearn.utils import SleepRequestsMixin, MonkeyLearnResponse, HandleErrorsMixin
from monkeylearn.settings import DEFAULT_BASE_ENDPOINT, DEFAULT_BATCH_SIZE
class Extraction(SleepRequestsMixin, HandleErrorsMixin):
def __init__(self, token, base_endpoint=DEFAULT_BASE_ENDPOINT):
self.token = token
self.endpoint = base_endpoint + 'extractors/'
def extract(self, module_id, text_list, batch_size=DEFAULT_BATCH_SIZE,
sleep_if_throttled=True):
text_list = list(text_list)
self.check_batch_limits(text_list, batch_size)
url = self.endpoint + module_id + '/extract/'
res = []
responses = []
for i in range(0, len(text_list), batch_size):
data = {
'text_list': text_list[i:i+batch_size]
}
response = self.make_request(url, 'POST', data, sleep_if_throttled)
self.handle_errors(response)
responses.append(response)
res.extend(response.json()['result'])
return MonkeyLearnResponse(res, responses)
## Instruction:
Support for extra parameters in extractors
## Code After:
from __future__ import (
print_function, unicode_literals, division, absolute_import)
from six.moves import range
from monkeylearn.utils import SleepRequestsMixin, MonkeyLearnResponse, HandleErrorsMixin
from monkeylearn.settings import DEFAULT_BASE_ENDPOINT, DEFAULT_BATCH_SIZE
class Extraction(SleepRequestsMixin, HandleErrorsMixin):
def __init__(self, token, base_endpoint=DEFAULT_BASE_ENDPOINT):
self.token = token
self.endpoint = base_endpoint + 'extractors/'
def extract(self, module_id, text_list, batch_size=DEFAULT_BATCH_SIZE,
sleep_if_throttled=True, **kwargs):
text_list = list(text_list)
self.check_batch_limits(text_list, batch_size)
url = self.endpoint + module_id + '/extract/'
res = []
responses = []
for i in range(0, len(text_list), batch_size):
data = {
'text_list': text_list[i:i+batch_size]
}
if kwargs is not None:
for key, value in kwargs.iteritems():
data[key] = value
response = self.make_request(url, 'POST', data, sleep_if_throttled)
self.handle_errors(response)
responses.append(response)
res.extend(response.json()['result'])
return MonkeyLearnResponse(res, responses)
|
...
data = {
'text_list': text_list[i:i+batch_size]
}
if kwargs is not None:
for key, value in kwargs.iteritems():
data[key] = value
response = self.make_request(url, 'POST', data, sleep_if_throttled)
self.handle_errors(response)
responses.append(response)
...
|
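A self-contained sketch of the pattern the change above introduces, namely folding arbitrary keyword arguments into the request payload. The function and parameter names here are invented for illustration and are not part of the MonkeyLearn client:

```python
def build_payload(text_list, **kwargs):
    # Start from the required field, then merge any extra keyword arguments
    # verbatim; dict.update is equivalent to the key/value loop in the diff.
    data = {'text_list': list(text_list)}
    data.update(kwargs)
    return data


# Extra parameters simply ride along with the text batch.
print(build_payload(["some text"], use_cache=True, timeout=30))
```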
88fbd428ceb79d6e176ff235256c8e5951815085
|
inspector/inspector/urls.py
|
inspector/inspector/urls.py
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='base.html'), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^class/', include('cbv.urls')),
)
urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='base.html'), name='home'),
url(r'^projects/', include('cbv.urls')),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Make the url structure a bit more sensible.
|
Make the url structure a bit more sensible.
|
Python
|
bsd-2-clause
|
refreshoxford/django-cbv-inspector,refreshoxford/django-cbv-inspector,abhijo89/django-cbv-inspector,refreshoxford/django-cbv-inspector,abhijo89/django-cbv-inspector,refreshoxford/django-cbv-inspector,abhijo89/django-cbv-inspector,abhijo89/django-cbv-inspector
|
python
|
## Code Before:
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='base.html'), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^class/', include('cbv.urls')),
)
urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
## Instruction:
Make the url structure a bit more sensible.
## Code After:
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='base.html'), name='home'),
url(r'^projects/', include('cbv.urls')),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
...
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='base.html'), name='home'),
url(r'^projects/', include('cbv.urls')),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
...
|
3eaf0ea514b0f78906af7e614079f3a90624bcc7
|
estimate.py
|
estimate.py
|
from sys import stdin
def estimateConf(conf):
"""Estimate configuration from a string."""
confElements = [int(x) for x in conf.split(sep=" ")]
disk = confElements[0]
print(disk)
procRates = confElements[1:]
print(procRates)
def estimateConfsFromInput():
"""Parse and estimate configurations from stdin."""
for line in stdin:
confs = line.splitlines()
for conf in confs:
estimateConf(conf)
if __name__ == "__main__":
estimateConfsFromInput()
|
from sys import stdin
def calcExhaustion(disk, procRates):
"""Calculate how many seconds before the disk is filled.
procRates lists the rates at which each process fills 1 byte of disk
space."""
print(disk)
print(procRates)
def estimateConf(conf):
"""Estimate configuration from a string."""
confElements = [int(x) for x in conf.split(sep=" ")]
disk = confElements[0]
procRates = confElements[1:]
eta = calcExhaustion(disk, procRates)
def estimateConfsFromInput():
"""Parse and estimate configurations from stdin."""
for line in stdin:
confs = line.splitlines()
for conf in confs:
estimateConf(conf)
if __name__ == "__main__":
estimateConfsFromInput()
|
Create fn for calculating exhaustion
|
Create fn for calculating exhaustion
|
Python
|
mit
|
MattHeard/EstimateDiskExhaustion
|
python
|
## Code Before:
from sys import stdin
def estimateConf(conf):
"""Estimate configuration from a string."""
confElements = [int(x) for x in conf.split(sep=" ")]
disk = confElements[0]
print(disk)
procRates = confElements[1:]
print(procRates)
def estimateConfsFromInput():
"""Parse and estimate configurations from stdin."""
for line in stdin:
confs = line.splitlines()
for conf in confs:
estimateConf(conf)
if __name__ == "__main__":
estimateConfsFromInput()
## Instruction:
Create fn for calculating exhaustion
## Code After:
from sys import stdin
def calcExhaustion(disk, procRates):
"""Calculate how many seconds before the disk is filled.
procRates lists the rates at which each process fills 1 byte of disk
space."""
print(disk)
print(procRates)
def estimateConf(conf):
"""Estimate configuration from a string."""
confElements = [int(x) for x in conf.split(sep=" ")]
disk = confElements[0]
procRates = confElements[1:]
eta = calcExhaustion(disk, procRates)
def estimateConfsFromInput():
"""Parse and estimate configurations from stdin."""
for line in stdin:
confs = line.splitlines()
for conf in confs:
estimateConf(conf)
if __name__ == "__main__":
estimateConfsFromInput()
|
...
from sys import stdin
def calcExhaustion(disk, procRates):
"""Calculate how many seconds before the disk is filled.
procRates lists the rates at which each process fills 1 byte of disk
space."""
print(disk)
print(procRates)
def estimateConf(conf):
"""Estimate configuration from a string."""
confElements = [int(x) for x in conf.split(sep=" ")]
disk = confElements[0]
procRates = confElements[1:]
eta = calcExhaustion(disk, procRates)
def estimateConfsFromInput():
"""Parse and estimate configurations from stdin."""
...
|
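One plausible way to complete the calculation stubbed out above, assuming each rate is expressed in bytes written per second; the record itself leaves the units open, so this is an illustrative sketch rather than the author's intended formula:

```python
def seconds_until_full(disk_bytes, rates_bytes_per_sec):
    # The disk is exhausted once the combined write rate has produced
    # disk_bytes of data, so the ETA is size divided by the total rate.
    total_rate = sum(rates_bytes_per_sec)
    if total_rate == 0:
        return float('inf')  # nothing is writing, so the disk never fills
    return disk_bytes / total_rate


print(seconds_until_full(1000, [1, 3, 6]))  # 100.0
```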
a84f8c2385f38f8c21cc020b01e372230c33868a
|
app-api/src/main/java/org/ligoj/app/resource/plugin/repository/PluginCache.java
|
app-api/src/main/java/org/ligoj/app/resource/plugin/repository/PluginCache.java
|
/*
* Licensed under MIT (https://github.com/ligoj/ligoj/blob/master/LICENSE)
*/
package org.ligoj.app.resource.plugin.repository;
import java.util.function.Function;
import javax.cache.expiry.AccessedExpiryPolicy;
import javax.cache.expiry.Duration;
import org.ligoj.bootstrap.resource.system.cache.CacheManagerAware;
import org.springframework.stereotype.Component;
import com.hazelcast.cache.HazelcastCacheManager;
import com.hazelcast.config.CacheConfig;
/**
* Plug-in cache configuration.
*/
@Component
public class PluginCache implements CacheManagerAware {
@Override
public void onCreate(final HazelcastCacheManager cacheManager, final Function<String, CacheConfig<?, ?>> provider) {
final CacheConfig<?, ?> central = provider.apply("authorizations");
central.setExpiryPolicyFactory(AccessedExpiryPolicy.factoryOf(Duration.ONE_DAY));
cacheManager.createCache("plugins-last-version-central", central);
final CacheConfig<?, ?> nexus = provider.apply("authorizations");
nexus.setExpiryPolicyFactory(AccessedExpiryPolicy.factoryOf(Duration.ONE_DAY));
cacheManager.createCache("plugins-last-version-nexus", nexus);
}
}
|
/*
* Licensed under MIT (https://github.com/ligoj/ligoj/blob/master/LICENSE)
*/
package org.ligoj.app.resource.plugin.repository;
import java.util.function.Function;
import javax.cache.expiry.AccessedExpiryPolicy;
import javax.cache.expiry.Duration;
import javax.cache.expiry.ModifiedExpiryPolicy;
import org.ligoj.bootstrap.resource.system.cache.CacheManagerAware;
import org.springframework.stereotype.Component;
import com.hazelcast.cache.HazelcastCacheManager;
import com.hazelcast.config.CacheConfig;
/**
* Plug-in cache configuration.
*/
@Component
public class PluginCache implements CacheManagerAware {
@Override
public void onCreate(final HazelcastCacheManager cacheManager, final Function<String, CacheConfig<?, ?>> provider) {
final CacheConfig<?, ?> central = provider.apply("authorizations");
central.setExpiryPolicyFactory(ModifiedExpiryPolicy.factoryOf(Duration.ONE_DAY));
cacheManager.createCache("plugins-last-version-central", central);
final CacheConfig<?, ?> nexus = provider.apply("authorizations");
nexus.setExpiryPolicyFactory(AccessedExpiryPolicy.factoryOf(Duration.ONE_DAY));
cacheManager.createCache("plugins-last-version-nexus", nexus);
}
}
|
Change expiring cache policy of registry
|
Change expiring cache policy of registry
|
Java
|
mit
|
ligoj/ligoj,ligoj/ligoj,ligoj/ligoj,ligoj/ligoj
|
java
|
## Code Before:
/*
* Licensed under MIT (https://github.com/ligoj/ligoj/blob/master/LICENSE)
*/
package org.ligoj.app.resource.plugin.repository;
import java.util.function.Function;
import javax.cache.expiry.AccessedExpiryPolicy;
import javax.cache.expiry.Duration;
import org.ligoj.bootstrap.resource.system.cache.CacheManagerAware;
import org.springframework.stereotype.Component;
import com.hazelcast.cache.HazelcastCacheManager;
import com.hazelcast.config.CacheConfig;
/**
* Plug-in cache configuration.
*/
@Component
public class PluginCache implements CacheManagerAware {
@Override
public void onCreate(final HazelcastCacheManager cacheManager, final Function<String, CacheConfig<?, ?>> provider) {
final CacheConfig<?, ?> central = provider.apply("authorizations");
central.setExpiryPolicyFactory(AccessedExpiryPolicy.factoryOf(Duration.ONE_DAY));
cacheManager.createCache("plugins-last-version-central", central);
final CacheConfig<?, ?> nexus = provider.apply("authorizations");
nexus.setExpiryPolicyFactory(AccessedExpiryPolicy.factoryOf(Duration.ONE_DAY));
cacheManager.createCache("plugins-last-version-nexus", nexus);
}
}
## Instruction:
Change expiring cache policy of registry
## Code After:
/*
* Licensed under MIT (https://github.com/ligoj/ligoj/blob/master/LICENSE)
*/
package org.ligoj.app.resource.plugin.repository;
import java.util.function.Function;
import javax.cache.expiry.AccessedExpiryPolicy;
import javax.cache.expiry.Duration;
import javax.cache.expiry.ModifiedExpiryPolicy;
import org.ligoj.bootstrap.resource.system.cache.CacheManagerAware;
import org.springframework.stereotype.Component;
import com.hazelcast.cache.HazelcastCacheManager;
import com.hazelcast.config.CacheConfig;
/**
* Plug-in cache configuration.
*/
@Component
public class PluginCache implements CacheManagerAware {
@Override
public void onCreate(final HazelcastCacheManager cacheManager, final Function<String, CacheConfig<?, ?>> provider) {
final CacheConfig<?, ?> central = provider.apply("authorizations");
central.setExpiryPolicyFactory(ModifiedExpiryPolicy.factoryOf(Duration.ONE_DAY));
cacheManager.createCache("plugins-last-version-central", central);
final CacheConfig<?, ?> nexus = provider.apply("authorizations");
nexus.setExpiryPolicyFactory(AccessedExpiryPolicy.factoryOf(Duration.ONE_DAY));
cacheManager.createCache("plugins-last-version-nexus", nexus);
}
}
|
# ... existing code ...
import javax.cache.expiry.AccessedExpiryPolicy;
import javax.cache.expiry.Duration;
import javax.cache.expiry.ModifiedExpiryPolicy;
import org.ligoj.bootstrap.resource.system.cache.CacheManagerAware;
import org.springframework.stereotype.Component;
# ... modified code ...
@Override
public void onCreate(final HazelcastCacheManager cacheManager, final Function<String, CacheConfig<?, ?>> provider) {
final CacheConfig<?, ?> central = provider.apply("authorizations");
central.setExpiryPolicyFactory(ModifiedExpiryPolicy.factoryOf(Duration.ONE_DAY));
cacheManager.createCache("plugins-last-version-central", central);
final CacheConfig<?, ?> nexus = provider.apply("authorizations");
nexus.setExpiryPolicyFactory(AccessedExpiryPolicy.factoryOf(Duration.ONE_DAY));
# ... rest of the code ...
|
9209c56661c2b14a09db339cf1551e536965ad7f
|
{{cookiecutter.extension_name}}/{{cookiecutter.extension_name}}/__init__.py
|
{{cookiecutter.extension_name}}/{{cookiecutter.extension_name}}/__init__.py
|
from IPython.display import display, JSON
import json
# Running `npm run build` will create static resources in the static
# directory of this Python package (and create that directory if necessary).
def _jupyter_labextension_paths():
return [{
'name': '{{cookiecutter.extension_name}}',
'src': 'static',
}]
def _jupyter_nbextension_paths():
return [{
'section': 'notebook',
'src': 'static',
'dest': '{{cookiecutter.extension_name}}',
'require': '{{cookiecutter.extension_name}}/extension'
}]
# A display class that can be used within a notebook. E.g.:
# from {{cookiecutter.extension_name}} import {{cookiecutter.mime_short_name}}
# {{cookiecutter.mime_short_name}}(data)
class {{cookiecutter.mime_short_name}}(JSON):
@property
def data(self):
return self._data
@data.setter
def data(self, data):
if isinstance(data, str):
data = json.loads(data)
self._data = data
def _ipython_display_(self):
bundle = {
'{{cookiecutter.mime_type}}': self.data,
'text/plain': '<{{cookiecutter.extension_name}}.{{cookiecutter.mime_short_name}} object>'
}
display(bundle, raw=True)
|
from IPython.display import display, JSON
import json
# Running `npm run build` will create static resources in the static
# directory of this Python package (and create that directory if necessary).
def _jupyter_labextension_paths():
return [{
'name': '{{cookiecutter.extension_name}}',
'src': 'static',
}]
def _jupyter_nbextension_paths():
return [{
'section': 'notebook',
'src': 'static',
'dest': '{{cookiecutter.extension_name}}',
'require': '{{cookiecutter.extension_name}}/extension'
}]
# A display class that can be used within a notebook. E.g.:
# from {{cookiecutter.extension_name}} import {{cookiecutter.mime_short_name}}
# {{cookiecutter.mime_short_name}}(data)
class {{cookiecutter.mime_short_name}}(JSON):
"""A display class for displaying {{cookiecutter.mime_short_name}} visualizations in the Jupyter Notebook and IPython kernel.
{{cookiecutter.mime_short_name}} expects a JSON-able dict, not serialized JSON strings.
Scalar types (None, number, string) are not allowed, only dict containers.
"""
def _data_and_metadata(self):
return self.data, self.metadata
def _ipython_display_(self):
bundle = {
'{{cookiecutter.mime_type}}': self.data,
'text/plain': '<{{cookiecutter.extension_name}}.{{cookiecutter.mime_short_name}} object>'
}
metadata = {
'{{cookiecutter.mime_type}}': self.metadata
}
display(bundle, metadata=metadata, raw=True)
|
Include display metadata in mime bundle
|
Include display metadata in mime bundle
|
Python
|
cc0-1.0
|
gnestor/mimerender-cookiecutter,gnestor/mimerender-cookiecutter,jupyterlab/mimerender-cookiecutter,jupyterlab/mimerender-cookiecutter
|
python
|
## Code Before:
from IPython.display import display, JSON
import json
# Running `npm run build` will create static resources in the static
# directory of this Python package (and create that directory if necessary).
def _jupyter_labextension_paths():
return [{
'name': '{{cookiecutter.extension_name}}',
'src': 'static',
}]
def _jupyter_nbextension_paths():
return [{
'section': 'notebook',
'src': 'static',
'dest': '{{cookiecutter.extension_name}}',
'require': '{{cookiecutter.extension_name}}/extension'
}]
# A display class that can be used within a notebook. E.g.:
# from {{cookiecutter.extension_name}} import {{cookiecutter.mime_short_name}}
# {{cookiecutter.mime_short_name}}(data)
class {{cookiecutter.mime_short_name}}(JSON):
@property
def data(self):
return self._data
@data.setter
def data(self, data):
if isinstance(data, str):
data = json.loads(data)
self._data = data
def _ipython_display_(self):
bundle = {
'{{cookiecutter.mime_type}}': self.data,
'text/plain': '<{{cookiecutter.extension_name}}.{{cookiecutter.mime_short_name}} object>'
}
display(bundle, raw=True)
## Instruction:
Include display metadata in mime bundle
## Code After:
from IPython.display import display, JSON
import json
# Running `npm run build` will create static resources in the static
# directory of this Python package (and create that directory if necessary).
def _jupyter_labextension_paths():
return [{
'name': '{{cookiecutter.extension_name}}',
'src': 'static',
}]
def _jupyter_nbextension_paths():
return [{
'section': 'notebook',
'src': 'static',
'dest': '{{cookiecutter.extension_name}}',
'require': '{{cookiecutter.extension_name}}/extension'
}]
# A display class that can be used within a notebook. E.g.:
# from {{cookiecutter.extension_name}} import {{cookiecutter.mime_short_name}}
# {{cookiecutter.mime_short_name}}(data)
class {{cookiecutter.mime_short_name}}(JSON):
"""A display class for displaying {{cookiecutter.mime_short_name}} visualizations in the Jupyter Notebook and IPython kernel.
{{cookiecutter.mime_short_name}} expects a JSON-able dict, not serialized JSON strings.
Scalar types (None, number, string) are not allowed, only dict containers.
"""
def _data_and_metadata(self):
return self.data, self.metadata
def _ipython_display_(self):
bundle = {
'{{cookiecutter.mime_type}}': self.data,
'text/plain': '<{{cookiecutter.extension_name}}.{{cookiecutter.mime_short_name}} object>'
}
metadata = {
'{{cookiecutter.mime_type}}': self.metadata
}
display(bundle, metadata=metadata, raw=True)
|
...
# {{cookiecutter.mime_short_name}}(data)
class {{cookiecutter.mime_short_name}}(JSON):
"""A display class for displaying {{cookiecutter.mime_short_name}} visualizations in the Jupyter Notebook and IPython kernel.
{{cookiecutter.mime_short_name}} expects a JSON-able dict, not serialized JSON strings.
Scalar types (None, number, string) are not allowed, only dict containers.
"""
def _data_and_metadata(self):
return self.data, self.metadata
def _ipython_display_(self):
bundle = {
'{{cookiecutter.mime_type}}': self.data,
'text/plain': '<{{cookiecutter.extension_name}}.{{cookiecutter.mime_short_name}} object>'
}
metadata = {
'{{cookiecutter.mime_type}}': self.metadata
}
display(bundle, metadata=metadata, raw=True)
...
|
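A stripped-down version of the display call shown above, outside the cookiecutter template. The MIME type and payload are placeholders; the real API used is `IPython.display.display` with `raw=True` and a per-MIME-type `metadata` dict:

```python
from IPython.display import display

data = {'values': [1, 2, 3]}
bundle = {
    'application/vnd.example+json': data,  # placeholder MIME type
    'text/plain': '<Example object>',      # plain-text fallback
}
metadata = {
    'application/vnd.example+json': {'expanded': True},
}

# raw=True tells IPython the bundle is already a mimebundle rather than
# an object that still needs formatting.
display(bundle, metadata=metadata, raw=True)
```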
b62f52a30404901ff3ffa7af90a3f1bdd7d05401
|
project/hhlcallback/utils.py
|
project/hhlcallback/utils.py
|
import environ
env = environ.Env()
HOLVI_CNC = False
def get_holvi_singleton():
global HOLVI_CNC
if HOLVI_CNC:
return HOLVI_CNC
holvi_pool = env('HOLVI_POOL', default=None)
holvi_key = env('HOLVI_APIKEY', default=None)
if not holvi_pool or not holvi_key:
return False
import holviapi
HOLVI_CNC = holviapi.Connection(holvi_pool, holvi_key)
return HOLVI_CNC
|
import holviapi.utils
def get_nordea_payment_reference(member_id, number):
base = member_id + 1000
return holviapi.utils.int2fin_reference(int("%s%s" % (base, number)))
|
Remove copy-pasted code, add helper for making legacy reference number for payments
|
Remove copy-pasted code, add helper for making legacy reference number for payments
|
Python
|
mit
|
HelsinkiHacklab/asylum,HelsinkiHacklab/asylum,HelsinkiHacklab/asylum,HelsinkiHacklab/asylum
|
python
|
## Code Before:
import environ
env = environ.Env()
HOLVI_CNC = False
def get_holvi_singleton():
global HOLVI_CNC
if HOLVI_CNC:
return HOLVI_CNC
holvi_pool = env('HOLVI_POOL', default=None)
holvi_key = env('HOLVI_APIKEY', default=None)
if not holvi_pool or not holvi_key:
return False
import holviapi
HOLVI_CNC = holviapi.Connection(holvi_pool, holvi_key)
return HOLVI_CNC
## Instruction:
Remove copy-pasted code, add helper for making legacy reference number for payments
## Code After:
import holviapi.utils
def get_nordea_payment_reference(member_id, number):
base = member_id + 1000
return holviapi.utils.int2fin_reference(int("%s%s" % (base, number)))
|
# ... existing code ...
import holviapi.utils
def get_nordea_payment_reference(member_id, number):
base = member_id + 1000
return holviapi.utils.int2fin_reference(int("%s%s" % (base, number)))
# ... rest of the code ...
|
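For background on the helper above: `holviapi.utils.int2fin_reference` presumably turns an integer into a Finnish bank reference number. The sketch below shows the conventional 7-3-1 check-digit scheme such references use; it is an assumption about what the library does, not its actual code:

```python
def fin_reference(base):
    # Apply weights 7, 3, 1 to the digits from right to left, then pick the
    # check digit as (10 - weighted sum mod 10) mod 10 and append it.
    digits = [int(d) for d in str(base)]
    weights = [7, 3, 1] * len(digits)
    total = sum(d * w for d, w in zip(reversed(digits), weights))
    check = (10 - total % 10) % 10
    return "%d%d" % (base, check)


# Roughly what the helper above would feed in for member_id=231, number=5.
print(fin_reference(12315))
```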
1bbf986cbde2d0ec8add3ac845cb10fcd061e46d
|
nodeconductor/server/test_settings.py
|
nodeconductor/server/test_settings.py
|
from nodeconductor.server.doc_settings import *
INSTALLED_APPS += (
'nodeconductor.quotas.tests',
'nodeconductor.structure.tests',
)
ROOT_URLCONF = 'nodeconductor.structure.tests.urls'
|
from nodeconductor.server.doc_settings import *
INSTALLED_APPS += (
'nodeconductor.quotas.tests',
'nodeconductor.structure.tests',
)
ROOT_URLCONF = 'nodeconductor.structure.tests.urls'
# XXX: This option should be removed after itacloud assembly creation.
NODECONDUCTOR['IS_ITACLOUD'] = True
|
Add "IS_ITACLOUD" flag to settings
|
Add "IS_ITACLOUD" flag to settings
- itacloud-7125
|
Python
|
mit
|
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
|
python
|
## Code Before:
from nodeconductor.server.doc_settings import *
INSTALLED_APPS += (
'nodeconductor.quotas.tests',
'nodeconductor.structure.tests',
)
ROOT_URLCONF = 'nodeconductor.structure.tests.urls'
## Instruction:
Add "IS_ITACLOUD" flag to settings
- itacloud-7125
## Code After:
from nodeconductor.server.doc_settings import *
INSTALLED_APPS += (
'nodeconductor.quotas.tests',
'nodeconductor.structure.tests',
)
ROOT_URLCONF = 'nodeconductor.structure.tests.urls'
# XXX: This option should be removed after itacloud assembly creation.
NODECONDUCTOR['IS_ITACLOUD'] = True
|
# ... existing code ...
)
ROOT_URLCONF = 'nodeconductor.structure.tests.urls'
# XXX: This option should be removed after itacloud assembly creation.
NODECONDUCTOR['IS_ITACLOUD'] = True
# ... rest of the code ...
|
8eed621a15dafc8b0965c59b8da2296f8193d0ca
|
karabo_data/tests/test_agipd_geometry.py
|
karabo_data/tests/test_agipd_geometry.py
|
import numpy as np
from karabo_data.geometry2 import AGIPD_1MGeometry
def test_snap_assemble_data():
geom = AGIPD_1MGeometry.from_quad_positions(quad_pos=[
(-525, 625),
(-550, -10),
(520, -160),
(542.5, 475),
])
snap_geom = geom.snap()
stacked_data = np.zeros((16, 512, 128))
img, centre = snap_geom.position_all_modules(stacked_data)
assert img.shape == (1296, 1132)
assert tuple(centre) == (651, 570)
assert np.isnan(img[0, 0])
assert img[50, 50] == 0
|
import numpy as np
from karabo_data.geometry2 import AGIPD_1MGeometry
def test_snap_assemble_data():
geom = AGIPD_1MGeometry.from_quad_positions(quad_pos=[
(-525, 625),
(-550, -10),
(520, -160),
(542.5, 475),
])
snap_geom = geom.snap()
stacked_data = np.zeros((16, 512, 128))
img, centre = snap_geom.position_all_modules(stacked_data)
assert img.shape == (1296, 1132)
assert tuple(centre) == (651, 570)
assert np.isnan(img[0, 0])
assert img[50, 50] == 0
def test_write_read_crystfel_file(tmpdir):
geom = AGIPD_1MGeometry.from_quad_positions(quad_pos=[
(-525, 625),
(-550, -10),
(520, -160),
(542.5, 475),
])
path = str(tmpdir / 'test.geom')
geom.write_crystfel_geom(path)
# We need to add some experiment details before cfelpyutils will read the
# file
with open(path, 'r') as f:
contents = f.read()
with open(path, 'w') as f:
f.write('clen = 0.119\n')
f.write('adu_per_eV = 0.0075\n')
f.write(contents)
loaded = AGIPD_1MGeometry.from_crystfel_geom(path)
np.testing.assert_allclose(loaded.modules[0][0].corner_pos,
geom.modules[0][0].corner_pos)
np.testing.assert_allclose(loaded.modules[0][0].fs_vec,
geom.modules[0][0].fs_vec)
|
Add test of reading & writing CrystFEL geometry
|
Add test of reading & writing CrystFEL geometry
|
Python
|
bsd-3-clause
|
European-XFEL/h5tools-py
|
python
|
## Code Before:
import numpy as np
from karabo_data.geometry2 import AGIPD_1MGeometry
def test_snap_assemble_data():
geom = AGIPD_1MGeometry.from_quad_positions(quad_pos=[
(-525, 625),
(-550, -10),
(520, -160),
(542.5, 475),
])
snap_geom = geom.snap()
stacked_data = np.zeros((16, 512, 128))
img, centre = snap_geom.position_all_modules(stacked_data)
assert img.shape == (1296, 1132)
assert tuple(centre) == (651, 570)
assert np.isnan(img[0, 0])
assert img[50, 50] == 0
## Instruction:
Add test of reading & writing CrystFEL geometry
## Code After:
import numpy as np
from karabo_data.geometry2 import AGIPD_1MGeometry
def test_snap_assemble_data():
geom = AGIPD_1MGeometry.from_quad_positions(quad_pos=[
(-525, 625),
(-550, -10),
(520, -160),
(542.5, 475),
])
snap_geom = geom.snap()
stacked_data = np.zeros((16, 512, 128))
img, centre = snap_geom.position_all_modules(stacked_data)
assert img.shape == (1296, 1132)
assert tuple(centre) == (651, 570)
assert np.isnan(img[0, 0])
assert img[50, 50] == 0
def test_write_read_crystfel_file(tmpdir):
geom = AGIPD_1MGeometry.from_quad_positions(quad_pos=[
(-525, 625),
(-550, -10),
(520, -160),
(542.5, 475),
])
path = str(tmpdir / 'test.geom')
geom.write_crystfel_geom(path)
# We need to add some experiment details before cfelpyutils will read the
# file
with open(path, 'r') as f:
contents = f.read()
with open(path, 'w') as f:
f.write('clen = 0.119\n')
f.write('adu_per_eV = 0.0075\n')
f.write(contents)
loaded = AGIPD_1MGeometry.from_crystfel_geom(path)
np.testing.assert_allclose(loaded.modules[0][0].corner_pos,
geom.modules[0][0].corner_pos)
np.testing.assert_allclose(loaded.modules[0][0].fs_vec,
geom.modules[0][0].fs_vec)
|
...
assert tuple(centre) == (651, 570)
assert np.isnan(img[0, 0])
assert img[50, 50] == 0
def test_write_read_crystfel_file(tmpdir):
geom = AGIPD_1MGeometry.from_quad_positions(quad_pos=[
(-525, 625),
(-550, -10),
(520, -160),
(542.5, 475),
])
path = str(tmpdir / 'test.geom')
geom.write_crystfel_geom(path)
# We need to add some experiment details before cfelpyutils will read the
# file
with open(path, 'r') as f:
contents = f.read()
with open(path, 'w') as f:
f.write('clen = 0.119\n')
f.write('adu_per_eV = 0.0075\n')
f.write(contents)
loaded = AGIPD_1MGeometry.from_crystfel_geom(path)
np.testing.assert_allclose(loaded.modules[0][0].corner_pos,
geom.modules[0][0].corner_pos)
np.testing.assert_allclose(loaded.modules[0][0].fs_vec,
geom.modules[0][0].fs_vec)
...
|
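The new test above relies on pytest's built-in `tmpdir` fixture for a write-then-read round trip. A minimal, generic version of the same pattern, independent of karabo_data:

```python
def test_write_read_roundtrip(tmpdir):
    # tmpdir is a unique empty directory that pytest creates for this test.
    path = str(tmpdir / 'payload.geom')
    with open(path, 'w') as f:
        f.write('clen = 0.119\n')
    with open(path) as f:
        assert f.read() == 'clen = 0.119\n'
```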
f458afb945d99e34e4172b84d853a674897a4802
|
bind-event/src/main/java/jp/glassmoon/app/App.java
|
bind-event/src/main/java/jp/glassmoon/app/App.java
|
package jp.glassmoon.app;
import java.io.File;
import jp.glassmoon.events.GerritEvent;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gerrit.server.events.PatchSetCreatedEvent;
public class App
{
public static void main( String[] args ) throws Exception
{
File f = new File("event.txt");
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
GerritEvent ev = mapper.readValue(f, GerritEvent.class);
System.out.println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(ev));
if (new PatchSetCreatedEvent().type.equals(ev.type)) {
PatchSetCreatedEvent event = mapper.readValue(f, PatchSetCreatedEvent.class);
System.out.println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(event));
}
}
}
|
package jp.glassmoon.app;
import java.io.File;
import jp.glassmoon.events.GerritEvent;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gerrit.server.events.ChangeEvent;
import com.google.gerrit.server.events.PatchSetCreatedEvent;
public class App
{
public static void main( String[] args ) throws Exception
{
File f = new File("event.txt");
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
GerritEvent ev = mapper.readValue(f, GerritEvent.class);
System.out.println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(ev));
ChangeEvent event;
if (new PatchSetCreatedEvent().type.equals(ev.type)) {
event = mapper.readValue(f, PatchSetCreatedEvent.class);
System.out.println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(event));
}
}
}
|
Change type to abstract class
|
Change type to abstract class
|
Java
|
apache-2.0
|
rinrinne/bind-event-sample
|
java
|
## Code Before:
package jp.glassmoon.app;
import java.io.File;
import jp.glassmoon.events.GerritEvent;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gerrit.server.events.PatchSetCreatedEvent;
public class App
{
public static void main( String[] args ) throws Exception
{
File f = new File("event.txt");
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
GerritEvent ev = mapper.readValue(f, GerritEvent.class);
System.out.println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(ev));
if (new PatchSetCreatedEvent().type.equals(ev.type)) {
PatchSetCreatedEvent event = mapper.readValue(f, PatchSetCreatedEvent.class);
System.out.println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(event));
}
}
}
## Instruction:
Change type to abstract class
## Code After:
package jp.glassmoon.app;
import java.io.File;
import jp.glassmoon.events.GerritEvent;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gerrit.server.events.ChangeEvent;
import com.google.gerrit.server.events.PatchSetCreatedEvent;
public class App
{
public static void main( String[] args ) throws Exception
{
File f = new File("event.txt");
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
GerritEvent ev = mapper.readValue(f, GerritEvent.class);
System.out.println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(ev));
ChangeEvent event;
if (new PatchSetCreatedEvent().type.equals(ev.type)) {
event = mapper.readValue(f, PatchSetCreatedEvent.class);
System.out.println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(event));
}
}
}
|
...
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gerrit.server.events.ChangeEvent;
import com.google.gerrit.server.events.PatchSetCreatedEvent;
public class App
...
GerritEvent ev = mapper.readValue(f, GerritEvent.class);
System.out.println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(ev));
ChangeEvent event;
if (new PatchSetCreatedEvent().type.equals(ev.type)) {
event = mapper.readValue(f, PatchSetCreatedEvent.class);
System.out.println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(event));
}
}
...
|
c52f538dd747219abdb2e2e608c8e36a50f49216
|
src/main/java/yokohama/unit/ast/InstanceSuchThatMatcher.java
|
src/main/java/yokohama/unit/ast/InstanceSuchThatMatcher.java
|
package yokohama.unit.ast;
import java.util.List;
import java.util.stream.Collectors;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;
import lombok.experimental.FieldDefaults;
@ToString
@EqualsAndHashCode(exclude={"span"})
@AllArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
@Getter
public class InstanceSuchThatMatcher implements Matcher {
private String varName;
private ClassType clazz;
private List<Proposition> propositions;
private Span span;
@Override
public <T> T accept(MatcherVisitor<T> visitor) {
return visitor.visitInstanceSuchThat(this);
}
@Override
public String getDescription() {
return "an instance of " + clazz.getName() + " s.t. ...";
}
}
|
package yokohama.unit.ast;
import java.util.List;
import java.util.stream.Collectors;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;
import lombok.experimental.FieldDefaults;
@ToString
@EqualsAndHashCode(exclude={"span"})
@AllArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
@Getter
public class InstanceSuchThatMatcher implements Matcher {
private String varName;
private ClassType clazz;
private List<Proposition> propositions;
private Span span;
@Override
public <T> T accept(MatcherVisitor<T> visitor) {
return visitor.visitInstanceSuchThat(this);
}
@Override
public String getDescription() {
return "an instance " + varName + " of " + clazz.getName() + " s.t. " +
propositions.stream().map(Proposition::getDescription).collect(Collectors.joining(" and "));
}
}
|
Include propositions in the description
|
Include propositions in the description
|
Java
|
mit
|
tkob/yokohamaunit,tkob/yokohamaunit
|
java
|
## Code Before:
package yokohama.unit.ast;
import java.util.List;
import java.util.stream.Collectors;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;
import lombok.experimental.FieldDefaults;
@ToString
@EqualsAndHashCode(exclude={"span"})
@AllArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
@Getter
public class InstanceSuchThatMatcher implements Matcher {
private String varName;
private ClassType clazz;
private List<Proposition> propositions;
private Span span;
@Override
public <T> T accept(MatcherVisitor<T> visitor) {
return visitor.visitInstanceSuchThat(this);
}
@Override
public String getDescription() {
return "an instance of " + clazz.getName() + " s.t. ...";
}
}
## Instruction:
Include propositions in the description
## Code After:
package yokohama.unit.ast;
import java.util.List;
import java.util.stream.Collectors;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;
import lombok.experimental.FieldDefaults;
@ToString
@EqualsAndHashCode(exclude={"span"})
@AllArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
@Getter
public class InstanceSuchThatMatcher implements Matcher {
private String varName;
private ClassType clazz;
private List<Proposition> propositions;
private Span span;
@Override
public <T> T accept(MatcherVisitor<T> visitor) {
return visitor.visitInstanceSuchThat(this);
}
@Override
public String getDescription() {
return "an instance " + varName + " of " + clazz.getName() + " s.t. " +
propositions.stream().map(Proposition::getDescription).collect(Collectors.joining(" and "));
}
}
|
// ... existing code ...
@Override
public String getDescription() {
return "an instance " + varName + " of " + clazz.getName() + " s.t. " +
propositions.stream().map(Proposition::getDescription).collect(Collectors.joining(" and "));
}
}
// ... rest of the code ...
|
aaad392fedca6b3f9879240591877f6a64d907c3
|
wordcloud/wordcloud.py
|
wordcloud/wordcloud.py
|
import os
from operator import itemgetter
from haystack.query import SearchQuerySet
from pombola.hansard import models as hansard_models
BASEDIR = os.path.dirname(__file__)
# normal english stop words and hansard-centric words to ignore
STOP_WORDS = open(os.path.join(BASEDIR, 'stopwords.txt'), 'rU').read().splitlines()
def popular_words(max_entries=20):
sqs = SearchQuerySet().models(hansard_models.Entry).order_by('-sitting_start_date')
cloudlist = []
try:
# Generate tag cloud from content of returned entries
words = {}
for entry in sqs[:max_entries]:
text = entry.object.content
for x in text.lower().split():
cleanx = x.replace(',', '').replace('.', '').replace('"', '').strip()
if cleanx not in STOP_WORDS: # and not cleanx in hansard_words:
words[cleanx] = 1 + words.get(cleanx, 0)
for word in words:
cloudlist.append(
{
"text": word,
"weight": words.get(word),
"link": "/search/hansard/?q=%s" % word,
}
)
sortedlist = sorted(cloudlist, key=itemgetter('weight'), reverse=True)[:25]
except:
sortedlist = []
return sortedlist
|
import os
from operator import itemgetter
from haystack.query import SearchQuerySet
from pombola.hansard import models as hansard_models
BASEDIR = os.path.dirname(__file__)
# normal english stop words and hansard-centric words to ignore
STOP_WORDS = open(os.path.join(BASEDIR, 'stopwords.txt'), 'rU').read().splitlines()
def popular_words(max_entries=20, max_words=25):
sqs = SearchQuerySet().models(hansard_models.Entry).order_by('-sitting_start_date')
cloudlist = []
try:
# Generate tag cloud from content of returned entries
words = {}
for entry in sqs[:max_entries]:
text = entry.object.content
for x in text.lower().split():
cleanx = x.replace(',', '').replace('.', '').replace('"', '').strip()
if cleanx not in STOP_WORDS: # and not cleanx in hansard_words:
words[cleanx] = 1 + words.get(cleanx, 0)
for word in words:
cloudlist.append(
{
"text": word,
"weight": words.get(word),
"link": "/search/hansard/?q=%s" % word,
}
)
sortedlist = sorted(cloudlist, key=itemgetter('weight'), reverse=True)[:max_words]
except:
sortedlist = []
return sortedlist
|
Make maximum number of words a parameter
|
Make maximum number of words a parameter
|
Python
|
agpl-3.0
|
geoffkilpin/pombola,geoffkilpin/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,geoffkilpin/pombola,mysociety/pombola,geoffkilpin/pombola,geoffkilpin/pombola,geoffkilpin/pombola,mysociety/pombola
|
python
|
## Code Before:
import os
from operator import itemgetter
from haystack.query import SearchQuerySet
from pombola.hansard import models as hansard_models
BASEDIR = os.path.dirname(__file__)
# normal english stop words and hansard-centric words to ignore
STOP_WORDS = open(os.path.join(BASEDIR, 'stopwords.txt'), 'rU').read().splitlines()
def popular_words(max_entries=20):
sqs = SearchQuerySet().models(hansard_models.Entry).order_by('-sitting_start_date')
cloudlist = []
try:
# Generate tag cloud from content of returned entries
words = {}
for entry in sqs[:max_entries]:
text = entry.object.content
for x in text.lower().split():
cleanx = x.replace(',', '').replace('.', '').replace('"', '').strip()
if cleanx not in STOP_WORDS: # and not cleanx in hansard_words:
words[cleanx] = 1 + words.get(cleanx, 0)
for word in words:
cloudlist.append(
{
"text": word,
"weight": words.get(word),
"link": "/search/hansard/?q=%s" % word,
}
)
sortedlist = sorted(cloudlist, key=itemgetter('weight'), reverse=True)[:25]
except:
sortedlist = []
return sortedlist
## Instruction:
Make maximum number of words a parameter
## Code After:
import os
from operator import itemgetter
from haystack.query import SearchQuerySet
from pombola.hansard import models as hansard_models
BASEDIR = os.path.dirname(__file__)
# normal english stop words and hansard-centric words to ignore
STOP_WORDS = open(os.path.join(BASEDIR, 'stopwords.txt'), 'rU').read().splitlines()
def popular_words(max_entries=20, max_words=25):
sqs = SearchQuerySet().models(hansard_models.Entry).order_by('-sitting_start_date')
cloudlist = []
try:
# Generate tag cloud from content of returned entries
words = {}
for entry in sqs[:max_entries]:
text = entry.object.content
for x in text.lower().split():
cleanx = x.replace(',', '').replace('.', '').replace('"', '').strip()
if cleanx not in STOP_WORDS: # and not cleanx in hansard_words:
words[cleanx] = 1 + words.get(cleanx, 0)
for word in words:
cloudlist.append(
{
"text": word,
"weight": words.get(word),
"link": "/search/hansard/?q=%s" % word,
}
)
sortedlist = sorted(cloudlist, key=itemgetter('weight'), reverse=True)[:max_words]
except:
sortedlist = []
return sortedlist
|
...
STOP_WORDS = open(os.path.join(BASEDIR, 'stopwords.txt'), 'rU').read().splitlines()
def popular_words(max_entries=20, max_words=25):
sqs = SearchQuerySet().models(hansard_models.Entry).order_by('-sitting_start_date')
cloudlist = []
...
}
)
sortedlist = sorted(cloudlist, key=itemgetter('weight'), reverse=True)[:max_words]
except:
sortedlist = []
...
|
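The counting loop above can also be written with `collections.Counter`. A rough standalone sketch; the stop-word set is a tiny stand-in for the project's stopwords.txt and the token cleaning is deliberately simplified:

```python
from collections import Counter

STOP_WORDS = {'the', 'and', 'of', 'to'}  # illustrative subset only


def top_words(texts, max_words=25):
    words = (
        w.strip('.,"')
        for text in texts
        for w in text.lower().split()
    )
    counts = Counter(w for w in words if w and w not in STOP_WORDS)
    # most_common already sorts by weight, which replaces the manual
    # sorted(..., key=itemgetter('weight'), reverse=True) step.
    return counts.most_common(max_words)


print(top_words(["The budget and the roads", "roads to the north"], max_words=3))
# [('roads', 2), ('budget', 1), ('north', 1)]
```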
c1de3bddb7e440064f15fd2a340cfea41f9e7be4
|
heltour/tournament/management/commands/cleanupcomments.py
|
heltour/tournament/management/commands/cleanupcomments.py
|
import random
import string
from django.core.management import BaseCommand
from django.utils import timezone
from heltour.tournament.models import *
from django_comments.models import Comment
from django.contrib.contenttypes.models import ContentType
class Command(BaseCommand):
help = "Removes ALL emails from the database."
def handle(self, *args, **options):
letters = ''.join([random.choice(string.ascii_letters) for x in range(4)])
value = input(f"Are you sure you want to clean up all comments? Type: {letters} to confirm: ")
if letters != value:
print("You got it wrong, exiting")
return
print("Cleaning up all comments")
models = [
"player",
"registration",
"seasonplayer",
"playerpairing",
"loneplayerpairing",
"alternate",
"alternateassignment",
"playeravailability",
"playerlateregistration",
"playerbye",
"playerwithdrawal",
"playerwarning",
]
ct_pks = [
ct.pk for ct in ContentType.objects.filter(model__in=models)
]
assert(len(ct_pks) == len(models))
for badword in ['mark', 'cheat', 'alt', 'tos violation']:
Comment.objects.filter(
content_type_id__in=ct_pks,
comment__icontains=badword,
).delete()
|
import random
import string
from datetime import datetime
from django.core.management import BaseCommand
from django.utils import timezone
from heltour.tournament.models import *
from django_comments.models import Comment
from django.contrib.contenttypes.models import ContentType
class Command(BaseCommand):
help = "Removes ALL emails from the database."
def handle(self, *args, **options):
letters = ''.join([random.choice(string.ascii_letters) for x in range(4)])
value = input(f"Are you sure you want to clean up all comments? Type: {letters} to confirm: ")
if letters != value:
print("You got it wrong, exiting")
return
print("Cleaning up all comments")
models = [
"player",
"registration",
"seasonplayer",
"playerpairing",
"loneplayerpairing",
"alternate",
"alternateassignment",
"playeravailability",
"playerlateregistration",
"playerbye",
"playerwithdrawal",
"playerwarning",
]
ct_pks = [
ct.pk for ct in ContentType.objects.filter(model__in=models)
]
assert(len(ct_pks) == len(models))
jan_01_2021 = timezone.make_aware(datetime(2021, 1, 1))
Comment.objects.filter(
content_type_id__in=ct_pks,
submit_date__lte=jan_01_2021
).exclude(
user_name="System"
).delete()
|
Remove all moderator made comments before 2021/01/01
|
Remove all moderator made comments before 2021/01/01
|
Python
|
mit
|
cyanfish/heltour,cyanfish/heltour,cyanfish/heltour,cyanfish/heltour
|
python
|
## Code Before:
import random
import string
from django.core.management import BaseCommand
from django.utils import timezone
from heltour.tournament.models import *
from django_comments.models import Comment
from django.contrib.contenttypes.models import ContentType
class Command(BaseCommand):
help = "Removes ALL emails from the database."
def handle(self, *args, **options):
letters = ''.join([random.choice(string.ascii_letters) for x in range(4)])
value = input(f"Are you sure you want to clean up all comments? Type: {letters} to confirm: ")
if letters != value:
print("You got it wrong, exiting")
return
print("Cleaning up all comments")
models = [
"player",
"registration",
"seasonplayer",
"playerpairing",
"loneplayerpairing",
"alternate",
"alternateassignment",
"playeravailability",
"playerlateregistration",
"playerbye",
"playerwithdrawal",
"playerwarning",
]
ct_pks = [
ct.pk for ct in ContentType.objects.filter(model__in=models)
]
assert(len(ct_pks) == len(models))
for badword in ['mark', 'cheat', 'alt', 'tos violation']:
Comment.objects.filter(
content_type_id__in=ct_pks,
comment__icontains=badword,
).delete()
## Instruction:
Remove all moderator made comments before 2021/01/01
## Code After:
import random
import string
from datetime import datetime
from django.core.management import BaseCommand
from django.utils import timezone
from heltour.tournament.models import *
from django_comments.models import Comment
from django.contrib.contenttypes.models import ContentType
class Command(BaseCommand):
help = "Removes ALL emails from the database."
def handle(self, *args, **options):
letters = ''.join([random.choice(string.ascii_letters) for x in range(4)])
value = input(f"Are you sure you want to clean up all comments? Type: {letters} to confirm: ")
if letters != value:
print("You got it wrong, exiting")
return
print("Cleaning up all comments")
models = [
"player",
"registration",
"seasonplayer",
"playerpairing",
"loneplayerpairing",
"alternate",
"alternateassignment",
"playeravailability",
"playerlateregistration",
"playerbye",
"playerwithdrawal",
"playerwarning",
]
ct_pks = [
ct.pk for ct in ContentType.objects.filter(model__in=models)
]
assert(len(ct_pks) == len(models))
jan_01_2021 = timezone.make_aware(datetime(2021, 1, 1))
Comment.objects.filter(
content_type_id__in=ct_pks,
submit_date__lte=jan_01_2021
).exclude(
user_name="System"
).delete()
|
# ... existing code ...
import random
import string
from datetime import datetime
from django.core.management import BaseCommand
from django.utils import timezone
from heltour.tournament.models import *
# ... modified code ...
ct.pk for ct in ContentType.objects.filter(model__in=models)
]
assert(len(ct_pks) == len(models))
jan_01_2021 = timezone.make_aware(datetime(2021, 1, 1))
Comment.objects.filter(
content_type_id__in=ct_pks,
submit_date__lte=jan_01_2021
).exclude(
user_name="System"
).delete()
# ... rest of the code ...
|
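A generic restatement of the confirmation guard the command above uses before doing anything destructive; the prompt text and code length are arbitrary choices for the sketch:

```python
import random
import string


def confirm_destructive_action():
    # Ask the operator to retype a short random code; a mistyped code aborts.
    code = ''.join(random.choice(string.ascii_letters) for _ in range(4))
    return input("Type %s to confirm: " % code) == code


if confirm_destructive_action():
    print("proceeding with cleanup")
else:
    print("aborted, nothing was deleted")
```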
8d04eff68c7b68f643ba40d6153e68507e929fc4
|
client/android/WorldScope/app/src/main/java/com/litmus/worldscope/model/WorldScopeCreatedStream.java
|
client/android/WorldScope/app/src/main/java/com/litmus/worldscope/model/WorldScopeCreatedStream.java
|
package com.litmus.worldscope.model;
import java.util.Date;
/**
* Stream object returned from WorldScope App Server when creating a new stream
*/
public class WorldScopeCreatedStream extends WorldScopeStream {
private Date endedAt;
private String streamLink;
public Date getEndedAt() {return endedAt;}
public String getStreamLink() {return streamLink;};
public void setEndedAt(Date endedAt) {this.endedAt = endedAt;}
public void setStreamLink(String streamLink) {this.streamLink = streamLink;}
}
|
package com.litmus.worldscope.model;
import java.util.Date;
/**
* Stream object returned from WorldScope App Server when creating a new stream
*/
public class WorldScopeCreatedStream extends WorldScopeStream {
private String endedAt;
private String streamLink;
public String getEndedAt() {return endedAt;}
public String getStreamLink() {return streamLink;};
public void setEndedAt(String endedAt) {this.endedAt = endedAt;}
public void setStreamLink(String streamLink) {this.streamLink = streamLink;}
}
|
Refactor Date to a String for compatibility purposes
|
Refactor Date to a String for compatibility purposes
|
Java
|
mit
|
nus-mtp/worldscope,nus-mtp/worldscope,nus-mtp/worldscope,nus-mtp/worldscope
|
java
|
## Code Before:
package com.litmus.worldscope.model;
import java.util.Date;
/**
* Stream object returned from WorldScope App Server when creating a new stream
*/
public class WorldScopeCreatedStream extends WorldScopeStream {
private Date endedAt;
private String streamLink;
public Date getEndedAt() {return endedAt;}
public String getStreamLink() {return streamLink;};
public void setEndedAt(Date endedAt) {this.endedAt = endedAt;}
public void setStreamLink(String streamLink) {this.streamLink = streamLink;}
}
## Instruction:
Refactor Date to a String for compatibility purposes
## Code After:
package com.litmus.worldscope.model;
import java.util.Date;
/**
* Stream object returned from WorldScope App Server when creating a new stream
*/
public class WorldScopeCreatedStream extends WorldScopeStream {
private String endedAt;
private String streamLink;
public String getEndedAt() {return endedAt;}
public String getStreamLink() {return streamLink;};
public void setEndedAt(String endedAt) {this.endedAt = endedAt;}
public void setStreamLink(String streamLink) {this.streamLink = streamLink;}
}
|
...
*/
public class WorldScopeCreatedStream extends WorldScopeStream {
private String endedAt;
private String streamLink;
public String getEndedAt() {return endedAt;}
public String getStreamLink() {return streamLink;};
public void setEndedAt(String endedAt) {this.endedAt = endedAt;}
public void setStreamLink(String streamLink) {this.streamLink = streamLink;}
}
...
|
cf5fb07651099e38e6487eae641da07feda40b05
|
numba/tests/test_api.py
|
numba/tests/test_api.py
|
import numba
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
if __name__ == '__main__':
unittest.main()
|
import numba
from numba import jit, njit
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
class TestJitDecorator(TestCase):
"""
Test the jit and njit decorators
"""
def test_jit_nopython_forceobj(self):
with self.assertRaises(ValueError):
jit(nopython=True, forceobj=True)
def py_func(x):
return(x)
jit_func = jit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = jit(forceobj=True)(py_func)
jit_func(1)
self.assertFalse(jit_func.nopython_signatures)
def test_njit_nopython_forceobj(self):
with self.assertWarns(RuntimeWarning):
njit(forceobj=True)
with self.assertWarns(RuntimeWarning):
njit(nopython=True)
def py_func(x):
return(x)
jit_func = njit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = njit(forceobj=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
if __name__ == '__main__':
unittest.main()
|
Add testcases for jit and njit with forceobj and nopython
|
Add testcases for jit and njit with forceobj and nopython
|
Python
|
bsd-2-clause
|
numba/numba,cpcloud/numba,seibert/numba,stuartarchibald/numba,sklam/numba,seibert/numba,stonebig/numba,IntelLabs/numba,cpcloud/numba,stuartarchibald/numba,IntelLabs/numba,IntelLabs/numba,numba/numba,stonebig/numba,stonebig/numba,IntelLabs/numba,seibert/numba,stonebig/numba,gmarkall/numba,numba/numba,sklam/numba,IntelLabs/numba,stuartarchibald/numba,seibert/numba,seibert/numba,sklam/numba,stonebig/numba,sklam/numba,stuartarchibald/numba,numba/numba,cpcloud/numba,stuartarchibald/numba,cpcloud/numba,gmarkall/numba,gmarkall/numba,gmarkall/numba,cpcloud/numba,gmarkall/numba,sklam/numba,numba/numba
|
python
|
## Code Before:
import numba
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
if __name__ == '__main__':
unittest.main()
## Instruction:
Add testcases for jit and njit with forceobj and nopython
## Code After:
import numba
from numba import jit, njit
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
class TestJitDecorator(TestCase):
"""
Test the jit and njit decorators
"""
def test_jit_nopython_forceobj(self):
with self.assertRaises(ValueError):
jit(nopython=True, forceobj=True)
def py_func(x):
return(x)
jit_func = jit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = jit(forceobj=True)(py_func)
jit_func(1)
self.assertFalse(jit_func.nopython_signatures)
def test_njit_nopython_forceobj(self):
with self.assertWarns(RuntimeWarning):
njit(forceobj=True)
with self.assertWarns(RuntimeWarning):
njit(nopython=True)
def py_func(x):
return(x)
jit_func = njit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = njit(forceobj=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
if __name__ == '__main__':
unittest.main()
|
# ... existing code ...
import numba
from numba import jit, njit
from numba.tests.support import TestCase
import unittest
# ... modified code ...
numba.__version__ # not in __all__
class TestJitDecorator(TestCase):
"""
Test the jit and njit decorators
"""
def test_jit_nopython_forceobj(self):
with self.assertRaises(ValueError):
jit(nopython=True, forceobj=True)
def py_func(x):
return(x)
jit_func = jit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = jit(forceobj=True)(py_func)
jit_func(1)
self.assertFalse(jit_func.nopython_signatures)
def test_njit_nopython_forceobj(self):
with self.assertWarns(RuntimeWarning):
njit(forceobj=True)
with self.assertWarns(RuntimeWarning):
njit(nopython=True)
def py_func(x):
return(x)
jit_func = njit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = njit(forceobj=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
if __name__ == '__main__':
unittest.main()
# ... rest of the code ...
|
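The tests above pin down how jit and njit treat the nopython/forceobj flags. A small stand-alone sketch of the same dispatcher behaviour, assuming numba is installed (exact warnings and errors depend on the numba version):

from numba import jit

def identity(x):
    return x

nopython_func = jit(nopython=True)(identity)  # must compile without object mode
object_func = jit(forceobj=True)(identity)    # always falls back to object mode

nopython_func(1)
object_func(1)

print(bool(nopython_func.nopython_signatures))  # True: a nopython signature was compiled
print(bool(object_func.nopython_signatures))    # False: only object-mode code exists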
52a3a7b2a6aac284b9dd1a7edfb27cdec4d33675
|
lib/pyfrc/test_support/pyfrc_fake_hooks.py
|
lib/pyfrc/test_support/pyfrc_fake_hooks.py
|
from hal_impl.data import hal_data
class PyFrcFakeHooks:
'''
Defines hal hooks that use the fake time object
'''
def __init__(self, fake_time):
self.fake_time = fake_time
#
# Hook functions
#
def getTime(self):
return self.fake_time.get()
def getFPGATime(self):
return int((self.fake_time.get() - hal_data['time']['program_start']) * 1000000)
def delayMillis(self, ms):
self.fake_time.increment_time_by(.001 * ms)
def delaySeconds(self, s):
self.fake_time.increment_time_by(s)
def initializeDriverStation(self):
pass
@property
def ds_cond(self):
return self.fake_time.ds_cond
@ds_cond.setter
def ds_cond(self, value):
pass # ignored
|
from hal_impl.sim_hooks import SimHooks
class PyFrcFakeHooks(SimHooks):
'''
Defines hal hooks that use the fake time object
'''
def __init__(self, fake_time):
self.fake_time = fake_time
super().__init__()
#
# Time related hooks
#
def getTime(self):
return self.fake_time.get()
def delayMillis(self, ms):
self.fake_time.increment_time_by(.001 * ms)
def delaySeconds(self, s):
self.fake_time.increment_time_by(s)
#
# DriverStation related hooks
#
@property
def ds_cond(self):
return self.fake_time.ds_cond
@ds_cond.setter
def ds_cond(self, value):
pass # ignored
|
Update sim hooks for 2018
|
Update sim hooks for 2018
|
Python
|
mit
|
robotpy/pyfrc
|
python
|
## Code Before:
from hal_impl.data import hal_data
class PyFrcFakeHooks:
'''
Defines hal hooks that use the fake time object
'''
def __init__(self, fake_time):
self.fake_time = fake_time
#
# Hook functions
#
def getTime(self):
return self.fake_time.get()
def getFPGATime(self):
return int((self.fake_time.get() - hal_data['time']['program_start']) * 1000000)
def delayMillis(self, ms):
self.fake_time.increment_time_by(.001 * ms)
def delaySeconds(self, s):
self.fake_time.increment_time_by(s)
def initializeDriverStation(self):
pass
@property
def ds_cond(self):
return self.fake_time.ds_cond
@ds_cond.setter
def ds_cond(self, value):
pass # ignored
## Instruction:
Update sim hooks for 2018
## Code After:
from hal_impl.sim_hooks import SimHooks
class PyFrcFakeHooks(SimHooks):
'''
Defines hal hooks that use the fake time object
'''
def __init__(self, fake_time):
self.fake_time = fake_time
super().__init__()
#
# Time related hooks
#
def getTime(self):
return self.fake_time.get()
def delayMillis(self, ms):
self.fake_time.increment_time_by(.001 * ms)
def delaySeconds(self, s):
self.fake_time.increment_time_by(s)
#
# DriverStation related hooks
#
@property
def ds_cond(self):
return self.fake_time.ds_cond
@ds_cond.setter
def ds_cond(self, value):
pass # ignored
|
...
from hal_impl.sim_hooks import SimHooks
class PyFrcFakeHooks(SimHooks):
'''
Defines hal hooks that use the fake time object
'''
...
def __init__(self, fake_time):
self.fake_time = fake_time
super().__init__()
#
# Time related hooks
#
def getTime(self):
return self.fake_time.get()
def delayMillis(self, ms):
self.fake_time.increment_time_by(.001 * ms)
...
def delaySeconds(self, s):
self.fake_time.increment_time_by(s)
#
# DriverStation related hooks
#
@property
def ds_cond(self):
...
|
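The hooks above only make sense next to the fake_time object they delegate to. A framework-free sketch of the same pattern (not the pyfrc implementation) shows why tests built on it never actually sleep:

class FakeTime:
    def __init__(self):
        self._now = 0.0
    def get(self):
        return self._now
    def increment_time_by(self, seconds):
        self._now += seconds

class FakeHooks:
    """Delegates time handling to a controllable clock, like PyFrcFakeHooks does."""
    def __init__(self, fake_time):
        self.fake_time = fake_time
    def getTime(self):
        return self.fake_time.get()
    def delaySeconds(self, s):
        self.fake_time.increment_time_by(s)

hooks = FakeHooks(FakeTime())
hooks.delaySeconds(2.5)   # "sleeps" instantly by advancing the fake clock
print(hooks.getTime())    # 2.5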
6cfc94d8a03439c55808090aa5e3a4f35c288887
|
menpodetect/tests/opencv_test.py
|
menpodetect/tests/opencv_test.py
|
from menpodetect.opencv import (load_opencv_frontal_face_detector,
load_opencv_eye_detector)
import menpo.io as mio
takeo = mio.import_builtin_asset.takeo_ppm()
def test_frontal_face_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy)
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
def test_frontal_face_detector_min_neighbors():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy, min_neighbours=100)
assert len(pcs) == 0
assert takeo_copy.n_channels == 3
def test_eye_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_eye_detector()
pcs = opencv_detector(takeo_copy, min_size=(5, 5))
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
|
from numpy.testing import assert_allclose
from menpodetect.opencv import (load_opencv_frontal_face_detector,
load_opencv_eye_detector)
import menpo.io as mio
takeo = mio.import_builtin_asset.takeo_ppm()
def test_frontal_face_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy)
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
def test_frontal_face_detector_min_neighbors():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy, min_neighbours=100)
assert len(pcs) == 0
assert takeo_copy.n_channels == 3
def test_eye_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_eye_detector()
pcs = opencv_detector(takeo_copy, min_size=(5, 5))
assert_allclose(len(pcs), 1)
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
|
Use assert_allclose so we can see the appveyor failure
|
Use assert_allclose so we can see the appveyor failure
|
Python
|
bsd-3-clause
|
yuxiang-zhou/menpodetect,jabooth/menpodetect,yuxiang-zhou/menpodetect,jabooth/menpodetect
|
python
|
## Code Before:
from menpodetect.opencv import (load_opencv_frontal_face_detector,
load_opencv_eye_detector)
import menpo.io as mio
takeo = mio.import_builtin_asset.takeo_ppm()
def test_frontal_face_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy)
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
def test_frontal_face_detector_min_neighbors():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy, min_neighbours=100)
assert len(pcs) == 0
assert takeo_copy.n_channels == 3
def test_eye_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_eye_detector()
pcs = opencv_detector(takeo_copy, min_size=(5, 5))
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
## Instruction:
Use assert_allclose so we can see the appveyor failure
## Code After:
from numpy.testing import assert_allclose
from menpodetect.opencv import (load_opencv_frontal_face_detector,
load_opencv_eye_detector)
import menpo.io as mio
takeo = mio.import_builtin_asset.takeo_ppm()
def test_frontal_face_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy)
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
def test_frontal_face_detector_min_neighbors():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy, min_neighbours=100)
assert len(pcs) == 0
assert takeo_copy.n_channels == 3
def test_eye_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_eye_detector()
pcs = opencv_detector(takeo_copy, min_size=(5, 5))
assert_allclose(len(pcs), 1)
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
|
// ... existing code ...
from numpy.testing import assert_allclose
from menpodetect.opencv import (load_opencv_frontal_face_detector,
load_opencv_eye_detector)
import menpo.io as mio
// ... modified code ...
takeo_copy = takeo.copy()
opencv_detector = load_opencv_eye_detector()
pcs = opencv_detector(takeo_copy, min_size=(5, 5))
assert_allclose(len(pcs), 1)
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
// ... rest of the code ...
|
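The point of swapping a bare assert for assert_allclose above is the failure message: it prints the actual and desired values in the CI log. A tiny illustration, independent of menpodetect and assuming only that numpy is installed:

from numpy.testing import assert_allclose

detections = []  # pretend the detector found nothing on the CI machine
try:
    assert_allclose(len(detections), 1)
except AssertionError as exc:
    print(exc)  # reports the mismatch (actual 0, desired 1), unlike a bare assert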
4946ae0305a6add9247149784cea62823272b39e
|
seleniumlogin/__init__.py
|
seleniumlogin/__init__.py
|
from importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/'
}
driver.add_cookie(cookie)
driver.refresh()
|
from importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
domain = base_url.split(':')[-2].split('/')[-1]
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/',
'domain': domain
}
driver.add_cookie(cookie)
driver.refresh()
|
Add domain to cookie to set cookie for PhantomJS
|
Add domain to cookie to set cookie for PhantomJS
|
Python
|
mit
|
feffe/django-selenium-login,feffe/django-selenium-login
|
python
|
## Code Before:
from importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/'
}
driver.add_cookie(cookie)
driver.refresh()
## Instruction:
Add domain to cookie to set cookie for PhantomJS
## Code After:
from importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
domain = base_url.split(':')[-2].split('/')[-1]
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/',
'domain': domain
}
driver.add_cookie(cookie)
driver.refresh()
|
...
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
domain = base_url.split(':')[-2].split('/')[-1]
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/',
'domain': domain
}
driver.add_cookie(cookie)
...
|
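The domain expression added above is easy to sanity-check on its own; for live-server URLs of the kind typically passed as base_url it yields just the host, which is what PhantomJS needs on the cookie (hosts below are illustrative):

for base_url in ("http://localhost:8081", "http://127.0.0.1:8000"):
    domain = base_url.split(':')[-2].split('/')[-1]
    print(base_url, "->", domain)
# http://localhost:8081 -> localhost
# http://127.0.0.1:8000 -> 127.0.0.1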
fea93e818380d4d9b3b68bc47509058f60b8cdb7
|
src/arch/sparc/kernel/arch.c
|
src/arch/sparc/kernel/arch.c
|
/**
* @file
* @brief Implements ARCH interface for sparc processors
*
* @date 14.02.10
* @author Eldar Abusalimov
*/
#include <hal/arch.h>
#include <asm/cache.h>
#include <hal/ipl.h>
void arch_init(void) {
cache_enable();
}
void arch_idle(void) {
}
unsigned int arch_excep_disable(void) {
unsigned int ret;
unsigned int tmp;
__asm__ __volatile__ (
"rd %%psr, %0\n\t"
"andn %0, %2, %1\n\t"
"wr %1, 0, %%psr\n\t"
" nop; nop; nop\n"
: "=&r" (ret), "=r" (tmp)
: "i" (PSR_ET)
: "memory"
);
return ret;
}
void __attribute__ ((noreturn)) arch_shutdown(arch_shutdown_mode_t mode) {
ipl_disable();
arch_excep_disable();
asm ("ta 0");
while (1) {}
}
|
/**
* @file
* @brief Implements ARCH interface for sparc processors
*
* @date 14.02.10
* @author Eldar Abusalimov
*/
#include <hal/arch.h>
#include <asm/cache.h>
#include <hal/ipl.h>
void arch_init(void) {
cache_enable();
}
void arch_idle(void) {
__asm__ __volatile__ ("wr %g0, %asr19");
}
unsigned int arch_excep_disable(void) {
unsigned int ret;
unsigned int tmp;
__asm__ __volatile__ (
"rd %%psr, %0\n\t"
"andn %0, %2, %1\n\t"
"wr %1, 0, %%psr\n\t"
" nop; nop; nop\n"
: "=&r" (ret), "=r" (tmp)
: "i" (PSR_ET)
: "memory"
);
return ret;
}
void __attribute__ ((noreturn)) arch_shutdown(arch_shutdown_mode_t mode) {
ipl_disable();
arch_excep_disable();
asm ("ta 0");
while (1) {}
}
|
Add power down mode for idle circle
|
sparc: Add power down mode for idle circle
|
C
|
bsd-2-clause
|
abusalimov/embox,mike2390/embox,vrxfile/embox-trik,abusalimov/embox,Kefir0192/embox,embox/embox,embox/embox,abusalimov/embox,gzoom13/embox,gzoom13/embox,Kakadu/embox,mike2390/embox,mike2390/embox,embox/embox,gzoom13/embox,Kefir0192/embox,Kakadu/embox,Kakadu/embox,Kefir0192/embox,gzoom13/embox,mike2390/embox,Kakadu/embox,Kefir0192/embox,abusalimov/embox,abusalimov/embox,Kakadu/embox,vrxfile/embox-trik,embox/embox,gzoom13/embox,Kefir0192/embox,Kefir0192/embox,mike2390/embox,abusalimov/embox,vrxfile/embox-trik,mike2390/embox,vrxfile/embox-trik,gzoom13/embox,vrxfile/embox-trik,Kakadu/embox,vrxfile/embox-trik,Kefir0192/embox,vrxfile/embox-trik,embox/embox,mike2390/embox,embox/embox,Kakadu/embox,gzoom13/embox
|
c
|
## Code Before:
/**
* @file
* @brief Implements ARCH interface for sparc processors
*
* @date 14.02.10
* @author Eldar Abusalimov
*/
#include <hal/arch.h>
#include <asm/cache.h>
#include <hal/ipl.h>
void arch_init(void) {
cache_enable();
}
void arch_idle(void) {
}
unsigned int arch_excep_disable(void) {
unsigned int ret;
unsigned int tmp;
__asm__ __volatile__ (
"rd %%psr, %0\n\t"
"andn %0, %2, %1\n\t"
"wr %1, 0, %%psr\n\t"
" nop; nop; nop\n"
: "=&r" (ret), "=r" (tmp)
: "i" (PSR_ET)
: "memory"
);
return ret;
}
void __attribute__ ((noreturn)) arch_shutdown(arch_shutdown_mode_t mode) {
ipl_disable();
arch_excep_disable();
asm ("ta 0");
while (1) {}
}
## Instruction:
sparc: Add power down mode for idle circle
## Code After:
/**
* @file
* @brief Implements ARCH interface for sparc processors
*
* @date 14.02.10
* @author Eldar Abusalimov
*/
#include <hal/arch.h>
#include <asm/cache.h>
#include <hal/ipl.h>
void arch_init(void) {
cache_enable();
}
void arch_idle(void) {
__asm__ __volatile__ ("wr %g0, %asr19");
}
unsigned int arch_excep_disable(void) {
unsigned int ret;
unsigned int tmp;
__asm__ __volatile__ (
"rd %%psr, %0\n\t"
"andn %0, %2, %1\n\t"
"wr %1, 0, %%psr\n\t"
" nop; nop; nop\n"
: "=&r" (ret), "=r" (tmp)
: "i" (PSR_ET)
: "memory"
);
return ret;
}
void __attribute__ ((noreturn)) arch_shutdown(arch_shutdown_mode_t mode) {
ipl_disable();
arch_excep_disable();
asm ("ta 0");
while (1) {}
}
|
// ... existing code ...
}
void arch_idle(void) {
__asm__ __volatile__ ("wr %g0, %asr19");
}
unsigned int arch_excep_disable(void) {
// ... rest of the code ...
|
9a49ce93428d6e7bdfeebbed906a1868dd844169
|
anycluster/urls.py
|
anycluster/urls.py
|
from django.conf.urls import patterns, url
from anycluster import views
from django.conf import settings
urlpatterns = patterns('',
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
)
|
from django.conf.urls import url
from anycluster import views
from django.conf import settings
urlpatterns = [
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
]
|
Update url format to support Django 1.10
|
Update url format to support Django 1.10
|
Python
|
mit
|
biodiv/anycluster,biodiv/anycluster,biodiv/anycluster,biodiv/anycluster,biodiv/anycluster
|
python
|
## Code Before:
from django.conf.urls import patterns, url
from anycluster import views
from django.conf import settings
urlpatterns = patterns('',
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
)
## Instruction:
Update url format to support Django 1.10
## Code After:
from django.conf.urls import url
from anycluster import views
from django.conf import settings
urlpatterns = [
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
]
|
...
from django.conf.urls import url
from anycluster import views
from django.conf import settings
urlpatterns = [
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
]
...
|
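django.conf.urls.patterns() was removed in Django 1.10, which is why urlpatterns becomes a plain list above. Later Django releases deprecate and eventually remove url() as well in favour of re_path(); a hypothetical sketch of two of the same routes in that style (views and names mirror the record):

from django.urls import re_path
from anycluster import views

urlpatterns = [
    re_path(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
    re_path(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
]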
8d3931fd5effabf9c5d56cb03ae15630ae984963
|
postalcodes_mexico/cli.py
|
postalcodes_mexico/cli.py
|
"""Console script for postalcodes_mexico."""
import sys
import click
@click.command()
def main(args=None):
"""Console script for postalcodes_mexico."""
click.echo("Replace this message by putting your code into "
"postalcodes_mexico.cli.main")
click.echo("See click documentation at http://click.pocoo.org/")
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
"""Console script for postalcodes_mexico."""
import sys
import click
from postalcodes_mexico import postalcodes_mexico
@click.command()
@click.argument('postalcode', type=str)
def main(postalcode):
"""Console script for postalcodes_mexico."""
places = postalcodes_mexico.places(postalcode)
click.echo(places)
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
Create simple CLI for the `places` function
|
Create simple CLI for the `places` function
|
Python
|
mit
|
FlowFX/postalcodes_mexico
|
python
|
## Code Before:
"""Console script for postalcodes_mexico."""
import sys
import click
@click.command()
def main(args=None):
"""Console script for postalcodes_mexico."""
click.echo("Replace this message by putting your code into "
"postalcodes_mexico.cli.main")
click.echo("See click documentation at http://click.pocoo.org/")
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
## Instruction:
Create simple CLI for the `places` function
## Code After:
"""Console script for postalcodes_mexico."""
import sys
import click
from postalcodes_mexico import postalcodes_mexico
@click.command()
@click.argument('postalcode', type=str)
def main(postalcode):
"""Console script for postalcodes_mexico."""
places = postalcodes_mexico.places(postalcode)
click.echo(places)
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
# ... existing code ...
import sys
import click
from postalcodes_mexico import postalcodes_mexico
@click.command()
@click.argument('postalcode', type=str)
def main(postalcode):
"""Console script for postalcodes_mexico."""
places = postalcodes_mexico.places(postalcode)
click.echo(places)
return 0
# ... rest of the code ...
|
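A hedged sketch of driving the new click command without a real console, using click's built-in test runner; the import path and the postal code are assumptions based on the record above:

from click.testing import CliRunner
from postalcodes_mexico.cli import main

runner = CliRunner()
result = runner.invoke(main, ["01000"])
print(result.exit_code)  # 0 on success
print(result.output)     # whatever postalcodes_mexico.places() returned, echoed by the command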
150dad224dd985762714b73e9a91d084efb11e06
|
ob_pipelines/sample.py
|
ob_pipelines/sample.py
|
import os
from luigi import Parameter
from ob_airtable import get_record_by_name, get_record
AIRTABLE_EXPT_TABLE = 'Genomics%20Expt'
AIRTABLE_SAMPLE_TABLE = 'Genomics%20Sample'
S3_BUCKET = os.environ.get('S3_BUCKET')
def get_samples(expt_id):
expt = get_record_by_name(expt_id, AIRTABLE_EXPT_TABLE)
sample_keys = expt['fields']['Genomics samples']
for sample_key in sample_keys:
sample = get_record(sample_key, AIRTABLE_SAMPLE_TABLE)
yield sample['fields']['Name']
class Sample(object):
sample_id = Parameter()
@property
def sample(self):
if not hasattr(self, '_sample'):
self._sample = get_record_by_name(self.sample_id, AIRTABLE_SAMPLE_TABLE)['fields']
return self._sample
@property
def sample_folder(self):
return '{expt}/{sample}'.format(
bucket=S3_BUCKET,
expt = self.experiment['Name'],
sample=self.sample_id)
@property
def experiment(self):
if not hasattr(self, '_experiment'):
expt_key = self.sample['Experiment'][0]
self._experiment = get_record(expt_key, AIRTABLE_EXPT_TABLE)['fields']
return self._experiment
|
import os
from luigi import Parameter
from ob_airtable import AirtableClient
AIRTABLE_EXPT_TABLE = 'Genomics%20Expt'
AIRTABLE_SAMPLE_TABLE = 'Genomics%20Sample'
S3_BUCKET = os.environ.get('S3_BUCKET')
client = AirtableClient()
def get_samples(expt_id):
expt = client.get_record_by_name(expt_id, AIRTABLE_EXPT_TABLE)
sample_keys = expt['fields']['Genomics samples']
for sample_key in sample_keys:
sample = client.get_record(sample_key, AIRTABLE_SAMPLE_TABLE)
yield sample['fields']['Name']
class Sample(object):
sample_id = Parameter()
@property
def sample(self):
if not hasattr(self, '_sample'):
self._sample = client.get_record_by_name(self.sample_id, AIRTABLE_SAMPLE_TABLE)['fields']
return self._sample
@property
def sample_folder(self):
return '{expt}/{sample}'.format(
bucket=S3_BUCKET,
expt = self.experiment['Name'],
sample=self.sample_id)
@property
def experiment(self):
if not hasattr(self, '_experiment'):
expt_key = self.sample['Experiment'][0]
self._experiment = client.get_record(expt_key, AIRTABLE_EXPT_TABLE)['fields']
return self._experiment
|
Update to match changes in ob-airtable
|
Update to match changes in ob-airtable
|
Python
|
apache-2.0
|
outlierbio/ob-pipelines,outlierbio/ob-pipelines,outlierbio/ob-pipelines
|
python
|
## Code Before:
import os
from luigi import Parameter
from ob_airtable import get_record_by_name, get_record
AIRTABLE_EXPT_TABLE = 'Genomics%20Expt'
AIRTABLE_SAMPLE_TABLE = 'Genomics%20Sample'
S3_BUCKET = os.environ.get('S3_BUCKET')
def get_samples(expt_id):
expt = get_record_by_name(expt_id, AIRTABLE_EXPT_TABLE)
sample_keys = expt['fields']['Genomics samples']
for sample_key in sample_keys:
sample = get_record(sample_key, AIRTABLE_SAMPLE_TABLE)
yield sample['fields']['Name']
class Sample(object):
sample_id = Parameter()
@property
def sample(self):
if not hasattr(self, '_sample'):
self._sample = get_record_by_name(self.sample_id, AIRTABLE_SAMPLE_TABLE)['fields']
return self._sample
@property
def sample_folder(self):
return '{expt}/{sample}'.format(
bucket=S3_BUCKET,
expt = self.experiment['Name'],
sample=self.sample_id)
@property
def experiment(self):
if not hasattr(self, '_experiment'):
expt_key = self.sample['Experiment'][0]
self._experiment = get_record(expt_key, AIRTABLE_EXPT_TABLE)['fields']
return self._experiment
## Instruction:
Update to match changes in ob-airtable
## Code After:
import os
from luigi import Parameter
from ob_airtable import AirtableClient
AIRTABLE_EXPT_TABLE = 'Genomics%20Expt'
AIRTABLE_SAMPLE_TABLE = 'Genomics%20Sample'
S3_BUCKET = os.environ.get('S3_BUCKET')
client = AirtableClient()
def get_samples(expt_id):
expt = client.get_record_by_name(expt_id, AIRTABLE_EXPT_TABLE)
sample_keys = expt['fields']['Genomics samples']
for sample_key in sample_keys:
sample = client.get_record(sample_key, AIRTABLE_SAMPLE_TABLE)
yield sample['fields']['Name']
class Sample(object):
sample_id = Parameter()
@property
def sample(self):
if not hasattr(self, '_sample'):
self._sample = client.get_record_by_name(self.sample_id, AIRTABLE_SAMPLE_TABLE)['fields']
return self._sample
@property
def sample_folder(self):
return '{expt}/{sample}'.format(
bucket=S3_BUCKET,
expt = self.experiment['Name'],
sample=self.sample_id)
@property
def experiment(self):
if not hasattr(self, '_experiment'):
expt_key = self.sample['Experiment'][0]
self._experiment = client.get_record(expt_key, AIRTABLE_EXPT_TABLE)['fields']
return self._experiment
|
// ... existing code ...
import os
from luigi import Parameter
from ob_airtable import AirtableClient
AIRTABLE_EXPT_TABLE = 'Genomics%20Expt'
AIRTABLE_SAMPLE_TABLE = 'Genomics%20Sample'
// ... modified code ...
S3_BUCKET = os.environ.get('S3_BUCKET')
client = AirtableClient()
def get_samples(expt_id):
expt = client.get_record_by_name(expt_id, AIRTABLE_EXPT_TABLE)
sample_keys = expt['fields']['Genomics samples']
for sample_key in sample_keys:
sample = client.get_record(sample_key, AIRTABLE_SAMPLE_TABLE)
yield sample['fields']['Name']
...
@property
def sample(self):
if not hasattr(self, '_sample'):
self._sample = client.get_record_by_name(self.sample_id, AIRTABLE_SAMPLE_TABLE)['fields']
return self._sample
@property
...
def experiment(self):
if not hasattr(self, '_experiment'):
expt_key = self.sample['Experiment'][0]
self._experiment = client.get_record(expt_key, AIRTABLE_EXPT_TABLE)['fields']
return self._experiment
// ... rest of the code ...
|
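The `if not hasattr(self, '_sample')` guards above are a hand-rolled cached property around the Airtable lookups. A stand-alone equivalent using functools (not the ob_pipelines code, requires Python 3.8+):

from functools import cached_property

class Sample:
    def __init__(self, sample_id):
        self.sample_id = sample_id

    @cached_property
    def sample(self):
        print("fetching", self.sample_id)  # stands in for the Airtable request
        return {"Name": self.sample_id}

s = Sample("S1")
s.sample                  # prints "fetching S1" exactly once
print(s.sample["Name"])   # second access reuses the cached dict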
1071e256aff979a58d74bbf3127e3b8a771fee1b
|
tests/regression/61-evalAssert/01-union_evalAssert.c
|
tests/regression/61-evalAssert/01-union_evalAssert.c
|
// PARAM: --set trans.activated[+] "assert"
// Running the assert transformation on this test yields in code that is not compilable by gcc
struct s {
int a;
int b;
};
union u {
struct s str;
int i;
};
int main(){
union u un;
struct s* ptr;
un.str.a = 1;
un.str.b = 2;
ptr = &un.str;
int r;
int x;
if(r){
x = 2;
} else {
x = 3;
}
return 0;
}
|
// PARAM: --set trans.activated[+] "assert"
// Running the assert transformation on this test used to yield code that cannot be compiled with gcc, due to superfluous offsets on a pointer
struct s {
int a;
int b;
};
union u {
struct s str;
int i;
};
int main(){
union u un;
struct s* ptr;
un.str.a = 1;
un.str.b = 2;
ptr = &un.str;
int r;
int x;
if(r){
x = 2;
} else {
x = 3;
}
return 0;
}
|
Update comment in test case.
|
Update comment in test case.
|
C
|
mit
|
goblint/analyzer,goblint/analyzer,goblint/analyzer,goblint/analyzer,goblint/analyzer
|
c
|
## Code Before:
// PARAM: --set trans.activated[+] "assert"
// Running the assert transformation on this test yields in code that is not compilable by gcc
struct s {
int a;
int b;
};
union u {
struct s str;
int i;
};
int main(){
union u un;
struct s* ptr;
un.str.a = 1;
un.str.b = 2;
ptr = &un.str;
int r;
int x;
if(r){
x = 2;
} else {
x = 3;
}
return 0;
}
## Instruction:
Update comment in test case.
## Code After:
// PARAM: --set trans.activated[+] "assert"
// Running the assert transformation on this test used to yield code that cannot be compiled with gcc, due to superfluous offsets on a pointer
struct s {
int a;
int b;
};
union u {
struct s str;
int i;
};
int main(){
union u un;
struct s* ptr;
un.str.a = 1;
un.str.b = 2;
ptr = &un.str;
int r;
int x;
if(r){
x = 2;
} else {
x = 3;
}
return 0;
}
|
// ... existing code ...
// PARAM: --set trans.activated[+] "assert"
// Running the assert transformation on this test used to yield code that cannot be compiled with gcc, due to superfluous offsets on a pointer
struct s {
int a;
int b;
// ... rest of the code ...
|
0b13092a7854fe2d967d057221420a57b7a37b16
|
linter.py
|
linter.py
|
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
Change module docstring to make Travis CI build pass
|
Change module docstring to make Travis CI build pass
|
Python
|
mit
|
jackbrewer/SublimeLinter-contrib-stylint
|
python
|
## Code Before:
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
## Instruction:
Change module docstring to make Travis CI build pass
## Code After:
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
...
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
...
|
8df58655f5a7a46a781fc0e126b148943a8d5b50
|
tests/sentry/metrics/test_datadog.py
|
tests/sentry/metrics/test_datadog.py
|
from __future__ import absolute_import
import socket
from mock import patch
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
|
from __future__ import absolute_import
import socket
from mock import patch
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
tags=['instance:bar'],
host=socket.gethostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
|
Remove no longer valid test
|
Remove no longer valid test
|
Python
|
bsd-3-clause
|
BuildingLink/sentry,mvaled/sentry,jean/sentry,kevinlondon/sentry,imankulov/sentry,mitsuhiko/sentry,nicholasserra/sentry,ifduyue/sentry,gencer/sentry,fotinakis/sentry,mvaled/sentry,alexm92/sentry,alexm92/sentry,kevinlondon/sentry,beeftornado/sentry,looker/sentry,korealerts1/sentry,jean/sentry,beeftornado/sentry,fotinakis/sentry,ngonzalvez/sentry,JackDanger/sentry,ngonzalvez/sentry,nicholasserra/sentry,JamesMura/sentry,jean/sentry,gencer/sentry,daevaorn/sentry,imankulov/sentry,JamesMura/sentry,JackDanger/sentry,zenefits/sentry,alexm92/sentry,gencer/sentry,BayanGroup/sentry,ifduyue/sentry,felixbuenemann/sentry,mvaled/sentry,ifduyue/sentry,looker/sentry,ifduyue/sentry,mitsuhiko/sentry,BuildingLink/sentry,korealerts1/sentry,daevaorn/sentry,JamesMura/sentry,Natim/sentry,gencer/sentry,mvaled/sentry,Natim/sentry,zenefits/sentry,ngonzalvez/sentry,Kryz/sentry,BayanGroup/sentry,looker/sentry,daevaorn/sentry,JackDanger/sentry,daevaorn/sentry,imankulov/sentry,BuildingLink/sentry,JamesMura/sentry,ifduyue/sentry,zenefits/sentry,nicholasserra/sentry,BuildingLink/sentry,gencer/sentry,fotinakis/sentry,mvaled/sentry,Kryz/sentry,kevinlondon/sentry,JamesMura/sentry,Kryz/sentry,felixbuenemann/sentry,jean/sentry,fotinakis/sentry,Natim/sentry,looker/sentry,jean/sentry,beeftornado/sentry,zenefits/sentry,korealerts1/sentry,felixbuenemann/sentry,zenefits/sentry,BayanGroup/sentry,mvaled/sentry,BuildingLink/sentry,looker/sentry
|
python
|
## Code Before:
from __future__ import absolute_import
import socket
from mock import patch
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
## Instruction:
Remove no longer valid test
## Code After:
from __future__ import absolute_import
import socket
from mock import patch
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
tags=['instance:bar'],
host=socket.gethostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
|
// ... existing code ...
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
tags=['instance:bar'],
host=socket.gethostname(),
)
// ... rest of the code ...
|
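Why the expectation above had to drop sample_rate: assert_called_once_with compares keyword arguments exactly, so a stale expected kwarg fails even though the call itself happened. A small illustration independent of sentry:

from unittest.mock import MagicMock

stats = MagicMock()
stats.increment("sentrytest.foo", 1, tags=["instance:bar"])

# Matches the real call, so it passes
stats.increment.assert_called_once_with("sentrytest.foo", 1, tags=["instance:bar"])

# The old expectation with the extra kwarg now raises
try:
    stats.increment.assert_called_once_with(
        "sentrytest.foo", 1, sample_rate=1, tags=["instance:bar"])
except AssertionError as exc:
    print("stale expectation fails:", exc)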
da6e9416e12ce71cd3f23ded9bd75dccc62d26fe
|
fcn/config.py
|
fcn/config.py
|
import os.path as osp
def get_data_dir():
this_dir = osp.dirname(osp.abspath(__file__))
return osp.realpath(osp.join(this_dir, '../data'))
def get_logs_dir():
this_dir = osp.dirname(osp.abspath(__file__))
return osp.realpath(osp.join(this_dir, '../logs'))
|
import os.path as osp
def get_data_dir():
this_dir = osp.dirname(osp.abspath(__file__))
return osp.realpath(osp.join(this_dir, '_data'))
|
Move data directory in package
|
Move data directory in package
|
Python
|
mit
|
wkentaro/fcn
|
python
|
## Code Before:
import os.path as osp
def get_data_dir():
this_dir = osp.dirname(osp.abspath(__file__))
return osp.realpath(osp.join(this_dir, '../data'))
def get_logs_dir():
this_dir = osp.dirname(osp.abspath(__file__))
return osp.realpath(osp.join(this_dir, '../logs'))
## Instruction:
Move data directory in package
## Code After:
import os.path as osp
def get_data_dir():
this_dir = osp.dirname(osp.abspath(__file__))
return osp.realpath(osp.join(this_dir, '_data'))
|
# ... existing code ...
def get_data_dir():
this_dir = osp.dirname(osp.abspath(__file__))
return osp.realpath(osp.join(this_dir, '_data'))
# ... rest of the code ...
|
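What the surviving helper computes, shown as a tiny script; the printed paths are illustrative and depend on where the package is installed:

import os.path as osp

this_dir = osp.dirname(osp.abspath(__file__))      # e.g. .../site-packages/fcn
data_dir = osp.realpath(osp.join(this_dir, "_data"))
print(data_dir)                                     # e.g. .../site-packages/fcn/_data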
5089846e116fdd386de692f187f7c03304cfcd1d
|
attachments_to_filesystem/__openerp__.py
|
attachments_to_filesystem/__openerp__.py
|
{
"name": "Move existing attachments to filesystem",
"version": "1.0",
"author": "Therp BV",
"license": "AGPL-3",
"complexity": "normal",
"category": "Knowledge Management",
"depends": [
'base',
],
"data": [
"data/ir_cron.xml",
"data/init.xml",
],
"test": [
],
"auto_install": False,
"installable": True,
"application": False,
"external_dependencies": {
'python': ['dateutil', 'pytz'],
},
}
|
{
"name": "Move existing attachments to filesystem",
"version": "1.0",
"author": "Therp BV,Odoo Community Association (OCA)",
"license": "AGPL-3",
"complexity": "normal",
"category": "Knowledge Management",
"depends": [
'base',
],
"data": [
"data/ir_cron.xml",
"data/init.xml",
],
"test": [
],
"auto_install": False,
"installable": True,
"application": False,
"external_dependencies": {
'python': ['dateutil', 'pytz'],
},
}
|
Add Odoo Community Association (OCA) in authors
|
Add Odoo Community Association (OCA) in authors
|
Python
|
agpl-3.0
|
xpansa/knowledge,Endika/knowledge,sergiocorato/knowledge,algiopensource/knowledge,anas-taji/knowledge,acsone/knowledge,acsone/knowledge,ClearCorp-dev/knowledge,xpansa/knowledge,Endika/knowledge,Endika/knowledge,sergiocorato/knowledge,jobiols/knowledge,anas-taji/knowledge,xpansa/knowledge,ClearCorp/knowledge,ClearCorp-dev/knowledge,sergiocorato/knowledge,Endika/knowledge,algiopensource/knowledge,acsone/knowledge,algiopensource/knowledge,jobiols/knowledge,ClearCorp-dev/knowledge,jobiols/knowledge,raycarnes/knowledge,raycarnes/knowledge,xpansa/knowledge,sergiocorato/knowledge,anas-taji/knowledge,raycarnes/knowledge,raycarnes/knowledge,algiopensource/knowledge,anas-taji/knowledge,acsone/knowledge,jobiols/knowledge,ClearCorp/knowledge,ClearCorp-dev/knowledge,ClearCorp/knowledge,ClearCorp/knowledge
|
python
|
## Code Before:
{
"name": "Move existing attachments to filesystem",
"version": "1.0",
"author": "Therp BV",
"license": "AGPL-3",
"complexity": "normal",
"category": "Knowledge Management",
"depends": [
'base',
],
"data": [
"data/ir_cron.xml",
"data/init.xml",
],
"test": [
],
"auto_install": False,
"installable": True,
"application": False,
"external_dependencies": {
'python': ['dateutil', 'pytz'],
},
}
## Instruction:
Add Odoo Community Association (OCA) in authors
## Code After:
{
"name": "Move existing attachments to filesystem",
"version": "1.0",
"author": "Therp BV,Odoo Community Association (OCA)",
"license": "AGPL-3",
"complexity": "normal",
"category": "Knowledge Management",
"depends": [
'base',
],
"data": [
"data/ir_cron.xml",
"data/init.xml",
],
"test": [
],
"auto_install": False,
"installable": True,
"application": False,
"external_dependencies": {
'python': ['dateutil', 'pytz'],
},
}
|
# ... existing code ...
{
"name": "Move existing attachments to filesystem",
"version": "1.0",
"author": "Therp BV,Odoo Community Association (OCA)",
"license": "AGPL-3",
"complexity": "normal",
"category": "Knowledge Management",
# ... rest of the code ...
|
d852bf648f3423a8bc0f2b277c1c25d3f63d7e4a
|
analysis/import/CodeMaatCouplingImporter/src/main/kotlin/de/maibornwolff/codecharta/importer/codemaat/CSVRow.kt
|
analysis/import/CodeMaatCouplingImporter/src/main/kotlin/de/maibornwolff/codecharta/importer/codemaat/CSVRow.kt
|
package de.maibornwolff.codecharta.importer.codemaat
import de.maibornwolff.codecharta.model.*
import java.util.*
class CSVRow(private val row: Array<String?>, private val header: CSVHeader, private val pathSeparator: Char) {
init {
if (row.size <= header.pathColumn) {
throw IllegalArgumentException(
"Row " + Arrays.toString(row) + " has no column containing the file path. Should be in " + header.pathColumn + "th column.")
}
}
fun getFileNameFromPath(path: String): String {
return path.substring(path.lastIndexOf(pathSeparator) + 1)
}
fun asDependency(): Dependency {
val entityPath = Path(attributes.get("entity")!!).edgesList.first()
val coupledPath = Path(attributes.get("coupled")!!).edgesList.first()
return Dependency(
entityPath,
getFileNameFromPath(entityPath),
coupledPath,
getFileNameFromPath(coupledPath),
attributes.get("degree")!!.toInt(),
attributes.get("average-revs")!!.toInt()
)
}
private fun validAttributeOfRow(i: Int) =
i < row.size && row[i] != null
private val attributes =
header.columnNumbers
.filter { validAttributeOfRow(it) }
.associateBy(
{ header.getColumnName(it) },
{ row[it]!! }
)
}
|
package de.maibornwolff.codecharta.importer.codemaat
import de.maibornwolff.codecharta.model.*
import java.util.*
class CSVRow(private val row: Array<String?>, private val header: CSVHeader, private val pathSeparator: Char) {
init {
if (row.size <= header.pathColumn) {
throw IllegalArgumentException(
"Row " + Arrays.toString(row) + " has no column containing the file path. Should be in " + header.pathColumn + "th column.")
}
}
fun getFileNameFromPath(path: String): String {
return path.substring(path.lastIndexOf(pathSeparator) + 1)
}
fun asDependency(): Dependency {
val rootNode = "/root/"
val entityPath = rootNode + Path(attributes.get("entity")!!).edgesList.first()
val coupledPath = rootNode + Path(attributes.get("coupled")!!).edgesList.first()
return Dependency(
entityPath,
getFileNameFromPath(entityPath),
coupledPath,
getFileNameFromPath(coupledPath),
attributes.get("degree")!!.toInt(),
attributes.get("average-revs")!!.toInt()
)
}
private fun validAttributeOfRow(i: Int) =
i < row.size && row[i] != null
private val attributes =
header.columnNumbers
.filter { validAttributeOfRow(it) }
.associateBy(
{ header.getColumnName(it) },
{ row[it]!! }
)
}
|
Update Prefix Path in dependencies
|
Update Prefix Path in dependencies
|
Kotlin
|
bsd-3-clause
|
MaibornWolff/codecharta,MaibornWolff/codecharta,MaibornWolff/codecharta,MaibornWolff/codecharta,MaibornWolff/codecharta,MaibornWolff/codecharta,MaibornWolff/codecharta,MaibornWolff/codecharta
|
kotlin
|
## Code Before:
package de.maibornwolff.codecharta.importer.codemaat
import de.maibornwolff.codecharta.model.*
import java.util.*
class CSVRow(private val row: Array<String?>, private val header: CSVHeader, private val pathSeparator: Char) {
init {
if (row.size <= header.pathColumn) {
throw IllegalArgumentException(
"Row " + Arrays.toString(row) + " has no column containing the file path. Should be in " + header.pathColumn + "th column.")
}
}
fun getFileNameFromPath(path: String): String {
return path.substring(path.lastIndexOf(pathSeparator) + 1)
}
fun asDependency(): Dependency {
val entityPath = Path(attributes.get("entity")!!).edgesList.first()
val coupledPath = Path(attributes.get("coupled")!!).edgesList.first()
return Dependency(
entityPath,
getFileNameFromPath(entityPath),
coupledPath,
getFileNameFromPath(coupledPath),
attributes.get("degree")!!.toInt(),
attributes.get("average-revs")!!.toInt()
)
}
private fun validAttributeOfRow(i: Int) =
i < row.size && row[i] != null
private val attributes =
header.columnNumbers
.filter { validAttributeOfRow(it) }
.associateBy(
{ header.getColumnName(it) },
{ row[it]!! }
)
}
## Instruction:
Update Prefix Path in dependencies
## Code After:
package de.maibornwolff.codecharta.importer.codemaat
import de.maibornwolff.codecharta.model.*
import java.util.*
class CSVRow(private val row: Array<String?>, private val header: CSVHeader, private val pathSeparator: Char) {
init {
if (row.size <= header.pathColumn) {
throw IllegalArgumentException(
"Row " + Arrays.toString(row) + " has no column containing the file path. Should be in " + header.pathColumn + "th column.")
}
}
fun getFileNameFromPath(path: String): String {
return path.substring(path.lastIndexOf(pathSeparator) + 1)
}
fun asDependency(): Dependency {
val rootNode = "/root/"
val entityPath = rootNode + Path(attributes.get("entity")!!).edgesList.first()
val coupledPath = rootNode + Path(attributes.get("coupled")!!).edgesList.first()
return Dependency(
entityPath,
getFileNameFromPath(entityPath),
coupledPath,
getFileNameFromPath(coupledPath),
attributes.get("degree")!!.toInt(),
attributes.get("average-revs")!!.toInt()
)
}
private fun validAttributeOfRow(i: Int) =
i < row.size && row[i] != null
private val attributes =
header.columnNumbers
.filter { validAttributeOfRow(it) }
.associateBy(
{ header.getColumnName(it) },
{ row[it]!! }
)
}
|
# ... existing code ...
}
fun asDependency(): Dependency {
val rootNode = "/root/"
val entityPath = rootNode + Path(attributes.get("entity")!!).edgesList.first()
val coupledPath = rootNode + Path(attributes.get("coupled")!!).edgesList.first()
return Dependency(
entityPath,
# ... rest of the code ...
|