text
stringlengths 27
775k
|
---|
# frozen_string_literal: true
require 'lic/vendor/molinillo/lib/molinillo/compatibility'
require 'lic/vendor/molinillo/lib/molinillo/gem_metadata'
require 'lic/vendor/molinillo/lib/molinillo/errors'
require 'lic/vendor/molinillo/lib/molinillo/resolver'
require 'lic/vendor/molinillo/lib/molinillo/modules/ui'
require 'lic/vendor/molinillo/lib/molinillo/modules/specification_provider'
# Lic::Molinillo is a generic dependency resolution algorithm.
# This file only loads the vendored Molinillo sources and declares the
# top-level namespace; all behavior lives in the required files above.
module Lic::Molinillo
end
|
package com.acme.verification.confroles;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasItems;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import com.acme.model.UMLOperation;
import com.acme.model.UMLRole;
import com.acme.model.impl.UMLRoleImpl;
import com.acme.verification.confroles.ConflictingRolesPolicy.GRANT;
import org.junit.Test;
/**
 * Unit tests for {@link ConflictingRolesPolicy}.
 *
 * The policy reports the operation roles the user is missing: an empty result
 * means no conflict, a non-empty result lists the missing (conflicting) roles.
 * Every case is asserted for both GRANT modes, since the expected outcome is
 * the same in each scenario regardless of mode.
 */
public class ConflictingRolesPolicyTest {

    ConflictingRolesPolicy policy = new ConflictingRolesPolicy();

    @Test
    public void testNoRole() {
        assertNoConflict(Collections.emptyList(), Collections.emptyList());
    }

    @Test
    public void testSingleMatchingRole() {
        assertNoConflict(asRoles("admin"), asRoles("admin"));
    }

    @Test
    public void testRolelessOperationWithSingleRole() {
        assertNoConflict(asRoles("admin"), Collections.emptyList());
    }

    @Test
    public void testRolelessOperationWithMultipleRoles() {
        assertNoConflict(asRoles("admin", "user"), Collections.emptyList());
    }

    @Test
    public void testSingleRoleOperationWithNoRole() {
        assertConflicts(Collections.emptyList(), asRoles("admin"), "admin");
    }

    @Test
    public void testSingleRoleOperationWithMismatchingRole() {
        assertConflicts(asRoles("user"), asRoles("admin"), "admin");
    }

    @Test
    public void testSingleRoleOperationWithMultipleMismatchingRoles() {
        assertConflicts(asRoles("user", "officer"), asRoles("admin"), "admin");
    }

    @Test
    public void testSingleRoleOperationWithSingleMatchingRoles() {
        assertNoConflict(asRoles("user", "officer", "admin"), asRoles("admin"));
    }

    @Test
    public void testMultipleRoleOperationWithSingleMatchingRoles() {
        assertNoConflict(asRoles("user", "officer", "admin"), asRoles("admin", "officer"));
    }

    @Test
    public void testMultipleRoleOperationWithNoMatchingRole() {
        assertConflicts(asRoles("user", "reporter"), asRoles("admin", "officer"), "admin", "officer");
    }

    // -----------------------------------------------------------------------------------------------

    /** Asserts that the policy reports no conflicts, in both GRANT modes. */
    private void assertNoConflict(List<UMLRole> userRoles, List<UMLRole> operationRoles) {
        UMLOperation operation = operationWithRoles(operationRoles);
        assertThat(policy.apply(userRoles, operation, GRANT.GRANT_ALL), is(empty()));
        assertThat(policy.apply(userRoles, operation, GRANT.DENY_ALL), is(empty()));
    }

    /** Asserts that the policy reports (at least) the given missing roles, in both GRANT modes. */
    private void assertConflicts(List<UMLRole> userRoles, List<UMLRole> operationRoles, String... missing) {
        UMLOperation operation = operationWithRoles(operationRoles);
        UMLRole[] expected = asRoles(missing).toArray(new UMLRole[0]);
        assertThat(policy.apply(userRoles, operation, GRANT.GRANT_ALL), hasItems(expected));
        assertThat(policy.apply(userRoles, operation, GRANT.DENY_ALL), hasItems(expected));
    }

    /** Builds a mock operation declaring the given required roles. */
    private static UMLOperation operationWithRoles(List<UMLRole> roles) {
        UMLOperation operation = mock(UMLOperation.class);
        when(operation.roles()).thenReturn(roles);
        return operation;
    }

    private static UMLRole asRole(String role) {
        return new UMLRoleImpl(role);
    }

    private static List<UMLRole> asRoles(String... roles) {
        // Arrays.stream avoids the needless List detour of Arrays.asList(...).stream().
        return Arrays.stream(roles).map(UMLRoleImpl::new).collect(Collectors.toList());
    }
}
|
package com.sk.outlay.ui.transactions
import androidx.compose.material.Text
import androidx.compose.runtime.Composable
/**
 * Renders the add-transaction screen content.
 *
 * Currently this only displays [value] as plain text.
 */
@Composable
fun AddTransactionUI(value: String) {
    Text(value)
}
|
#!/usr/bin/env ruby
# Encoding: utf-8
#
# Copyright:: Copyright 2020, Google LLC
#
# License:: Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This example gets the forecasted run-of-network traffic data.
require 'ad_manager_api'
# Requests the run-of-network traffic forecast (7 days back and 7 days
# forward) and prints the historical and forecasted series day by day.
def get_traffic_data(ad_manager)
  # Get the ForecastService and the NetworkService.
  forecast_service = ad_manager.service(:ForecastService, API_VERSION)
  network_service = ad_manager.service(:NetworkService, API_VERSION)

  # Set the root ad unit to target the entire network.
  root_ad_unit_id =
      network_service.get_current_network()[:effective_root_ad_unit_id].to_i

  # Create a start date that's 7 days in the past and an end date that's 7
  # days in the future.
  today = ad_manager.today
  start_date = today - 7
  end_date = today + 7

  # Target the whole network by including the root ad unit and descendants.
  targeting = {
    :inventory_targeting => {
      :targeted_ad_units => [
        {
          :include_descendants => true,
          :ad_unit_id => root_ad_unit_id
        }
      ]
    }
  }

  # Request the traffic forecast data.
  traffic_data = forecast_service.get_traffic_data({
    :targeting => targeting,
    :requested_date_range => {
      :start_date => start_date.to_h, :end_date => end_date.to_h
    }
  })

  # The historical and forecasted series are displayed identically, so the
  # printing logic is shared.
  display_time_series(ad_manager, traffic_data[:historical_time_series],
                      'Historical')
  display_time_series(ad_manager, traffic_data[:forecasted_time_series],
                      'Forecasted')
end

# Prints one day-by-day time series, or a notice when the series is absent.
# `label` ('Historical' or 'Forecasted') is used in the output text.
def display_time_series(ad_manager, time_series, label)
  if time_series.nil?
    puts "No #{label.downcase} data to display."
    return
  end

  date_range = time_series[:time_series_date_range]
  series_start_date = ad_manager.date(date_range[:start_date])
  # Convert the end date to a native ruby date so that it can be passed
  # as an argument to the `step` method.
  series_end_date = ad_manager.date(date_range[:end_date]).to_date

  # Display the data by stepping through each day between start and end date.
  puts "#{label} Data:"
  series_start_date.step(series_end_date).each_with_index do |d, i|
    puts "#{d}: #{time_series[:values][i]}"
  end
end
if __FILE__ == $0
  API_VERSION = :v202105

  # Get AdManagerApi instance and load configuration from ~/ad_manager_api.yml.
  ad_manager = AdManagerApi::Api.new

  # To enable logging of SOAP requests, set the log_level value to 'DEBUG' in
  # the configuration file or provide your own logger:
  # ad_manager.logger = Logger.new('ad_manager_xml.log')

  begin
    # Fetch and print the run-of-network traffic forecast.
    get_traffic_data(ad_manager)

  # HTTP errors.
  rescue AdsCommon::Errors::HttpError => e
    puts "HTTP Error: %s" % e

  # API errors.
  rescue AdManagerApi::Errors::ApiException => e
    puts "Message: %s" % e.message
    puts 'Errors:'
    e.errors.each_with_index do |error, index|
      puts "\tError [%d]:" % (index + 1)
      error.each do |field, value|
        puts "\t\t%s: %s" % [field, value]
      end
    end
  end
end
|
import os
import numpy as np
import tensorflow as tf
from cwb.common.config_parser import construct_single_distribution
g_num_gaussians = 5
g_num_mixtures = 3
g_num_components = 3
g_num_cubes = 3
g_data_dir = 'data/'
g_result_dir = 'result/'
g_stats_dir = 'stats/'
g_evolve_dir = 'evolve/'
g_vis_dir = 'vis/'
g_cwb_working_dir = 'cwb_tmp/'
g_claici_working_dir = 'claici_tmp/'
g_result_filename_dict = {
'cwb': 'cwb.npz',
'cuturi': 'cuturi.pkl',
'claici': 'claici.npy',
'bregman': 'bregman.npz',
'exact_lp': 'exact_lp.npz',
'conv': 'conv.npz',
'staib': 'staib.npz',
'gaussian_iterative': 'gaussian_iterative.pkl'}
def get_discrete_num(dim, method):
    """Number of discrete support points to use for `method`.

    `dim` is accepted for interface compatibility but does not affect the
    result. The 'conv' method operates on a fine grid and needs far more
    points than the other methods.
    """
    # The original had an unreachable `return None` after the if/else; both
    # branches always return, so the expression form is equivalent.
    return 100000 if method == 'conv' else 1000
def get_result_filename(method, rep):
    """Result file name for `method`, prefixed with the 2-digit repeat index."""
    return f'{rep:02}-{g_result_filename_dict[method]}'
def get_stats_filename(method, rep):
    """Stats pickle file name for `method`, prefixed with the 2-digit repeat index."""
    return f'{rep:02}-{method}.pkl'
def get_evolve_filename(method, rep):
    """Evolution pickle file name for `method`, prefixed with the 2-digit repeat index."""
    return f'{rep:02}-{method}.pkl'
def get_data_nd_dir(dim):
    """Data directory for `dim`-dimensional experiments (e.g. data/3d)."""
    return os.path.join(g_data_dir, f'{dim}d')
def get_result_nd_dir(dim):
    """Result directory for `dim`-dimensional experiments."""
    return os.path.join(g_result_dir, f'{dim}d')
def get_stats_nd_dir(dim):
    """Stats directory for `dim`-dimensional experiments."""
    return os.path.join(g_stats_dir, f'{dim}d')
def get_evolve_nd_dir(dim):
    """Evolution-log directory for `dim`-dimensional experiments."""
    return os.path.join(g_evolve_dir, f'{dim}d')
def get_working_nd_dir(kind, dim, repeat):
    """Scratch directory for `kind` ('cwb' or 'claici'); None for other kinds."""
    base_by_kind = {'cwb': g_cwb_working_dir, 'claici': g_claici_working_dir}
    base = base_by_kind.get(kind)
    if base is None:
        return None
    return os.path.join(base, f'{dim}d-{repeat:04}')
def get_vis_nd_dir(dim):
    """Visualization directory for `dim`-dimensional experiments."""
    return os.path.join(g_vis_dir, f'{dim}d')
def get_result_file_path(dim, method, rep):
    """Full path of the result file for (dim, method, rep)."""
    directory = get_result_nd_dir(dim)
    return os.path.join(directory, get_result_filename(method, rep))
def get_stats_file_path(dim, method, rep):
    """Full path of the stats file for (dim, method, rep)."""
    directory = get_stats_nd_dir(dim)
    return os.path.join(directory, get_stats_filename(method, rep))
def get_vis_file_path(dim, method):
    """Full path of the visualization PNG for (dim, method)."""
    return os.path.join(get_vis_nd_dir(dim), f'{method}.png')
def make_uniform_hist(n):
    """Uniform probability vector of length `n` (entries sum to 1)."""
    return np.full(n, 1.0 / n)
def load_result_by_method(exp_result_file, method):
    """Load a barycenter result saved by `method` from `exp_result_file`.

    Returns a tuple (hist, samples, metadata): `hist` is the weight vector
    over the rows of `samples`, and `metadata` optionally carries grid
    dimensions ('width'/'height') for grid-based methods.
    """
    metadata = {}
    if method == 'cwb':
        # cwb saves a bare sample array; weights are implicitly uniform.
        result = np.load(exp_result_file)
        samples = result
        samples_count = samples.shape[0]
        hist = make_uniform_hist(samples_count)
    elif method in ['cuturi', 'claici']:
        # These methods also save only samples, so weights are uniform.
        samples = np.load(exp_result_file)
        samples_count = samples.shape[0]
        hist = make_uniform_hist(samples_count)
    elif method in ['bregman', 'exact_lp', 'conv', 'staib']:
        # Grid-based methods save explicit support points and weights
        # under the keys 'discrete_points' and 'hist'.
        result = np.load(exp_result_file)
        samples = result['discrete_points']
        hist = result['hist']
        if 'width' in result:
            # Grid-shaped results also record their dimensions.
            metadata['width'] = result['width']
            metadata['height'] = result['height']
    else:
        raise Exception('Unknown method: {}'.format(method))
    return (hist, samples, metadata)
def load_source_list(data_dir):
    """Construct a distribution for every .pkl file under `data_dir`.

    Files are processed in sorted name order so the result is deterministic.
    """
    sources = []
    for filename in sorted(os.listdir(data_dir)):  # consistent order
        if not filename.endswith('.pkl'):
            continue
        full_path = os.path.join(data_dir, filename)
        sources.append(construct_single_distribution({'pkl_path': full_path}, tf.float32))
    return sources
def check_is_uniform(hist):
    """True when every entry of `hist` equals the first entry.

    An empty hist counts as (vacuously) uniform; the previous version raised
    IndexError on empty input when indexing hist[0].
    """
    return hist.size == 0 or bool(np.all(hist == hist[0]))
def make_uniform_samples(hist, supp, sample_count):
    """Draw `sample_count` rows from support `supp` according to weights `hist`.

    For a uniform hist with fewer points than `sample_count`, the whole
    support is returned unchanged. Otherwise sampling is without replacement
    whenever enough points exist; replacement is used only when more samples
    than points are requested.
    """
    n = supp.shape[0]
    if check_is_uniform(hist):
        if n < sample_count:
            # Not enough points to draw that many distinct samples.
            return supp
        inds = np.random.choice(n, size=[sample_count], replace=False)
        return supp[inds, :]
    # Weighted sampling; replacement is only needed when over-sampling
    # (the original `False if sample_count <= n else True` simplified).
    replace = sample_count > n
    inds = np.random.choice(n, size=[sample_count], replace=replace, p=hist)
    return supp[inds, :]
|
package golib
import (
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"os"
"path/filepath"
)
var (
	// executable is the running binary's path; pathError is kept and
	// surfaced by LoadConfig so a resolution failure is not silently lost.
	executable, pathError = os.Executable()
	// configDir lets callers override the config directory via -conf;
	// it defaults to the executable's own directory.
	configDir = flag.String("conf", filepath.Dir(executable), "config.json at directory")
)
// LoadConfig reads config.json into config (any JSON-unmarshalable value).
// The file is looked up in customPath when non-empty, otherwise in the
// directory given by the -conf flag (defaulting to the executable's
// directory).
func LoadConfig(config interface{}, customPath string) error {
	if pathError != nil {
		return pathError
	}
	configPath := customPath
	if configPath == "" {
		flag.Parse()
		// The -conf flag's default is the executable's directory.
		// flag.String never returns nil, so the previous nil check on
		// configDir was dead code.
		configPath = *configDir
	}
	// Read from config.json
	jsonValue, err := ioutil.ReadFile(filepath.Join(configPath, "config.json"))
	if err != nil {
		// Error strings are lowercase per Go convention.
		return fmt.Errorf("read file error: %s", err)
	}
	// Parse to JSON
	if err := json.Unmarshal(jsonValue, config); err != nil {
		return fmt.Errorf("unmarshal error: %s", err)
	}
	return nil
}
|
# For face-related tests, look in the spec/unit/faces/module folder.
# For integration tests, which test the behavior of the module, look in the
# spec/unit/integration folder.
|
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@file:Suppress("NOTHING_TO_INLINE", "unused", "DEPRECATION")
package com.google.accompanist.insets
import androidx.compose.foundation.layout.height
import androidx.compose.ui.Modifier
import androidx.compose.ui.composed
import androidx.compose.ui.layout.IntrinsicMeasurable
import androidx.compose.ui.layout.IntrinsicMeasureScope
import androidx.compose.ui.layout.LayoutModifier
import androidx.compose.ui.layout.Measurable
import androidx.compose.ui.layout.MeasureResult
import androidx.compose.ui.layout.MeasureScope
import androidx.compose.ui.unit.Constraints
import androidx.compose.ui.unit.Density
import androidx.compose.ui.unit.Dp
import androidx.compose.ui.unit.dp
/**
 * Represents a horizontal side of the display.
 *
 * Used by [Modifier.navigationBarsWidth] to select which inset edge to match.
 */
@Deprecated(
    """
    accompanist/insets is deprecated.
    The androidx.compose equivalent of HorizontalSide is using Modifier.windowInsetsStartWidth or
    Modifier.windowInsetsEndWidth with the desired type of WindowInsets.
    For more migration information, please visit https://google.github.io/accompanist/insets/#migration
    """
)
enum class HorizontalSide { Left, Right }
/**
 * Represents a vertical side of the display.
 *
 * Used by [Modifier.statusBarsHeight] and [Modifier.navigationBarsHeight] to
 * select which inset edge to match.
 */
@Deprecated(
    """
    accompanist/insets is deprecated.
    The androidx.compose equivalent of VerticalSide is using Modifier.windowInsetsTopHeight or
    Modifier.windowInsetsBottomHeight with the desired type of WindowInsets.
    For more migration information, please visit https://google.github.io/accompanist/insets/#migration
    """
)
enum class VerticalSide { Top, Bottom }
/**
* Declare the height of the content to match the height of the status bars exactly.
*
* This is very handy when used with `Spacer` to push content below the status bars:
* ```
* Column {
* Spacer(Modifier.statusBarsHeight())
*
* // Content to be drawn below status bars (y-axis)
* }
* ```
*
* It's also useful when used to draw a scrim which matches the status bars:
* ```
* Spacer(
* Modifier.statusBarsHeight()
* .fillMaxWidth()
* .drawBackground(MaterialTheme.colors.background.copy(alpha = 0.3f)
* )
* ```
*
* Internally this matches the behavior of the [Modifier.height] modifier.
*
* @param additional Any additional height to add to the status bars size.
*/
@Deprecated(
    """
    accompanist/insets is deprecated.
    For more migration information, please visit https://google.github.io/accompanist/insets/#migration
    """,
    replaceWith = ReplaceWith(
        "windowInsetsTopHeight(WindowInsets.statusBars)",
        "androidx.compose.foundation.layout.WindowInsets",
        "androidx.compose.foundation.layout.statusBars",
        "androidx.compose.foundation.layout.windowInsetsTopHeight"
    )
)
fun Modifier.statusBarsHeight(
    additional: Dp = 0.dp,
): Modifier = composed {
    // Delegate sizing to InsetsSizeModifier with only the height side set,
    // so width is left unconstrained.
    InsetsSizeModifier(
        insetsType = LocalWindowInsets.current.statusBars,
        heightSide = VerticalSide.Top,
        additionalHeight = additional
    )
}
/**
* Declare the preferred height of the content to match the height of the navigation bars when
* present at the bottom of the screen.
*
* This is very handy when used with `Spacer` to push content below the navigation bars:
* ```
* Column {
* // Content to be drawn above the navigation bars (y-axis)
* Spacer(Modifier.navigationBarsHeight())
* }
* ```
*
* It's also useful when used to draw a scrim which matches the navigation bars:
* ```
* Spacer(
* Modifier.navigationBarsHeight()
* .fillMaxWidth()
* .drawBackground(MaterialTheme.colors.background.copy(alpha = 0.3f)
* )
* ```
*
* Internally this matches the behavior of the [Modifier.height] modifier.
*
* @param additional Any additional height to add to the navigation bars size.
*/
@Deprecated(
    """
    accompanist/insets is deprecated.
    For more migration information, please visit https://google.github.io/accompanist/insets/#migration
    """,
    replaceWith = ReplaceWith(
        "windowInsetsBottomHeight(WindowInsets.navigationBars)",
        "androidx.compose.foundation.layout.WindowInsets",
        "androidx.compose.foundation.layout.navigationBars",
        "androidx.compose.foundation.layout.windowInsetsBottomHeight"
    )
)
fun Modifier.navigationBarsHeight(
    additional: Dp = 0.dp
): Modifier = composed {
    // Delegate sizing to InsetsSizeModifier with only the height side set
    // (bottom edge), so width is left unconstrained.
    InsetsSizeModifier(
        insetsType = LocalWindowInsets.current.navigationBars,
        heightSide = VerticalSide.Bottom,
        additionalHeight = additional
    )
}
/**
* Declare the preferred width of the content to match the width of the navigation bars,
* on the given [side].
*
* This is very handy when used with `Spacer` to push content inside from any vertical
* navigation bars (typically when the device is in landscape):
* ```
* Row {
* Spacer(Modifier.navigationBarsWidth(HorizontalSide.Left))
*
* // Content to be inside the navigation bars (x-axis)
*
* Spacer(Modifier.navigationBarsWidth(HorizontalSide.Right))
* }
* ```
*
* It's also useful when used to draw a scrim which matches the navigation bars:
* ```
* Spacer(
* Modifier.navigationBarsWidth(HorizontalSide.Left)
* .fillMaxHeight()
* .drawBackground(MaterialTheme.colors.background.copy(alpha = 0.3f)
* )
* ```
*
* Internally this matches the behavior of the [Modifier.width] modifier.
*
* @param side The navigation bar side to use as the source for the width.
* @param additional Any additional width to add to the navigation bars size.
*/
@Deprecated(
    """
    accompanist/insets is deprecated.
    For more migration information, please visit https://google.github.io/accompanist/insets/#migration
    """,
    replaceWith = ReplaceWith(
        "windowInsetsStartWidth(WindowInsets.navigationBars).windowInsetsEndWidth(WindowInsets.systemBars)",
        "androidx.compose.foundation.layout.WindowInsets",
        "androidx.compose.foundation.layout.navigationBars",
        "androidx.compose.foundation.layout.windowInsetsEndWidth",
        "androidx.compose.foundation.layout.windowInsetsStartWidth"
    )
)
fun Modifier.navigationBarsWidth(
    side: HorizontalSide,
    additional: Dp = 0.dp
): Modifier = composed {
    // Delegate sizing to InsetsSizeModifier with only the width side set,
    // so height is left unconstrained.
    InsetsSizeModifier(
        insetsType = LocalWindowInsets.current.navigationBars,
        widthSide = side,
        additionalWidth = additional
    )
}
/**
 * [Modifier] class which powers the modifiers above. This is the lower level modifier which
 * supports the functionality through a number of parameters.
 *
 * We may make this public at some point. If you need this, please let us know via the
 * issue tracker.
 */
private data class InsetsSizeModifier(
    private val insetsType: WindowInsets.Type,
    private val widthSide: HorizontalSide? = null,
    private val additionalWidth: Dp = 0.dp,
    private val heightSide: VerticalSide? = null,
    private val additionalHeight: Dp = 0.dp
) : LayoutModifier {
    // Constraints that pin a sized axis to the chosen inset edge plus any
    // additional size. A null side leaves that axis unconstrained
    // (min 0 / max Infinity).
    private val Density.targetConstraints: Constraints
        get() {
            val additionalWidthPx = additionalWidth.roundToPx()
            val additionalHeightPx = additionalHeight.roundToPx()
            return Constraints(
                minWidth = additionalWidthPx + when (widthSide) {
                    HorizontalSide.Left -> insetsType.left
                    HorizontalSide.Right -> insetsType.right
                    null -> 0
                },
                minHeight = additionalHeightPx + when (heightSide) {
                    VerticalSide.Top -> insetsType.top
                    VerticalSide.Bottom -> insetsType.bottom
                    null -> 0
                },
                maxWidth = when (widthSide) {
                    HorizontalSide.Left -> insetsType.left + additionalWidthPx
                    HorizontalSide.Right -> insetsType.right + additionalWidthPx
                    null -> Constraints.Infinity
                },
                maxHeight = when (heightSide) {
                    VerticalSide.Top -> insetsType.top + additionalHeightPx
                    VerticalSide.Bottom -> insetsType.bottom + additionalHeightPx
                    null -> Constraints.Infinity
                }
            )
        }

    override fun MeasureScope.measure(
        measurable: Measurable,
        constraints: Constraints
    ): MeasureResult {
        // For a sized axis the inset-derived constraint wins outright; for an
        // unsized axis the incoming constraint is kept, only coerced into the
        // target's bounds. This mirrors how Modifier.height/width behave.
        val wrappedConstraints = targetConstraints.let { targetConstraints ->
            val resolvedMinWidth = if (widthSide != null) {
                targetConstraints.minWidth
            } else {
                constraints.minWidth.coerceAtMost(targetConstraints.maxWidth)
            }
            val resolvedMaxWidth = if (widthSide != null) {
                targetConstraints.maxWidth
            } else {
                constraints.maxWidth.coerceAtLeast(targetConstraints.minWidth)
            }
            val resolvedMinHeight = if (heightSide != null) {
                targetConstraints.minHeight
            } else {
                constraints.minHeight.coerceAtMost(targetConstraints.maxHeight)
            }
            val resolvedMaxHeight = if (heightSide != null) {
                targetConstraints.maxHeight
            } else {
                constraints.maxHeight.coerceAtLeast(targetConstraints.minHeight)
            }
            Constraints(
                resolvedMinWidth,
                resolvedMaxWidth,
                resolvedMinHeight,
                resolvedMaxHeight
            )
        }
        val placeable = measurable.measure(wrappedConstraints)
        return layout(placeable.width, placeable.height) {
            placeable.place(0, 0)
        }
    }

    // Intrinsic sizes are clamped into the inset-derived bounds so callers
    // querying intrinsics see the same limits as measurement applies.
    override fun IntrinsicMeasureScope.minIntrinsicWidth(
        measurable: IntrinsicMeasurable,
        height: Int
    ) = measurable.minIntrinsicWidth(height).let {
        val constraints = targetConstraints
        it.coerceIn(constraints.minWidth, constraints.maxWidth)
    }

    override fun IntrinsicMeasureScope.maxIntrinsicWidth(
        measurable: IntrinsicMeasurable,
        height: Int
    ) = measurable.maxIntrinsicWidth(height).let {
        val constraints = targetConstraints
        it.coerceIn(constraints.minWidth, constraints.maxWidth)
    }

    override fun IntrinsicMeasureScope.minIntrinsicHeight(
        measurable: IntrinsicMeasurable,
        width: Int
    ) = measurable.minIntrinsicHeight(width).let {
        val constraints = targetConstraints
        it.coerceIn(constraints.minHeight, constraints.maxHeight)
    }

    override fun IntrinsicMeasureScope.maxIntrinsicHeight(
        measurable: IntrinsicMeasurable,
        width: Int
    ) = measurable.maxIntrinsicHeight(width).let {
        val constraints = targetConstraints
        it.coerceIn(constraints.minHeight, constraints.maxHeight)
    }
}
|
import { NavigationContainer } from "@react-navigation/native"
import { createStackNavigator, StackScreenProps, TransitionPresets } from "@react-navigation/stack"
import { OnboardingPersonalization_highlights } from "__generated__/OnboardingPersonalization_highlights.graphql"
import { OnboardingPersonalizationListQuery } from "__generated__/OnboardingPersonalizationListQuery.graphql"
import { ArtistListItemContainer as ArtistListItem, ArtistListItemPlaceholder } from "lib/Components/ArtistListItem"
import { Disappearable } from "lib/Components/Disappearable"
import { INPUT_HEIGHT } from "lib/Components/Input/Input"
import SearchIcon from "lib/Icons/SearchIcon"
import { GlobalStore } from "lib/store/GlobalStore"
import { renderWithPlaceholder } from "lib/utils/renderWithPlaceholder"
import { compact, times } from "lodash"
import { Box, Button, color, Flex, Join, space, Spacer, Text } from "palette"
import React, { useEffect, useRef, useState } from "react"
import { FlatList, ScrollView, TouchableWithoutFeedback } from "react-native"
import { SafeAreaView } from "react-native-safe-area-context"
import { createRefetchContainer, graphql, QueryRenderer, RelayRefetchProp } from "react-relay"
import { defaultEnvironment } from "../../../relay/createEnvironment"
import { OnboardingPersonalizationModalQueryRenderer } from "./OnboardingPersonalizationModal"
// Route params for the onboarding personalization flow (neither screen takes params).
// tslint:disable-next-line:interface-over-type-literal
export type OnboardingPersonalizationNavigationStack = {
  OnboardingPersonalizationList: undefined
  OnboardingPersonalizationModal: undefined
}

const StackNavigator = createStackNavigator<OnboardingPersonalizationNavigationStack>()
// Hosts the personalization flow in its own stack: the artist list plus a
// modal search screen, presented with a modal transition and no header.
export const OnboardingPersonalization = () => {
  return (
    <NavigationContainer independent>
      <StackNavigator.Navigator
        headerMode="screen"
        screenOptions={{
          ...TransitionPresets.ModalTransition,
          headerShown: false,
        }}
      >
        <StackNavigator.Screen
          name="OnboardingPersonalizationList"
          component={OnboardingPersonalizationListQueryRenderer}
        />
        <StackNavigator.Screen
          name="OnboardingPersonalizationModal"
          component={OnboardingPersonalizationModalQueryRenderer}
        />
      </StackNavigator.Navigator>
    </NavigationContainer>
  )
}
interface OnboardingPersonalizationListNavigationProps
  extends StackScreenProps<OnboardingPersonalizationNavigationStack, "OnboardingPersonalizationList"> {}

// Props for the relay-connected list screen: navigation plus the
// `highlights` fragment data and the refetch handle.
interface OnboardingPersonalizationListProps extends OnboardingPersonalizationListNavigationProps {
  highlights: OnboardingPersonalization_highlights
  relay: RelayRefetchProp
}
// Static header: title, subtitle, and a faux search input that opens the
// modal search screen (shared by the real list and the placeholder).
const OnboardingPersonalizationListHeader = ({ navigateToModal }: { navigateToModal: () => void }) => (
  <>
    <Box px={2}>
      <Text variant="largeTitle">What artists do you collect?</Text>
      <Spacer mt={1.5} />
      <Text variant="caption" color={color("black60")}>
        Follow at least three artists you’re looking to collect or track so we can personalize your experience.
      </Text>
    </Box>
    <Spacer mt={20} />
    {/* Fake search Input */}
    <Flex px={2}>
      <TouchableWithoutFeedback onPress={navigateToModal} testID="searchArtistButton">
        <Flex flexDirection="row" borderWidth={1} borderColor={color("black10")} height={INPUT_HEIGHT}>
          <Flex pl="1" justifyContent="center" flexGrow={0}>
            <SearchIcon width={18} height={18} />
          </Flex>
          <Flex flexGrow={1} justifyContent="center" pl={1}>
            <Text color={color("black60")} fontSize={15}>
              Search artists
            </Text>
          </Flex>
        </Flex>
      </TouchableWithoutFeedback>
    </Flex>
  </>
)
// List screen: shows popular artists; following one fades its row out and
// refetches the list with that artist excluded.
export const OnboardingPersonalizationList: React.FC<OnboardingPersonalizationListProps> = ({ ...props }) => {
  const popularArtists = compact(props.highlights.popularArtists)
  // Ref to each row's Disappearable so a followed artist's row can be faded.
  const animatedOpacitiesRef = useRef<{ [key: string]: Disappearable | null }>({})
  const [excludeArtistIDs, setExcludeArtistIDs] = useState<string[]>([])

  const updateListOfArtists = (artistID: string) => {
    // Use a functional update: several follows can resolve before React
    // re-renders, and the previous plain-value form
    // (`setExcludeArtistIDs(excludeArtistIDs.concat(...))`) would drop all
    // but the last ID due to a stale closure over `excludeArtistIDs`.
    setExcludeArtistIDs((prev) => (prev.includes(artistID) ? prev : prev.concat(artistID)))
  }

  useEffect(() => {
    // Refetch so followed artists disappear from the suggestions.
    props.relay.refetch({ excludeArtistIDs })
  }, [excludeArtistIDs])

  const fadeRow = (artistID: string) => {
    animatedOpacitiesRef.current[artistID]?.disappear()
  }

  return (
    <SafeAreaView style={{ backgroundColor: "white", flexGrow: 1 }}>
      <ScrollView
        contentContainerStyle={{
          paddingTop: 60,
          paddingBottom: 80,
          justifyContent: "flex-start",
        }}
      >
        <OnboardingPersonalizationListHeader
          navigateToModal={() => {
            props.navigation.navigate("OnboardingPersonalizationModal")
          }}
        />
        <FlatList
          data={popularArtists}
          initialNumToRender={8}
          renderItem={({ item: artist }) => (
            <Disappearable ref={(ref) => (animatedOpacitiesRef.current[artist.internalID] = ref)} animateScale={false}>
              <ArtistListItem
                artist={artist}
                withFeedback
                containerStyle={{ paddingHorizontal: 20, paddingVertical: 10 }}
                onFollowFinish={() => {
                  updateListOfArtists(artist.internalID)
                  fadeRow(artist.internalID)
                }}
              />
            </Disappearable>
          )}
          keyExtractor={(artist) => artist.internalID}
          contentContainerStyle={{ paddingVertical: space(2) }}
        />
      </ScrollView>
      <Flex p={2} position="absolute" bottom={0} backgroundColor="white">
        <Button
          variant="primaryBlack"
          block
          testID="doneButton"
          onPress={() => {
            GlobalStore.actions.auth.setState({ onboardingState: "complete" })
          }}
        >
          Done
        </Button>
      </Flex>
    </SafeAreaView>
  )
}
// Relay refetch container: re-queries `highlights` whenever the list screen
// refetches with a new excludeArtistIDs value.
export const OnboardingPersonalizationListRefetchContainer = createRefetchContainer(
  OnboardingPersonalizationList,
  {
    highlights: graphql`
      fragment OnboardingPersonalization_highlights on Highlights
      @argumentDefinitions(excludeArtistIDs: { type: "[String]" }) {
        popularArtists(excludeFollowedArtists: true, excludeArtistIDs: $excludeArtistIDs) {
          internalID
          ...ArtistListItem_artist
        }
      }
    `,
  },
  graphql`
    query OnboardingPersonalizationListRefetchQuery($excludeArtistIDs: [String]) {
      highlights {
        ...OnboardingPersonalization_highlights @arguments(excludeArtistIDs: $excludeArtistIDs)
      }
    }
  `
)
// Fetches the initial highlights and renders the list, showing the
// placeholder while the query is in flight.
const OnboardingPersonalizationListQueryRenderer: React.FC<OnboardingPersonalizationListNavigationProps> = (props) => (
  <QueryRenderer<OnboardingPersonalizationListQuery>
    environment={defaultEnvironment}
    query={graphql`
      query OnboardingPersonalizationListQuery {
        highlights {
          ...OnboardingPersonalization_highlights
        }
      }
    `}
    variables={{}}
    render={renderWithPlaceholder({
      Container: OnboardingPersonalizationListRefetchContainer,
      renderPlaceholder: OnboardingPersonalizationListPlaceholder,
      initialProps: props,
    })}
  />
)
// Loading skeleton shown while the highlights query is in flight: mirrors the
// real screen's header plus a column of artist row placeholders.
const OnboardingPersonalizationListPlaceholder = ({
  navigation,
}: {
  navigation: OnboardingPersonalizationListNavigationProps["navigation"]
}) => (
  <SafeAreaView
    style={{
      backgroundColor: "white",
      flexGrow: 1,
    }}
  >
    <Spacer height={60} />
    <OnboardingPersonalizationListHeader
      navigateToModal={() => {
        navigation.navigate("OnboardingPersonalizationModal")
      }}
    />
    <Flex px={2} mt={2}>
      <Join separator={<Spacer height={20} />}>
        {/* Ten placeholder rows approximate the list's initial render. */}
        {times(10).map((index: number) => (
          <Flex key={index}>
            <ArtistListItemPlaceholder />
          </Flex>
        ))}
      </Join>
    </Flex>
  </SafeAreaView>
)
|
(ns ring.middleware.file-info
"Middleware to add Last-Modified and Content-Type headers to file responses.
This middleware is deprecated. Prefer the ring.middleware.content-type and
ring.middleware.not-modified middleware instead."
(:require [ring.util.response :as res]
[ring.util.mime-type :refer [ext-mime-type]]
[ring.util.io :refer [last-modified-date]])
(:import [java.io File]
[java.util Date Locale TimeZone]
[java.text SimpleDateFormat]))
(defn- guess-mime-type
  "Returns a String corresponding to the guessed mime type for the given file,
  or application/octet-stream if a type cannot be guessed."
  [^File file mime-types]
  (if-let [mime (ext-mime-type (.getPath file) mime-types)]
    mime
    "application/octet-stream"))
(defn- ^SimpleDateFormat make-http-format
  "Formats or parses dates into HTTP date format (RFC 822/1123)."
  []
  ;; SimpleDateFormat is not threadsafe, so construct a fresh instance per call.
  (let [fmt (SimpleDateFormat. "EEE, dd MMM yyyy HH:mm:ss ZZZ" Locale/US)]
    (.setTimeZone fmt (TimeZone/getTimeZone "UTC"))
    fmt))
(defn- not-modified-since?
  "Truthy when the client's If-Modified-Since header is at least as recent as
  the file's last-modified date; nil when the header is absent."
  [{headers :headers} last-modified]
  (when-let [modified-since (headers "if-modified-since")]
    (not (.before (.parse (make-http-format) modified-since)
                  last-modified))))
(defn file-info-response
  "Adds headers to response as described in wrap-file-info."
  {:added "1.2", :deprecated "1.2"}
  ([response request]
   (file-info-response response request {}))
  ([response request mime-types]
   (let [body (:body response)]
     ;; Only File bodies are annotated; any other body passes through untouched.
     (if (instance? File body)
       (let [file-type (guess-mime-type body mime-types)
             file-length (.length ^File body)
             lmodified (last-modified-date body)
             ;; Content-Type and Last-Modified are attached unconditionally;
             ;; the conditional below then picks 304 vs. the full response.
             response (-> response
                          (res/content-type file-type)
                          (res/header
                           "Last-Modified"
                           (.format (make-http-format) lmodified)))]
         (if (not-modified-since? request lmodified)
           ;; Client cache is current: empty 304 body with zero Content-Length.
           (-> response
               (res/status 304)
               (res/header "Content-Length" 0)
               (assoc :body ""))
           ;; Otherwise serve the file with its byte length.
           (-> response (res/header "Content-Length" file-length))))
       response))))
(defn wrap-file-info
  "Wrap a handler such that responses with a file for a body will have
  corresponding Content-Type, Content-Length, and Last Modified headers added if
  they can be determined from the file.
  If the request specifies a If-Modified-Since header that matches the last
  modification date of the file, a 304 Not Modified response is returned.
  If two arguments are given, the second is taken to be a map of file extensions
  to content types that will supplement the default, built-in map."
  {:deprecated "1.2"}
  ([handler]
   (wrap-file-info handler {}))
  ([handler mime-types]
   (fn
     ;; Synchronous arity: decorate the handler's response directly.
     ([request]
      (-> (handler request)
          (res/bind file-info-response request mime-types)))
     ;; Asynchronous (CPS) arity: decorate before invoking the respond callback.
     ([request respond raise]
      (handler request
               (fn [response]
                 (respond (file-info-response response request mime-types)))
               raise)))))
|
/**
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
*
* The Apereo Foundation licenses this file to you under the Educational
* Community License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License
* at:
*
* http://opensource.org/licenses/ecl2.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package org.opencastproject.job.api;
import static org.easymock.EasyMock.expect;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.opencastproject.job.api.Job.Status;
import org.opencastproject.security.api.OrganizationDirectoryService;
import org.opencastproject.security.api.SecurityService;
import org.opencastproject.security.api.UserDirectoryService;
import org.opencastproject.serviceregistry.api.ServiceRegistry;
import org.opencastproject.serviceregistry.api.SystemLoad;
import org.opencastproject.serviceregistry.api.SystemLoad.NodeLoad;
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.easymock.IAnswer;
import org.junit.Before;
import org.junit.Test;
/**
 * Unit tests for {@link AbstractJobProducer}, exercised through the minimal
 * concrete subclass {@link JobProducerTest} with EasyMock-stubbed registry and
 * security services.
 */
public class AbstractJobProducerTest extends EasyMockSupport {
  // Producer under test.
  private JobProducerTest jobProducer;
  // Nice mock: unstubbed calls return benign defaults instead of failing.
  private ServiceRegistry serviceRegistry;
  @Before
  public void setUp() throws Exception {
    serviceRegistry = createNiceMock(ServiceRegistry.class);
    // Fixed per-status job counts consumed by testCountJobs().
    expect(serviceRegistry.count(JobProducerTest.JOB_TYPE, Status.DISPATCHING)).andReturn(2L).anyTimes();
    expect(serviceRegistry.count(JobProducerTest.JOB_TYPE, Status.RUNNING)).andReturn(3L).anyTimes();
    // updateJob() echoes back its argument so status transitions are observable.
    final Capture<Job> job = EasyMock.newCapture();
    expect(serviceRegistry.updateJob(EasyMock.capture(job))).andAnswer(new IAnswer<Job>() {
      @Override
      public Job answer() throws Throwable {
        return job.getValue();
      }
    });
    SecurityService securityService = createNiceMock(SecurityService.class);
    UserDirectoryService userDirectoryService = createNiceMock(UserDirectoryService.class);
    OrganizationDirectoryService organizationDirectoryService = createNiceMock(OrganizationDirectoryService.class);
    jobProducer = new JobProducerTest(serviceRegistry, securityService, userDirectoryService,
            organizationDirectoryService);
  }
  // The producer reports the job type passed to its constructor.
  @Test
  public void testGetType() throws Exception {
    replayAll();
    assertEquals("test", jobProducer.getJobType());
  }
  // The default implementation accepts jobs for any operation name.
  @Test
  public void testIsReadyToAcceptJobs() throws Exception {
    replayAll();
    assertTrue(jobProducer.isReadyToAcceptJobs("any operation"));
  }
  // Counts delegate to the service registry (stubbed to 2 and 3 in setUp()).
  @Test
  public void testCountJobs() throws Exception {
    replayAll();
    assertEquals(2, jobProducer.countJobs(Status.DISPATCHING));
    assertEquals(3, jobProducer.countJobs(Status.RUNNING));
  }
  // Accepting a dispatched job transitions it to RUNNING.
  @Test
  public void testAcceptJob() throws Exception {
    replayAll();
    Job job = new JobImpl();
    job.setStatus(Status.DISPATCHING);
    assertEquals(Status.DISPATCHING, job.getStatus());
    jobProducer.acceptJob(job);
    assertEquals(Status.RUNNING, job.getStatus());
  }
  /**
   * Load-based acceptance: three host-load snapshots are queued, one per
   * isReadyToAccept() call below; max load on node "test" is 4.0.
   */
  @Test
  public void testIsReadyToAccept() throws Exception {
    expect(serviceRegistry.getRegistryHostname()).andReturn("test").anyTimes();
    expect(serviceRegistry.getMaxLoadOnNode("test")).andReturn(new NodeLoad("test", 4.0f)).anyTimes();
    SystemLoad systemLoad = new SystemLoad();
    systemLoad.addNodeLoad(new NodeLoad("test", 3.0f));
    expect(serviceRegistry.getCurrentHostLoads(true)).andReturn(systemLoad);
    SystemLoad systemLoad2 = new SystemLoad();
    systemLoad2.addNodeLoad(new NodeLoad("test", 12.0f));
    expect(serviceRegistry.getCurrentHostLoads(true)).andReturn(systemLoad2);
    SystemLoad systemLoad3 = new SystemLoad();
    systemLoad3.addNodeLoad(new NodeLoad("test", 5.0f));
    expect(serviceRegistry.getCurrentHostLoads(true)).andReturn(systemLoad3);
    replayAll();
    Job job = new JobImpl(3);
    job.setStatus(Status.DISPATCHING);
    job.setProcessingHost("same");
    // Job load lower than max load and enough free load available
    job.setJobLoad(1.0f);
    assertTrue(jobProducer.isReadyToAccept(job));
    // Job load higher than max load but some load on host
    job.setJobLoad(10.0f);
    assertFalse(jobProducer.isReadyToAccept(job));
    // Job load higher than max load and no load on host
    job.setJobLoad(5.0f);
    assertTrue(jobProducer.isReadyToAccept(job));
  }
  /** Minimal concrete AbstractJobProducer returning the injected services. */
  private class JobProducerTest extends AbstractJobProducer {
    public static final String JOB_TYPE = "test";
    private ServiceRegistry serviceRegistry;
    private SecurityService securityService;
    private UserDirectoryService userDirectoryService;
    private OrganizationDirectoryService organizationDirectoryService;
    JobProducerTest(ServiceRegistry serviceRegistry, SecurityService securityService,
        UserDirectoryService userDirectoryService, OrganizationDirectoryService organizationDirectoryService) {
      super(JOB_TYPE);
      this.serviceRegistry = serviceRegistry;
      this.securityService = securityService;
      this.userDirectoryService = userDirectoryService;
      this.organizationDirectoryService = organizationDirectoryService;
    }
    @Override
    protected ServiceRegistry getServiceRegistry() {
      return serviceRegistry;
    }
    @Override
    protected SecurityService getSecurityService() {
      return securityService;
    }
    @Override
    protected UserDirectoryService getUserDirectoryService() {
      return userDirectoryService;
    }
    @Override
    protected OrganizationDirectoryService getOrganizationDirectoryService() {
      return organizationDirectoryService;
    }
    @Override
    protected String process(Job job) throws Exception {
      return null;
    }
  }
}
|
<?php
/**
* @package Fuel\FileSystem
* @version 2.0
* @author Fuel Development Team
* @license MIT License
* @copyright 2010 - 2015 Fuel Development Team
* @link http://fuelphp.com
*/
namespace Fuel\FileSystem;
/**
 * Base handler wrapping a filesystem path (file or directory) and exposing
 * common metadata and manipulation operations on it.
 */
abstract class Handler
{
	/**
	 * @var string  path to the file or directory this handler wraps
	 */
	protected $path;
	/**
	 * @param string $path
	 */
	public function __construct($path)
	{
		$this->path = $path;
	}
	/**
	 * Checks whether the file/dir exists
	 *
	 * @return boolean
	 */
	public function exists()
	{
		return file_exists($this->path);
	}
	/**
	 * Deletes the file/dir
	 *
	 * NOTE(review): unlink() fails on directories — confirm a directory
	 * subclass overrides this.
	 *
	 * @return boolean
	 */
	public function delete()
	{
		return unlink($this->path);
	}
	/**
	 * Moves the file/dir (alias of renameTo)
	 *
	 * @param string $destination
	 *
	 * @return boolean
	 */
	public function moveTo($destination)
	{
		return $this->renameTo($destination);
	}
	/**
	 * Renames the file/dir. A name that does not start with the directory
	 * separator is resolved against the current directory, and the current
	 * extension is appended when the new name has none.
	 *
	 * @param string $name
	 *
	 * @return boolean
	 */
	public function renameTo($name)
	{
		// Relative name: keep the entry in its current directory.
		if (strpos($name, DIRECTORY_SEPARATOR) !== 0)
		{
			$name = pathinfo($this->path, PATHINFO_DIRNAME).DIRECTORY_SEPARATOR.$name;
		}
		// No extension supplied: carry over the current one.
		if ( ! pathinfo($name, PATHINFO_EXTENSION))
		{
			$name .= '.'.pathinfo($this->path, PATHINFO_EXTENSION);
		}
		if ($result = rename($this->path, $name))
		{
			// Track the new location only when the rename succeeded.
			$this->path = realpath($name);
		}
		return $result;
	}
	/**
	 * Creates a symlink to the file/dir
	 *
	 * @param string $destination
	 *
	 * @return boolean
	 */
	public function symlinkTo($destination)
	{
		return symlink($this->path, $destination);
	}
	/**
	 * Checks whether the file/dir is writable
	 *
	 * @return boolean
	 */
	public function isWritable()
	{
		return is_writable($this->path);
	}
	/**
	 * Checks whether the file/dir is readable
	 *
	 * @return boolean
	 */
	public function isReadable()
	{
		return is_readable($this->path);
	}
	/**
	 * Retrieves whether the path is a file or a dir
	 *
	 * @return string
	 */
	public function getType()
	{
		return filetype($this->path);
	}
	/**
	 * Retrieves the last access time
	 *
	 * @return integer
	 */
	public function getAccessTime()
	{
		return fileatime($this->path);
	}
	/**
	 * Retrieves the last modified time
	 *
	 * @return integer
	 */
	public function getModifiedTime()
	{
		return filemtime($this->path);
	}
	/**
	 * Retrieves the created time
	 *
	 * @return integer
	 */
	public function getCreatedTime()
	{
		return filectime($this->path);
	}
	/**
	 * Retrieves the permissions
	 *
	 * @return integer
	 */
	public function getPermissions()
	{
		return fileperms($this->path);
	}
	/**
	 * Sets the permissions
	 *
	 * @param integer|string $permissions  octal int, or octal string like '755'
	 *
	 * @return boolean
	 */
	public function setPermissions($permissions)
	{
		// String form ('755' or '0755') is normalized and parsed as octal.
		if (is_string($permissions))
		{
			$permissions = '0'.ltrim($permissions, '0');
			$permissions = octdec($permissions);
		}
		return chmod($this->path, $permissions);
	}
	/**
	 * Retrieves the path
	 *
	 * @return string
	 */
	public function getPath()
	{
		return $this->path;
	}
	/**
	 * Converts to path
	 *
	 * @return string
	 */
	public function __toString()
	{
		return $this->getPath();
	}
}
|
package it.enlea.chirper.unit.logic;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import it.enlea.chirper.logic.service.PostService;
import it.enlea.chirper.logic.service.SocialNetworkService;
import it.enlea.chirper.logic.service.parameter.PostParameters;
import it.enlea.chirper.logic.service.parameter.RequestParametersInterface;
import it.enlea.chirper.repository.PostRepository;
import it.enlea.chirper.repository.SessionPostRepository;
/**
 * Unit tests for {@link PostService} driven through the
 * {@link SocialNetworkService} command interface against an in-memory
 * {@link SessionPostRepository}.
 */
class PostServiceTest {
	// Command under test, backed by the repository below.
	SocialNetworkService command;
	PostRepository postRepository;
	@BeforeEach
	void initCommand() {
		// Fresh repository per test keeps cases independent.
		postRepository = new SessionPostRepository();
		command = new PostService(postRepository);
	}
	@Test
	void insertAPostShouldReturnEmptyStringAndShouldIncreaseThePostNumbers() {
		String userName = "elsa";
		int previousMessageNum = postRepository.getPostListByUserName(userName).size();
		RequestParametersInterface params = new PostParameters(userName, "let it gooo");
		command.setParameter(params);
		String output = command.execute();
		// A successful post produces no textual output...
		assertEquals("",output);
		// ...and persists exactly one additional post for the user.
		assertEquals(previousMessageNum+1,postRepository.getPostListByUserName(userName).size() );
	}
}
}
|
// Copyright (c) 深圳云企微商网络科技有限公司. All Rights Reserved.
// 丁川 QQ:2505111990 微信:i230760 qq群:774046050 邮箱:[email protected]
// Licensed under the Apache License, Version 2.0. See LICENSE in the project root for license information.
using System.Linq;
using DBlazor.Storage;
namespace Microsoft.Extensions.DependencyInjection
{
/// <summary>
/// 添加和使用本地化存储的扩展方法
/// </summary>
public static class ServiceCollectionExtensions
{
/// <summary>
/// 注册前端本地存储服务
/// </summary>
/// <param name="services">服务集合</param>
public static IServiceCollection AddDCStorage(this IServiceCollection services)
{
if (services.FirstOrDefault(d => d.ServiceType == typeof(ILocalStorageService)) == null)
{
services
.AddScoped<ILocalStorageService, LocalStorageService>() // 每次请求都会获取一个新的实例,但同一个请求内获取多次都只会得到相同的实例。
.AddScoped<ISyncLocalStorageService, LocalStorageService>();
services
.AddScoped<ISessionStorageService, SessionStorageService>()
.AddScoped<ISyncSessionStorageService, SessionStorageService>();
}
return services;
}
}
}
|
(ns multiformats.varint
"Implementation of an MSB unsigned variable-size integer.
Unsigned integers are serialized 7 bits at a time, starting with the
least-significant bits. The highest bit (msb) in each output byte indicates
if there is a continuation byte.
https://github.com/multiformats/unsigned-varint"
(:require
[alphabase.bytes :as b]))
;; ## Encoding
(defn write-bytes
  "Write a value as a varint to a byte array at the given offset. Returns the
  number of bytes written."
  [value ^bytes buffer offset]
  (when (neg? value)
    (throw (ex-info "Varints are unsigned and cannot be negative"
                    {:value value})))
  (loop [remaining value
         written 0]
    (let [pos (+ offset written)]
      ;; Guard against writing past the end of the buffer.
      (when (<= (alength buffer) pos)
        (throw (ex-info
                 (str "Varint write index out of bounds at position "
                      pos " (" written " bytes from offset " offset ")")
                 {:offset pos})))
      ;; Soft limit: nine encoded bytes cover 63 bits of value.
      (when (<= 9 written)
        (throw (ex-info
                 "Varints larger than nine bytes are not currently supported"
                 {:value value})))
      (if (< remaining 0x80)
        ;; Fewer than seven bits left: final byte, msb clear.
        (do (b/set-byte buffer pos (bit-and remaining 0x7F))
            (inc written))
        ;; More to come: emit the low seven bits with the continuation bit set.
        (do (b/set-byte buffer pos (bit-or (bit-and remaining 0x7F) 0x80))
            (recur (unsigned-bit-shift-right remaining 7)
                   (inc written)))))))
(defn encode
  "Encode a value as a sequence of varint bytes. Returns the encoded byte
  array."
  ^bytes
  [value]
  ;; Write into a scratch buffer large enough for any supported varint, then
  ;; trim to the exact encoded length.
  (let [scratch (b/byte-array 9)
        n (write-bytes value scratch 0)
        out (b/byte-array n)]
    (b/copy scratch 0 out 0 n)
    out))
;; ## Decoding
(defn read-bytes
  "Read bytes from the byte array at the given offset. Returns a tuple with the
  decoded varint and the number of bytes read."
  [^bytes data offset]
  (loop [pos offset
         consumed 0
         acc 0]
    ;; Guard against running off the end of the input.
    (when (<= (alength data) pos)
      (throw (ex-info
               (str "Ran out of bytes to decode at position " pos
                    " (" consumed " bytes from offset " offset ")")
               {:offset offset
                :length (alength data)})))
    ;; Soft limit mirroring write-bytes.
    (when (<= 9 consumed)
      (throw (ex-info
               "Varints larger than nine bytes are not currently supported"
               {:offset offset})))
    (let [b (b/get-byte data pos)]
      (if (<= 0x80 b)
        ;; Continuation byte: accumulate its low seven bits and keep reading.
        (recur (inc pos)
               (inc consumed)
               (bit-or (bit-shift-left (bit-and b 0x7F) (* 7 consumed)) acc))
        ;; Final byte (msb clear): fold it in and report bytes consumed.
        [(bit-or (bit-shift-left b (* 7 consumed)) acc)
         (inc consumed)]))))
(defn decode
  "Decode a byte array as a varint value. Returns the decoded value.
  This is a shorthand for reading the bytes at the beginning of the array and
  ignoring any extra data."
  [buffer]
  (let [[value _] (read-bytes buffer 0)]
    value))
|
import play.api.db.Database
import play.api.test.{FakeRequest, PlaySpecification, WithApplication}
import sd.Uid2Name
/**
 * Functional specs for the 1pay SMS charging endpoint (/1pay/charge):
 * signature validation, user-id lookup, successful charging, and
 * duplicate-request rejection. All responses are HTTP 200 with a JSON body
 * whose "status" field carries the business outcome (1 = ok, 0 = error).
 */
class SmsChargeSpec extends PlaySpecification {
  "/1pay/charge" should {
    // Missing form fields must be rejected as a syntax error.
    "signature error when the Form not contain enough data" in new WithApplication {
      val req = FakeRequest(GET, "/1pay/charge")
        .withFormUrlEncodedBody(
          "access_key" -> ""
        )
      val Some(result) = route(app, req)
      status(result) must equalTo(OK)
      val js = contentAsJson(result)
      (js \ "type").asOpt[String] must beSome("text")
      (js \ "status").asOpt[Int] must beSome(0)
      (js \ "sms").asOpt[String] must beSome.which(
        _.startsWith("Tin nhan sai cu phap.")
      )
    }
    // All fields present but the signature does not match the payload.
    "signature error when the signature is not match" in new WithApplication {
      val req = FakeRequest(GET, "/1pay/charge")
        .withFormUrlEncodedBody(
          "access_key" -> "access_key",
          "amount" -> "10000",
          "command_code" -> "GAME1",
          "error_code" -> "error_code",
          "error_message" -> "error_message",
          "mo_message" -> "MI NAP10 dunglp",
          "msisdn" -> "84988888888",
          "request_id" -> "request_id",
          "request_time" -> "2013-07-06T22:54:50Z",
          "signature" -> "invalid signature"
        )
      val Some(result) = route(app, req)
      status(result) must equalTo(OK)
      val js = contentAsJson(result)
      (js \ "type").asOpt[String] must beSome("text")
      (js \ "sms").asOpt[String] must beSome.which(
        _.startsWith("Tin nhan sai cu phap.")
      )
      (js \ "status").asOpt[Int] must beSome(0)
    }
    // Valid signature (built by SignData) but the mo_message carries a
    // non-numeric user reference ("dunglp"), so no uid can be extracted.
    "signature error when can't get uid from mo_message" in new WithApplication {
      val req = FakeRequest(GET, "/1pay/charge")
        .withFormUrlEncodedBody(
          SignData(
            "access_key" -> "access_key",
            "amount" -> "10000",
            "command_code" -> "GAME1",
            "error_code" -> "error_code",
            "error_message" -> "error_message",
            "mo_message" -> "MI NAP10 dunglp",
            "msisdn" -> "84988888888",
            "request_id" -> "request_id",
            "request_time" -> "2013-07-06T22:54:50Z"
          ): _*
        )
      val Some(result) = route(app, req)
      status(result) must equalTo(OK)
      val js = contentAsJson(result)
      (js \ "type").asOpt[String] must beSome("text")
      (js \ "sms").asOpt[String] must beSome.which(
        _.startsWith("Tin nhan sai cu phap.")
      )
      (js \ "status").asOpt[Int] must beSome(0)
    }
    // A syntactically valid uid that is not present in the users table.
    "ErrId when user id not exist" in new WithApplication {
      val uid = 99999999
      val req = FakeRequest(GET, "/1pay/charge")
        .withFormUrlEncodedBody(
          SignData(
            "access_key" -> "access_key",
            "amount" -> "10000",
            "command_code" -> "GAME1",
            "error_code" -> "error_code",
            "error_message" -> "error_message",
            "mo_message" -> s"MI NAP10 $uid",
            "msisdn" -> "84988888888",
            "request_id" -> "request_id",
            "request_time" -> "2013-07-06T22:54:50Z"
          ): _*
        )
      val Some(result) = route(app, req)
      status(result) must equalTo(OK)
      val js = contentAsJson(result)
      (js \ "type").asOpt[String] must beSome("text")
      (js \ "sms").asOpt[String] must beSome.which(
        _.startsWith(s"So ID: $uid khong hop le.")
      )
      (js \ "status").asOpt[Int] must beSome(0)
    }
    // Happy path, then the same request_id again: the first charge succeeds
    // and credits the user; the replay is rejected as already processed.
    "Ok then ErrProcessed" in new WithApplication {
      import anorm._, SqlParser._
      val db = app.injector.instanceOf[Database]
      val reqId = "request_id_1"
      // Clean any previous log entry so the first request is seen as new.
      db.withConnection { implicit conn =>
        SQL"DELETE FROM 1pay_log WHERE request_id = $reqId".execute()
      }
      EnsureUser1.run(db)
      val uid2Name = app.injector.instanceOf[Uid2Name]
      uid2Name(1) must beSome("Trần Văn Nguyễn")
      val uid = 1
      val req = FakeRequest(GET, "/1pay/charge")
        .withFormUrlEncodedBody(
          SignData(
            "access_key" -> "access_key",
            "amount" -> "10000",
            "command_code" -> "GAME1",
            "error_code" -> "error_code",
            "error_message" -> "error_message",
            "mo_message" -> s"MI NAP10 $uid",
            "msisdn" -> "84988888888",
            "request_id" -> reqId,
            "request_time" -> "2013-07-06T22:54:50Z"
          ): _*
        )
      val Some(result) = route(app, req)
      status(result) must equalTo(OK)
      val js = contentAsJson(result)
      (js \ "type").asOpt[String] must beSome("text")
      (js \ "sms").asOpt[String] must beSome.which(
        _.startsWith(
          "Ban da nap thanh cong 1.000.000 Bao vao tai khoan: Tran Van Nguyen."
        )
      )
      (js \ "status").asOpt[Int] must beSome(1)
      // Replay with the identical request: must be flagged as processed.
      val Some(result2) = route(app, req)
      status(result2) must equalTo(OK)
      val js2 = contentAsJson(result2)
      (js2 \ "type").asOpt[String] must beSome("text")
      (js2 \ "sms").asOpt[String] must beSome.which(
        _.startsWith("Tin nhan da duoc xu ly.")
      )
      (js2 \ "status").asOpt[Int] must beSome(0)
      // Balance check: EnsureUser1 presumably seeds 5_000_000_000 coin — the
      // charge adds 1_000_000 on top. TODO confirm seed value in EnsureUser1.
      val coin = db.withConnection { implicit conn =>
        SQL"SELECT coin FROM users WHERE id = $uid".as(scalar[Long].singleOpt)
      }
      coin must beSome(5000000000L + 1000000)
    }
  }
}
|
// Copyright (c) Lykke Corp.
// See the LICENSE file in the project root for more information.
namespace Donut.Services
{
using System.Net.Http;
public class ProxyService
{
public ProxyService()
{
this.Client = new HttpClient(new HttpClientHandler { AllowAutoRedirect = false, UseCookies = false });
}
internal HttpClient Client { get; private set; }
}
}
|
import java.util.Scanner;
public class Controller {
    public static final String HELLO_PATTERN = "Hello";
    public static final String WORLD_PATTERN = "world!";
    private Model model;
    private View view;

    public Controller(Model model, View view) {
        this.model = model;
        this.view = view;
    }

    /**
     * Runs the interactive session: prompts for the two sentence parts,
     * stores each on the model, then prints the assembled sentence.
     */
    public void processUser() {
        Scanner scanner = new Scanner(System.in);
        model.addStringToSentence(inputStrWithScanner(scanner, View.INPUT_HELLO_STRING, HELLO_PATTERN));
        model.addStringToSentence(inputStrWithScanner(scanner, View.INPUT_WORLD_STRING, WORLD_PATTERN));
        view.printMessageAndSentence(View.OUR_SENTENCE, model.getSentence());
    }

    /**
     * Prompts with {@code inputStr} and reads lines until one equals
     * {@code pattern}, re-prompting with an error prefix on each mismatch.
     */
    private String inputStrWithScanner(Scanner sc, String inputStr, String pattern) {
        view.printMessage(inputStr);
        while (true) {
            String candidate = sc.nextLine();
            if (candidate.equals(pattern)) {
                return candidate;
            }
            view.printMessage(View.WRONG_INPUT_STRING_DATA + inputStr);
        }
    }
}
|
package scappla
import scala.language.experimental.macros
/** Convenience base for real-valued `Value`s: a `Double` scalar with unit shape. */
trait AbstractReal extends Value[Double, Unit] {
  // Arithmetic delegates to the standard Double field instance.
  override def field = BaseField.doubleBaseField
  // Scalars carry no shape information.
  override def shape = ()
}
|
// Base URL of the backend API server.
export const SERVER_URL = 'http://localhost:3005'
// Root URL from which uploaded images are served.
export const IMAGE_PATH = SERVER_URL + '/images/'
|
This repository is intended to house some simple AVR C
projects and examples for those just getting started
with the Atmel AVR line of microcontrollers.
|
package com.takusemba.cropmesample.ui.adapters
import android.content.Context
import android.graphics.Point
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.view.WindowManager
import android.widget.TextView
import androidx.recyclerview.widget.GridLayoutManager
import androidx.recyclerview.widget.RecyclerView
import com.takusemba.cropmesample.R
import com.takusemba.cropmesample.models.Album
import com.takusemba.cropmesample.ui.OnPhotoClickListener
/**
 * Adapter rendering one row per [Album]; each row shows the album title plus a
 * grid of its photos backed by a nested [PhotoAdapter].
 *
 * @param context used to measure the screen and build layout managers
 * @param albums backing list of albums; mutated via [addItem] and [clear]
 * @param listener forwarded to each row's [PhotoAdapter] for photo taps
 */
class AlbumAdapter(
    private val context: Context,
    private val albums: MutableList<Album>,
    private val listener: OnPhotoClickListener
) : RecyclerView.Adapter<AlbumAdapter.ViewHolder>() {

    // Edge length (px) of one photo cell: screen width divided by the column count.
    // NOTE(review): defaultDisplay/getSize are deprecated on newer APIs — confirm min SDK.
    private val length: Int

    init {
        val point = Point()
        val manager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
        manager.defaultDisplay.getSize(point)
        length = point.x / COLUMNS
    }

    override fun onCreateViewHolder(viewGroup: ViewGroup, viewType: Int): ViewHolder {
        val view = LayoutInflater.from(viewGroup.context).inflate(R.layout.item_album, viewGroup, false)
        return ViewHolder(view)
    }

    override fun onBindViewHolder(holder: ViewHolder, position: Int) {
        val album = albums[position]
        holder.title.text = album.name
        // Each album row hosts its own grid of photos.
        val adapter = PhotoAdapter(album.photos, listener, length)
        holder.recyclerView.layoutManager = GridLayoutManager(context, COLUMNS)
        holder.recyclerView.adapter = adapter
        holder.recyclerView.setHasFixedSize(true)
    }

    override fun getItemCount(): Int = albums.size

    class ViewHolder(itemView: View) : RecyclerView.ViewHolder(itemView) {
        val title: TextView = itemView.findViewById(R.id.title)
        val recyclerView: RecyclerView = itemView.findViewById(R.id.recycler_view)
    }

    /**
     * Appends [album] and notifies only the inserted position, so RecyclerView
     * can animate the insertion instead of rebinding every row
     * (fix: previously used a blanket notifyDataSetChanged()).
     */
    fun addItem(album: Album) {
        albums.add(album)
        notifyItemInserted(albums.lastIndex)
    }

    /** Removes all albums; the whole data set is invalid, so notify globally. */
    fun clear() {
        albums.clear()
        notifyDataSetChanged()
    }

    companion object {
        // Photos per row in each album's grid.
        private const val COLUMNS = 4
    }
}
|
/* Fortran routine ortho_obl rewritten in C,
Thomas Roatsch, DLR 7-Mar-2001
CONVERT L,S TO LAT LONG OR LAT,LONG TO L,S FOR THE ORTHOGRAPHIC
PROJECTION FOR AN OBLATE SPHEROID
11SEP96 -LWK- CODE ADAPTED FROM SUBR. TRANV, FOR USE BY MP_ROUTINES
23Oct97 -Scholten- added check for THR near 90 deg.
return 0=O.K. 1=POINT OFF PLANET
M 1=DIRECT 2=INVERSE
DATA
1 X SPECIAL SAMPLE POINT
2 Z SPECIAL LINE POINT
3 TH SPECIAL LATITUDE
4 TH1 LATITUDE OF SPECIAL PARALLEL OR SPECIAL OBLIQUE LONGITUDE
5 TH2 LATITUDE OF SPECIAL PARALLEL
6 LAM SPECIAL LONGITUDE WEST
7 F SCALE (KM/PIXEL)
8 CAS +1 IF VISIBLE POLE IS N. -1 IF VISIBLE POLE IS S.
M M=2 LINE,SAMPLE TO LAT,LONG (INVERSE)
M M=1 LAT,LONG TO LINE,SAMP (DIRECT)
25 RP POLAR RADIUS (KM)
26 RE EQUATORIAL RADIUS (KM)
9 PSI NORTH ANGLE
****** ANGLES IN DATA() IN DEGREES ******
****** LAT,LONG IN RADIANS ******
****** ALL LATITUDES PLANETOCENTRI******
****** ALL LONGITUDES WEST ****** */
#include <math.h>
#include "mp_private.h"
#define SMALL 1e-8
/* special functions */
/* GEOCENTRIC RADIUS */
double gcr (double rpp, double rep, double thr)
{
double help1,help2,help3;
help1 = rpp*cos(thr);
help2 = rep*sin(thr);
help3 = rpp * rep/sqrt(help1*help1 + help2*help2);
return help3;
}
/* GEODETIC LATITUDE corresponding to planetocentric latitude thr (radians)
   on a spheroid with polar radius rpp and equatorial radius rep.
   NOTE(review): diverges as thr -> 0 (tan(thr) in a denominator); callers
   clamp thr away from zero before calling. */
double phig (double rpp, double rep, double thr)
{
   return PI_OVER_2 - fabs(atan(-rpp * rpp / (rep * rep) * 1.0 / tan(thr)));
}
/* dsign from FORTRAN: magnitude of a with the sign of b (b == 0 counts as
   non-negative). */
double orthodsign (double a, double b)
{
   return (b < 0) ? -fabs(a) : fabs(a);
}
/* dmod from FORTRAN: a - trunc(a/b)*b, with the quotient truncated via an
   int cast (assumes |a/b| fits in an int). */
double orthodmod (double a, double b)
{
   double quotient = (double) ((int) (a / b));
   return a - quotient * b;
}
/* Orthographic (oblate spheroid) projection transform.
   m=1: DIRECT  (*lat,*longi in radians -> *line,*sample)
   m=2: INVERSE (*line,*sample -> *lat,*longi in radians)
   data[] holds the projection parameters; indices here are 0-based versions
   of the 1-based FORTRAN DATA slots listed in the file-top comment
   (e.g. data[24]/data[25] are the polar/equatorial radii in km).
   Returns 0 on success, 1 when the point is off the planet.
   NOTE(review): several locals (thr0, cpsi, sinnor, cosnor, scpcsl, scpccl,
   clcc2p, clcs2p, deltsq) are assigned but never read — kept from the
   original FORTRAN port. */
int ortho_obl_c(int m, float *data, float *line,
                float *sample, float *lat, float *longi)
{
   double lam,lamr,latr,longr;
   double k1,k2,k3,k3sqrt,dif1[3],dif2[3],lambar,north,l;
   double lat8,long8;
   double xc,zc,th,thr,thr0,f,psi,psir,rp,rpp,re,rep;
   double r,phi,x11,z11,x1,y1,z1,x2,y2,z2;
   double c1,c2,c3,c4,ca,ce,co,sa,so;
   double rlat,rlon,cphi,cpsi;
   double sinlat,coslat,sinlon,coslon,sinnor,cosnor,fl;
   double req,slccpc,slcspc,scpcsl,scpccl,clcc2p,clcs2p,slcc2p;
   double rpsq,resq,sinlam,coslam,sinl,cosl;
   double rcp,delx,xdel,delz,zdel,apoiup;
   double adel,sindel,cosdel;
   double dd,a1,b1,d1,a2,b2;
   double alpha,beta,gamma,delta;
   double alphsq,betasq,gammsq,deltsq,d1sq,c2sq,b2sq,gresq,drpsq;
   double pcpx,pcpy,pcpz,rad1,rad2;
/* CONVERT ANGLES AND DIMENSIONS TO RADIANS AND PIXELS RESPECTIVELY
   AND float DATA ITEMS TO double */
   xc = (double) data[0];
   zc = (double) data[1];
   th = (double) data[2];
   thr = RETURN_RADIANS(th);
   if(thr == 0) thr=SMALL; /* in case center_lat=0 */
   thr0 = thr;
   lam = (double) data[5];
   lamr = RETURN_RADIANS(lam);
   f = (double) data[6];
   psi = (double) data[8];
   psir = RETURN_RADIANS(psi);
   /* radii are scaled by the km/pixel factor f: rpp/rep are in pixels */
   rp = (double) data[24];
   rpp = rp / f;
   re = (double) data[25];
   rep = re / f;
   if (m == 1 )
      { /* DIRECT */
      lat8 = (double) *lat;
      long8 = (double) *longi;
      latr = lat8;
      longr = long8;
      r = gcr(rpp,rep,latr);
      phi = phig(rpp,rep,thr);
      phi = orthodsign(phi,thr);
      /* project onto the image plane, then rotate by the north angle psi */
      x11 = -r*cos(latr)*sin(longr-lamr);
      z11 = r*(sin(phi)*cos(latr)*cos(longr-lamr)-cos(phi)*sin(latr));
      x1 = x11;
      z1 = z11-gcr(rpp,rep,thr)*sin(phi-thr);
      *sample = x1*cos(psir)-z1*sin(psir)+xc;
      *line = x1*sin(psir)+z1*cos(psir)+zc;
/* BACK-OF-PLANET TEST */
      c1 = cos(thr);
      c2 = cos(TWO_PI-lamr);
      c3 = sin(thr);
      c4 = sin(TWO_PI-lamr);
      ca = cos(latr);
      co = cos(TWO_PI-longr);
      sa = sin(latr);
      so = sin(TWO_PI-longr);
      ce = ca*co*c1*c2+ca*so*c1*c4+sa*c3; /* COSINE EMISSION ANGLE */
/* RETURNS .TRUE. IF POINT LAT,LON IS ON BACK OF PLANET W.R.T. TH,LAM */
      if(ce < 0) return 1;
      else return 0;
      }
/* INVERSE */
   rlat = *sample-xc;
   rlon = *line-zc;
   /* exactly at the projection center: answer is the center lat/long */
   if( (rlat == 0) && (rlon ==0) )
     {
     *lat = thr;
     *longi = lamr;
     return 0;
     }
   cphi = th;
   cpsi = lam;
   north = psi;
   /* keep thr strictly inside (-90, 90) degrees so tan/cos stay finite */
   if (fabs(thr) > RETURN_RADIANS(90.0-SMALL))
      thr = orthodsign(RETURN_RADIANS(90.0-SMALL),thr);
   sinlat=sin(thr);
   coslat=cos(thr);
   sinlon=sin(lamr);
   coslon=cos(lamr);
   sinnor=sin(psir);
   cosnor=cos(psir);
   /* fl/req are the radii in km (unscaled), used for the spheroid solve */
   fl=rp;
   req=re;
   slccpc=sinlat*coslon;
   slcspc=sinlat*sinlon;
   scpcsl=sinlon*coslon*sinlat;
   scpccl=sinlon*coslon*coslat;
   clcc2p=coslat*coslon*coslon;
   clcs2p=coslat*sinlon*sinlon;
   slcc2p=sinlat*coslon*coslon;
/* CALC ANGLE LAMBDA BAR */
   rpsq=fl;
   rpsq=rpsq*rpsq;
   resq=req;
   resq=resq*resq;
   lambar=((coslat*coslat/resq+sinlat*sinlat/rpsq)/
     sqrt((coslat*coslat/(resq*resq)+sinlat*sinlat/(rpsq*rpsq))));
   if(lambar > 1) lambar=1; /* clamp rounding error before acos */
   lambar=acos(lambar);
   lambar=RETURN_RADIANS(cphi)+lambar;
   sinlam=sin(lambar);
   coslam=cos(lambar);
   l=RETURN_RADIANS(cphi)-lambar;
   sinl=sin(l);
   cosl=cos(l);
/* GET RADIUS OF PLANET AT C.P. */
   rcp= gcr(rpp,rep,thr);
/* CONVERT FROM PIXELS TO KM */
   rcp = f*rcp;
/* CALC.ANGLE BETWEEN UP AND POINT OF INTEREST
   IN PLANE OF PROJECTION SUBTENDED AT CENTER OF PROJECTION */
   delx=rlat;
   xdel=delx;
   delz=rlon;
   zdel=delz;
   apoiup=atan2(-xdel,-zdel);
/* CALC.SIN AND COS OF THE ANGLE BETWEEN THE DIRECTION OF
   NORTH IN THE IMAGE PLANE AND THE POINT OF INTEREST SUBTENDED AT
   THE CENTER OF PROJECTION */
   adel=RETURN_RADIANS(north) + apoiup;
   sindel=sin(adel);
   cosdel=cos(adel);
   if(sindel == 1) cosdel=0.0;
   if(sindel == -1) cosdel=0.0;
/* CALC.DISTANCE OF POINT OF INTEREST FROM
   CENTER OF PROJECTION IN PLANE OF PROJECTION
   AT TRUE SCALE */
   dd=f * sqrt( (xdel*xdel) + (zdel*zdel) );
/* CHECK WHETHER POINT OF INTEREST IS OFF PLANET */
   if(req < dd) return 1;
/* CALC.COEFFIEIENTS FOR TWO PLANES NORMAL
   TO PLANE OF PROJECTION.
   PLANE 1 - NORMAL TO LINE CONNECTION CENTER OF PROJECTION
   AND POINT OF INTEREST
   PLANE 2 - CONTAINS LINE CONNECTION CENTER OF
   PROJECTION AND POINT OF INTEREST
   PLANE 1 A1*X+B1*Y+C1*Z+D1=0
   PLANE 2 A2*X+B2*Y+C2*Z=0 */
   a1=-sindel*sinlon-cosdel*coslon*sinlam;
   b1=-sindel*coslon+cosdel*sinlon*sinlam;
   c1=cosdel*coslam;
   d1=-dd*sindel*sindel+rcp*cosdel*sinlam*coslat
     -rcp*sinlat*coslam*cosdel-dd*cosdel*cosdel*slcc2p*sinlam
     -dd*cosdel*cosdel*coslam*coslam
     -dd*sinlam*sinlam*cosdel*cosdel*sinlon*sinlon;
   a2=-cosdel*sinlon*cosl+sindel*slccpc;
   b2=-cosdel*coslon*cosl-sindel*slcspc;
   c2=-coslat*sindel;
/* CALCULATE PARAMETRIC VARIABLES IN
   SIMULTANEOUS SOLN.OF PLANE 1,PLANE 2,AND SPHEROID */
   alpha=a2*c1-a1*c2;
   beta=a2*b1-a1*b2;
   gamma=b1*c2-b2*c1;
   delta=c1*b2-b1*c2;
/* CALCULATE X COORDINATE
   EQUATION IS X=K1+OR-K2*SQRT(K3) */
   alphsq=alpha*alpha;
   betasq=beta*beta;
   gammsq=gamma*gamma;
   deltsq=delta*delta;
   d1sq=d1*d1;
   c2sq=c2*c2;
   b2sq=b2*b2;
   gresq=gammsq*resq;
   drpsq=deltsq*rpsq;
   z1=drpsq*(alphsq+gammsq)+betasq*gresq;
   k1=((alpha*c2*d1*drpsq)+(beta*b2*d1*gresq))/z1;
   k2=(gamma*delta*fl)/z1;
   k3=2.0*alpha*c2*beta*b2*resq;
   k3=k3+(-c2sq*drpsq-b2sq*gresq-alphsq*b2sq*resq-betasq*resq*c2sq);
   k3=k3*d1sq;
   k3=k3+(gresq*drpsq+drpsq*resq*alphsq+resq*betasq*gresq);
   /* negative discriminant: the line of sight misses the spheroid */
   if(k3 < 0) return 1;
   k3sqrt=sqrt(k3);
   z1=k2*k3sqrt;
   x1=k1+z1;
   x2=k1-z1;
/* MAKE THE BACK OF PLANET TEST */
   y1=-d1*c2;
   y2=y1;
   y1=(y1+alpha*x1)/gamma;
   y2=(y2+alpha*x2)/gamma;
   z1=(-b2*d1+beta*x1)/delta;
   z2=(-b2*d1+beta*x2)/delta;
/* (X1,Y1,Z1) IS VECTOR P01
   (X2,Y2,Z2) IS VECTOR P02
   PCP IS VECTOR FROM PLANET CENTER TO CENTER OF PROJECTION
   FIND WHICH VECTOR HAS MINIMUM LENGTH, P01-PCP OR P02-PCP */
   pcpx=rcp*coslat*coslon;
   pcpy=-rcp*coslat*sinlon;
   pcpz=rcp*sinlat;
   dif1[0]=x1-pcpx;
   dif1[1]=y1-pcpy;
   dif1[2]=z1-pcpz;
   dif2[0]=x2-pcpx;
   dif2[1]=y2-pcpy;
   dif2[2]=z2-pcpz;
   rad1=dif1[0]*dif1[0]+dif1[1]*dif1[1]+dif1[2]*dif1[2];
   rad2=dif2[0]*dif2[0]+dif2[1]*dif2[1]+dif2[2]*dif2[2];
   if(rad1 <= rad2)
     {
/* POINT 1 IS VALID */
     rlon=TWO_PI-atan2(y1,x1);
     rlon=orthodmod(rlon+TWO_PI,TWO_PI);
     rlat=(atan(fabs(z1)/sqrt(x1*x1+y1*y1)));
     rlat=orthodsign(rlat,z1);
     *lat=rlat;
     *longi=rlon;
     return 0;
     }
/* POINT 2 IS VALID */
   rlon=TWO_PI-atan2(y2,x2);
   rlon=orthodmod(rlon+TWO_PI,TWO_PI);
   rlat=(atan(fabs(z2)/sqrt(x2*x2+y2*y2)));
   rlat=orthodsign(rlat,z2);
   *lat=rlat;
   *longi=rlon;
   return 0;
}
|
#!/bin/bash

# Bootstraps the host directories required by the antinex nginx +
# Let's Encrypt stack, then starts it with docker-compose.
# Exits 1 if any required directory cannot be created.

domain="antinex.com"
imagename="certbot-nginx"
full_imagename="jayjohnson/${imagename}"
archive_dir="/opt/antinex/archive"
certs_dir="/opt/antinex/certs/release"
lets_encrypt_dir="/opt/antinex/letsencrypt/${domain}"
shared_dir="/opt/antinex/shared"
splunk_dir="/opt/antinex/splunk"
static_dir="/opt/antinex/static"
web_dir="/opt/antinex/web"

# ensure_dir <description> <path>
# Creates <path> (mode 777, parents included) when missing; exits 1 on failure.
ensure_dir() {
    local description="$1"
    local dir_path="$2"
    if [[ ! -d ${dir_path} ]]; then
        echo "creating ${description}: ${dir_path}"
        mkdir -p -m 777 ${dir_path}
        if [[ "$?" != "0" ]]; then
            echo "Failed creating directory: ${dir_path}"
            echo ""
            echo "mkdir -p -m 777 ${dir_path}"
            echo ""
            exit 1
        fi
    fi
}

ensure_dir "archive directory" ${archive_dir}
ensure_dir "x509 certs directory" ${certs_dir}
ensure_dir "lets encrypt directory" ${lets_encrypt_dir}
ensure_dir "shared directory" ${shared_dir}
# fix: this previously created the literal path /opt/shared/logs instead of
# the configured ${shared_dir}/logs
ensure_dir "shared logs directory" ${shared_dir}/logs
ensure_dir "optional splunk assets directory" ${splunk_dir}
ensure_dir "static directory" ${static_dir}
ensure_dir "static assets directory" ${web_dir}

echo ""
echo "Starting nginx docker container with Let's Encrypt"
echo ""
echo " - Archive dir: ${archive_dir}"
echo " - x509s Certs dir: ${certs_dir}"
echo " - Lets Encrypt dir: ${lets_encrypt_dir}"
echo " - Shared dir: ${shared_dir}"
echo " - Optional Splunk dir: ${splunk_dir}"
echo " - Static Assets Splunk dir: ${web_dir}"
echo ""
echo "On success the certs are stored in dir: ${lets_encrypt_dir}"
echo ""

docker-compose -f docker-compose.yml up -d

exit 0
|
{-# LANGUAGE OverloadedStrings #-}
module HERMIT.API.Dictionary.Debug where
import Data.Aeson
import HERMIT.API.Types
-- | Emit a side-effect message as output when processing this command.
trace :: String -> Rewrite LCoreTC
trace msg = Transform (method "trace" [toJSON msg])
-- | Emit a side-effect message as output, and observe the value being processed.
observe :: String -> Rewrite LCoreTC
observe msg = Transform (method "observe" [toJSON msg])
{-|
Emit a side-effect message if the rewrite fails, including the failing input.
-}
observeFailure :: String -> Rewrite LCoreTC -> Rewrite LCoreTC
observeFailure msg rr = Transform (method "observeFailure" args)
  where args = [toJSON msg, toJSON rr]
-- | If the given rewrite succeeds, show both its input and its output.
bracket :: String -> Rewrite LCoreTC -> Rewrite LCoreTC
bracket msg rr = Transform (method "bracket" [toJSON msg, toJSON rr])
|
package co.temy.securitysample.extentions
import android.content.res.Resources
import android.support.v4.content.res.ResourcesCompat
/**
 * Resolves a drawable via [ResourcesCompat] so lookups work consistently on
 * all supported API levels. Returns null when the resource cannot be loaded.
 */
fun Resources.getDrawableCompat(id: Int, theme: Resources.Theme? = null) = ResourcesCompat.getDrawable(this, id, theme)
/**
 * Resolves a color via [ResourcesCompat] so lookups work consistently on
 * all supported API levels, optionally against an explicit [theme].
 */
fun Resources.getColorCompat(id: Int, theme: Resources.Theme? = null): Int {
    return ResourcesCompat.getColor(this, id, theme)
}
|
import React from 'react'
import { useEditorState, useRefEditorState } from '../../editor/store/store-hook'
import { usePropControlledRef_DANGEROUS } from '../../inspector/common/inspector-utils'
import { getControlStyles, SelectOption, Utils } from '../../../uuiui-deps'
import * as EP from '../../../core/shared/element-path'
import * as EditorActions from '../../editor/actions/action-creators'
import { UIGridRow } from '../../inspector/widgets/ui-grid-row'
import { PopupList } from '../../../uuiui'
import { JSXElementName, jsxElementNameEquals } from '../../../core/shared/element-template'
import { getElementsToTarget } from '../../inspector/common/inspector-utils'
import { Imports } from '../../../core/shared/project-file-types'
import {
getComponentGroupsAsSelectOptions,
InsertableComponent,
} from '../../../components/shared/project-components'
import { usePossiblyResolvedPackageDependencies } from '../../../components/editor/npm-dependency/npm-dependency'
import { MetadataUtils } from '../../../core/model/element-metadata-utils'
// Inspector row ("Render as") that lets the user replace the element type of
// the currently selected element with any insertable component from the
// project's component groups.
export const RenderAsRow = React.memo(() => {
  // Editor dispatch used to fire the element-rename actions.
  const dispatch = useEditorState((store) => {
    return store.dispatch
  }, 'RenderAsRow dispatch')
  // JSX element name of the first selected view (only the primary selection
  // drives what this row displays).
  const selectedElementName = useEditorState((store) => {
    return MetadataUtils.getJSXElementNameFromMetadata(
      store.editor.jsxMetadata,
      store.editor.selectedViews[0],
    )
  }, 'RenderAsRow selectedElementName')
  // Ref (not state) so the change callback always sees the latest targets
  // without re-creating the callback on every selection change.
  const refElementsToTargetForUpdates = useRefEditorState((store) => {
    return getElementsToTarget(store.editor.selectedViews)
  })
  // Dispatch one updateJSXElementName action per targeted element.
  const onElementTypeChange = React.useCallback(
    (newElementName: JSXElementName, importsToAdd: Imports) => {
      const actions = refElementsToTargetForUpdates.current.flatMap((path) => {
        return EditorActions.updateJSXElementName(path, newElementName, importsToAdd)
      })
      dispatch(actions, 'everyone')
    },
    [dispatch, refElementsToTargetForUpdates],
  )
  // Adapter from the PopupList's SelectOption to the rename callback.
  const onSelect = React.useCallback(
    (selectOption: SelectOption) => {
      const value: InsertableComponent = selectOption.value
      onElementTypeChange(value.element.name, value.importsToAdd)
    },
    [onElementTypeChange],
  )
  const dependencies = usePossiblyResolvedPackageDependencies()
  // Everything needed to compute the insertable component groups.
  const { packageStatus, propertyControlsInfo, projectContents, fullPath } = useEditorState(
    (store) => {
      return {
        packageStatus: store.editor.nodeModules.packageStatus,
        propertyControlsInfo: store.editor.propertyControlsInfo,
        projectContents: store.editor.projectContents,
        fullPath: store.editor.canvas.openFile?.filename ?? null,
      }
    },
    'RenderAsRow',
  )
  // Grouped select options; empty when no file is open (fullPath == null).
  const insertableComponents = React.useMemo(() => {
    if (fullPath == null) {
      return []
    } else {
      return getComponentGroupsAsSelectOptions(
        packageStatus,
        propertyControlsInfo,
        projectContents,
        dependencies,
        fullPath,
      )
    }
  }, [packageStatus, propertyControlsInfo, projectContents, dependencies, fullPath])
  // Finds the option matching the selected element's current name, so the
  // popup shows the current value; undefined when nothing matches.
  const currentInsertableComponent: SelectOption | undefined = React.useMemo(() => {
    if (selectedElementName != null) {
      const nameToSearchFor: JSXElementName = selectedElementName
      for (const selectOptionGroup of insertableComponents) {
        for (const selectOption of selectOptionGroup.options ?? []) {
          const insertableComponent: InsertableComponent = selectOption.value
          if (insertableComponent != null) {
            if (jsxElementNameEquals(insertableComponent.element.name, nameToSearchFor)) {
              return selectOption
            }
          }
        }
      }
    }
    return undefined
  }, [insertableComponents, selectedElementName])
  return (
    <UIGridRow padded={true} variant='<---1fr--->|------172px-------|'>
      <span
        style={{
          whiteSpace: 'nowrap',
          overflow: 'hidden',
          textOverflow: 'ellipsis',
        }}
      >
        Render as
      </span>
      {/* Only render the picker when there is something to pick. */}
      {insertableComponents.length > 0 ? (
        <PopupList
          disabled={false}
          value={currentInsertableComponent}
          onSubmitValue={onSelect}
          options={insertableComponents}
          containerMode='default'
        />
      ) : null}
    </UIGridRow>
  )
})
|
package com.vicpin.krealmextensions
import android.support.test.runner.AndroidJUnit4
import com.google.common.truth.Truth
import com.vicpin.krealmextensions.model.TestEntity
import com.vicpin.krealmextensions.model.TestEntityAutoPK
import com.vicpin.krealmextensions.model.TestEntityPK
import com.vicpin.krealmextensions.util.TestRealmConfigurationFactory
import io.realm.Realm
import io.realm.Sort
import org.junit.After
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.runner.RunWith
import java.util.concurrent.CountDownLatch
/**
 * Created by victor on 10/1/17.
 * Test based on global functions
 *
 * Integration tests for the top-level KRealmExtensions API (save/query/delete
 * global functions) against a per-test Realm instance.
 */
@RunWith(AndroidJUnit4::class)
class KRealmExtensionsGlobalTests {

    @get:Rule
    var configFactory = TestRealmConfigurationFactory()

    lateinit var realm: Realm
    lateinit var latch: CountDownLatch
    var latchReleased = false

    @Before
    fun setUp() {
        val realmConfig = configFactory.createConfiguration()
        realm = Realm.getInstance(realmConfig)
        latch = CountDownLatch(1)
    }

    @After
    fun tearDown() {
        deleteAll<TestEntity>()
        deleteAll<TestEntityPK>()
        deleteAll<TestEntityAutoPK>()
        realm.close()
        latchReleased = false
    }

    /**
     * PERSISTENCE TESTS
     */
    @Test
    fun testPersistEntityWithSaveMethodManaged() {
        val result = TestEntity().saveManaged(realm) //No exception expected
        // Fix: assertThat(...) alone builds a subject but never asserts;
        // terminate with isTrue() (matching the PK variant below).
        Truth.assertThat(result.isManaged).isTrue()
        Truth.assertThat(count<TestEntity>(realm)).isEqualTo(1)
    }

    @Test
    fun testPersistPKEntityWithSaveMethodManaged() {
        val result = TestEntityPK(1).saveManaged(realm) //No exception expected
        Truth.assertThat(result.isManaged).isTrue()
        Truth.assertThat(count<TestEntityPK>(realm)).isEqualTo(1)
    }

    @Test
    fun testCountPKEntity() {
        val list = listOf(TestEntityPK(1), TestEntityPK(2), TestEntityPK(3))
        list.saveAll()
        Truth.assertThat(count<TestEntityPK>()).isEqualTo(3)
    }

    @Test
    fun testCountDuplicatePKEntity() {
        // Saving the same primary key three times must upsert, not insert.
        val list = listOf(TestEntityPK(1), TestEntityPK(1), TestEntityPK(1))
        list.saveAll()
        Truth.assertThat(count<TestEntityPK>()).isEqualTo(1)
    }

    @Test
    fun testCountEntity() {
        val list = listOf(TestEntity(), TestEntity(), TestEntity())
        list.saveAll()
        Truth.assertThat(count<TestEntity>()).isEqualTo(3)
    }

    /**
     * PERSISTENCE TEST WITH AUTO PRIMARY KEY
     */
    @Test
    fun testPersistAutoPKEntityWithSaveMethodShouldHavePK() {
        TestEntityAutoPK().save()
        Truth.assertThat(count<TestEntityAutoPK>()).isEqualTo(1)
        Truth.assertThat(queryLast<TestEntityAutoPK>()?.id).isEqualTo(1)
        TestEntityAutoPK().save()
        Truth.assertThat(count<TestEntityAutoPK>()).isEqualTo(2)
        Truth.assertThat(queryLast<TestEntityAutoPK>()?.id).isEqualTo(2)
        TestEntityAutoPK().save()
        Truth.assertThat(count<TestEntityAutoPK>()).isEqualTo(3)
        Truth.assertThat(queryLast<TestEntityAutoPK>()?.id).isEqualTo(3)
    }

    @Test
    fun testPersistAutoPkEntityWithPkShouldNotBeOverrided() {
        // Explicit ids must win over the auto-increment sequence.
        TestEntityAutoPK(4, "").save()
        Truth.assertThat(count<TestEntityAutoPK>()).isEqualTo(1)
        Truth.assertThat(queryLast<TestEntityAutoPK>()?.id).isEqualTo(4)
        TestEntityAutoPK(10, "").save()
        Truth.assertThat(count<TestEntityAutoPK>()).isEqualTo(2)
        Truth.assertThat(queryLast<TestEntityAutoPK>()?.id).isEqualTo(10)
        TestEntityAutoPK(12, "").save()
        Truth.assertThat(count<TestEntityAutoPK>()).isEqualTo(3)
        Truth.assertThat(queryLast<TestEntityAutoPK>()?.id).isEqualTo(12)
    }

    @Test
    fun testPersistAutoPKEntityWithSaveManagedMethod() {
        val result = TestEntityAutoPK().saveManaged(realm)
        // Fix: same dangling assertThat as testPersistEntityWithSaveMethodManaged.
        Truth.assertThat(result.isManaged).isTrue()
        Truth.assertThat(count<TestEntityAutoPK>(realm)).isEqualTo(1)
    }

    @Test
    fun testPersistAutoPKEntityListWithSaveMethod() {
        val list = listOf(TestEntityAutoPK(), TestEntityAutoPK(), TestEntityAutoPK())
        list.saveAll()
        Truth.assertThat(count<TestEntityAutoPK>()).isEqualTo(3)
        Truth.assertThat(queryFirst<TestEntityAutoPK>()?.id).isEqualTo(1)
        Truth.assertThat(queryLast<TestEntityAutoPK>()?.id).isEqualTo(3)
    }

    @Test
    fun testPersistAutoPKEntityArrayWithSaveMethod() {
        val list = arrayOf(TestEntityAutoPK(), TestEntityAutoPK(), TestEntityAutoPK())
        list.saveAll()
        Truth.assertThat(count<TestEntityAutoPK>()).isEqualTo(3)
        Truth.assertThat(queryFirst<TestEntityAutoPK>()?.id).isEqualTo(1)
        Truth.assertThat(queryLast<TestEntityAutoPK>()?.id).isEqualTo(3)
    }

    @Test
    fun testPersistAutoPKEntityListWithSaveManagedMethod() {
        val list = listOf(TestEntityAutoPK(), TestEntityAutoPK(), TestEntityAutoPK())
        list.saveAllManaged(realm)
        Truth.assertThat(count<TestEntityAutoPK>(realm)).isEqualTo(3)
    }

    @Test
    fun testPersistAutoPKEntityArrayWithSavemanagedMethod() {
        val list = arrayOf(TestEntityAutoPK(), TestEntityAutoPK(), TestEntityAutoPK())
        list.saveAllManaged(realm)
        Truth.assertThat(count<TestEntityAutoPK>(realm)).isEqualTo(3)
    }

    @Test
    fun testUpdateEntity() {
        TestEntity("test").save()
        TestEntity().queryAndUpdate({ equalTo("name", "test") }) {
            it.name = "updated"
        }
        val result = queryFirst<TestEntity> { equalTo("name", "updated") }
        Truth.assertThat(result).isNotNull()
        Truth.assertThat(result?.name).isEqualTo("updated")
    }

    @Test
    fun testUpdatePKEntity() {
        TestEntityPK(1, "test").save()
        TestEntityPK().queryAndUpdate({ equalTo("name", "test") }) {
            it.name = "updated"
        }
        val result = queryFirst<TestEntityPK> { equalTo("name", "updated") }
        Truth.assertThat(result).isNotNull()
        Truth.assertThat(result?.name).isEqualTo("updated")
    }

    /**
     * QUERY TESTS WITH EMPTY DB
     */
    @Test
    fun testQueryFirstObjectWithEmptyDBShouldReturnNull() {
        Truth.assertThat(queryFirst<TestEntity>()).isNull()
    }

    @Test
    fun testAsyncQueryFirstObjectWithEmptyDBShouldReturnNull() {
        block {
            queryFirstAsync<TestEntity> { Truth.assertThat(it).isNull(); release() }
        }
    }

    @Test
    fun testQueryLastObjectWithEmptyDBShouldReturnNull() {
        Truth.assertThat(queryLast<TestEntity>()).isNull()
    }

    @Test
    fun testQueryLastObjectWithConditionAndEmptyDBShouldReturnNull() {
        Truth.assertThat(queryLast<TestEntity> { equalTo("name", "test") }).isNull()
    }

    @Test
    fun testAsyncQueryLastObjectWithEmptyDBShouldReturnNull() {
        block {
            queryLastAsync<TestEntity> { Truth.assertThat(it).isNull(); release() }
        }
    }

    @Test
    fun testAllItemsShouldReturnEmptyCollectionWhenDBIsEmpty() {
        Truth.assertThat(queryAll<TestEntity>()).hasSize(0)
    }

    @Test
    fun testAllItemsAsyncShouldReturnEmptyCollectionWhenDBIsEmpty() {
        block {
            queryAllAsync<TestEntity> { Truth.assertThat(it).hasSize(0); release() }
        }
    }

    @Test
    fun testQueryConditionalWhenDBIsEmpty() {
        val result = query<TestEntity> { equalTo("name", "test") }
        Truth.assertThat(result).hasSize(0)
    }

    @Test
    fun testQueryFirstItemWhenDBIsEmpty() {
        // Fix: method was named "etestQueryFirstItemWhenDBIsEmpty" — a typo
        // that broke the file's test-naming convention.
        val result = queryFirst<TestEntity> { equalTo("name", "test") }
        Truth.assertThat(result).isNull()
    }

    @Test
    fun testQuerySortedWhenDBIsEmpty() {
        val result = querySorted<TestEntity>("name", Sort.ASCENDING) { equalTo("name", "test") }
        Truth.assertThat(result).hasSize(0)
    }

    /**
     * QUERY TESTS WITH POPULATED DB
     */
    @Test
    fun testQueryFirstItemShouldReturnFirstItemWhenDBIsNotEmpty() {
        populateDBWithTestEntityPK(numItems = 5)
        Truth.assertThat(queryFirst<TestEntityPK>()).isNotNull()
        Truth.assertThat(queryFirst<TestEntityPK>()?.id).isEqualTo(0)
    }

    @Test
    fun testAsyncQueryFirstItemShouldReturnFirstItemWhenDBIsNotEmpty() {
        populateDBWithTestEntityPK(numItems = 5)
        block {
            queryFirstAsync<TestEntityPK> {
                Truth.assertThat(it).isNotNull()
                Truth.assertThat(it?.id).isEqualTo(0)
                release()
            }
        }
    }

    @Test
    fun testQueryLastItemShouldReturnLastItemWhenDBIsNotEmpty() {
        populateDBWithTestEntityPK(numItems = 5)
        Truth.assertThat(queryLast<TestEntityPK>()?.id).isEqualTo(4)
    }

    @Test
    fun testQueryLastItemWithConditionShouldReturnLastItemWhenDBIsNotEmpty() {
        populateDBWithTestEntityPK(numItems = 5)
        Truth.assertThat(queryLast<TestEntityPK> { equalToValue("id", 3) }?.id).isEqualTo(3)
    }

    @Test
    fun testAsyncQueryLastItemShouldReturnLastItemWhenDBIsNotEmpty() {
        populateDBWithTestEntityPK(numItems = 5)
        block {
            queryLastAsync<TestEntityPK> {
                Truth.assertThat(it).isNotNull()
                release()
            }
        }
    }

    @Test
    fun testQueryAllItemsShouldReturnAllItemsWhenDBIsNotEmpty() {
        populateDBWithTestEntity(numItems = 5)
        Truth.assertThat(queryAll<TestEntity>()).hasSize(5)
    }

    @Test
    fun testAsyncQueryAllItemsShouldReturnAllItemsWhenDBIsNotEmpty() {
        populateDBWithTestEntity(numItems = 5)
        block {
            queryAllAsync<TestEntity> { Truth.assertThat(it).hasSize(5); release() }
        }
    }

    @Test
    fun testQueryAllItemsAfterSaveCollection() {
        val list = listOf(TestEntityPK(1), TestEntityPK(2), TestEntityPK(3))
        list.saveAll()
        Truth.assertThat(queryAll<TestEntityPK>()).hasSize(3)
    }

    /**
     * QUERY TESTS WITH WHERE STATEMENT
     */
    @Test
    fun testWhereQueryShouldReturnExpectedItems() {
        populateDBWithTestEntityPK(numItems = 5)
        val results = query<TestEntityPK> { equalToValue("id", 1) }
        Truth.assertThat(results).hasSize(1)
        Truth.assertThat(results.first().id).isEqualTo(1)
        // Fix: removed a tautological assertion that compared
        // results.map { it.id } with itself — it could never fail.
        // The two asserts above already pin the expected result set.
    }

    @Test
    fun testAsyncWhereQueryShouldReturnExpectedItems() {
        populateDBWithTestEntityPK(numItems = 5)
        block {
            queryAsync<TestEntityPK>({
                equalToValue("id", 1)
            }) { results ->
                Truth.assertThat(results).hasSize(1)
                Truth.assertThat(results.first().id).isEqualTo(1)
                release()
            }
        }
    }

    @Test
    fun testWhereQueryShouldNotReturnAnyItem() {
        populateDBWithTestEntityPK(numItems = 5)
        val results = query<TestEntityPK> { equalToValue("id", 6) }
        Truth.assertThat(results).hasSize(0)
    }

    @Test
    fun testAsyncWhereQueryShouldNotReturnAnyItem() {
        populateDBWithTestEntityPK(numItems = 5)
        block {
            queryAsync<TestEntityPK>({ equalToValue("id", 6) }) { results ->
                Truth.assertThat(results).hasSize(0)
                release()
            }
        }
    }

    @Test
    fun testFirstItemWhenDbIsNotEmpty() {
        populateDBWithTestEntityPK(numItems = 5)
        val result = queryFirst<TestEntityPK> { equalToValue("id", 2) }
        Truth.assertThat(result).isNotNull()
        Truth.assertThat(result?.id).isEqualTo(2)
    }

    @Test
    fun testQueryAscendingShouldReturnOrderedResults() {
        populateDBWithTestEntityPK(numItems = 5)
        val result = querySorted<TestEntityPK>("id", Sort.ASCENDING)
        Truth.assertThat(result).hasSize(5)
        Truth.assertThat(result.first().id).isEqualTo(0)
        Truth.assertThat(result.last().id).isEqualTo(4)
    }

    @Test
    fun testQueryDescendingShouldReturnOrderedResults() {
        populateDBWithTestEntityPK(numItems = 5)
        val result = querySorted<TestEntityPK>("id", Sort.DESCENDING)
        Truth.assertThat(result).hasSize(5)
        Truth.assertThat(result.first().id).isEqualTo(4)
        Truth.assertThat(result.last().id).isEqualTo(0)
    }

    @Test
    fun testQueryDescendingWithFilterShouldReturnOrderedResults() {
        populateDBWithTestEntityPK(numItems = 5)
        val result = querySorted<TestEntityPK>("id", Sort.DESCENDING) {
            lessThan("id", 3).greaterThan("id", 0)
        }
        Truth.assertThat(result).hasSize(2)
        Truth.assertThat(result.first().id).isEqualTo(2)
        Truth.assertThat(result.last().id).isEqualTo(1)
    }

    /**
     * DELETION TESTS
     */
    @Test
    fun testDeleteEntities() {
        populateDBWithTestEntity(numItems = 5)
        deleteAll<TestEntity>()
        Truth.assertThat(TestEntity().queryAll()).hasSize(0)
        Truth.assertThat(queryAll<TestEntity>()).hasSize(0)
    }

    @Test
    fun testDeleteEntitiesWithPK() {
        populateDBWithTestEntityPK(numItems = 5)
        deleteAll<TestEntityPK>()
        Truth.assertThat(TestEntityPK().queryAll()).hasSize(0)
        Truth.assertThat(queryAll<TestEntityPK>()).hasSize(0)
    }

    @Test
    fun testDeleteEntitiesWithStatement() {
        populateDBWithTestEntityPK(numItems = 5)
        delete<TestEntityPK> { equalToValue("id", 1) }
        Truth.assertThat(queryAll<TestEntityPK>()).hasSize(4)
    }

    /**
     * UTILITY TEST METHODS
     */
    // Inserts numItems plain entities.
    private fun populateDBWithTestEntity(numItems: Int) {
        (0 until numItems).forEach { TestEntity().save() }
    }

    // Inserts numItems PK entities with ids 0..numItems-1.
    private fun populateDBWithTestEntityPK(numItems: Int) {
        (0 until numItems).forEach { TestEntityPK(it.toLong()).save() }
    }

    // Blocks the test thread until release() is called (unless it already was).
    private fun blockLatch() {
        if (!latchReleased) {
            latch.await()
        }
    }

    // Releases the current latch and re-arms a fresh one for the next block{}.
    private fun release() {
        latchReleased = true
        latch.countDown()
        latch = CountDownLatch(1)
    }

    // Runs an async closure and waits for it to call release().
    fun block(closure: () -> Unit) {
        latchReleased = false
        closure()
        blockLatch()
    }
}
|
namespace MvcGrabBag.Web.Caching
{
    /// <summary>
    /// Defines how an item is stored in the cache. The scope decides whether a
    /// cached value may be shared across users or must be kept per user.
    /// </summary>
    public enum CacheScope
    {
        /// <summary>
        /// Cache a unique copy of the item for each user
        /// </summary>
        User,

        /// <summary>
        /// Cache a single copy of the item for the entire application
        /// </summary>
        Application
    }
}
|
using DataStructures
using Logging
import Base.show
# package code goes here
Logging.configure(level=DEBUG)

# Tape / index / value aliases for the AD playground.
# NOTE: `typealias` syntax dates this file to Julia <= 0.5.
typealias TT_TYPE Array{UInt,1}
typealias IDX_TYPE UInt
typealias TV_TYPE Array{Real,1}

# Module init hook: announce load and (re)configure logging.
function __init__()
	println("loading MJPlayGround.jl")
	Logging.configure(level=DEBUG)
end

# Resets the global tape state: variable/parameter value stores, their
# next-free indices, and the tape itself.
function init()
	global vvals = TV_TYPE()
	global pvals = TV_TYPE()
	global vidx = 1
	global pidx = 1
	global tt = TT_TYPE()
end
#export
#	Var, Par, @test, @var, @test1

#the AD types below
# Tape node tags.
const TYPE_V = 1  #variable node
const TYPE_P = 2  #param node
const TYPE_OU = 3 #unary op
const TYPE_OB = 4 #binary op

# Common supertype for every tape placeholder.
abstract Placeholder

# Variable placeholder: records its tape position and node type.
# NOTE(review): `Uint` (lowercase i) is the pre-0.4 spelling of `UInt` —
# confirm which Julia version this targets.
immutable AD_V <: Placeholder
	idx::IDX_TYPE #index on tape
	t::Uint #type code
end

# Parameter placeholder.
immutable AD_P <: Placeholder
	idx::IDX_TYPE #index on tape
	t::Uint #type code
end

# Operation placeholder (unary or binary).
immutable AD_O <: Placeholder
	idx::IDX_TYPE #index on tape
	t::Uint #type code
end

# Record a new variable on the tape: push its value index and tag, store its
# value, bump the global variable counter, and return its placeholder.
function AD_V(tt::TT_TYPE,val=NaN)
	push!(tt,vidx)
	push!(tt,TYPE_V)
	push!(vvals,val)
	global vidx +=1
	this = AD_V(length(tt),TYPE_V)
	return this
end

# Record a new parameter on the tape (same layout as AD_V, param store).
function AD_P(tt::TT_TYPE,val=NaN)
	push!(tt,pidx)
	push!(tt,TYPE_P)
	push!(pvals,val)
	global pidx +=1
	this = AD_P(length(tt),TYPE_P)
	return this
end

# Record a unary operation: operand index, opcode, tag — in that push order,
# so the evaluator can pop them back-to-front.
function AD_O(tt::TT_TYPE,oc, lidx::UInt)
	push!(tt,lidx)
	push!(tt,oc)
	push!(tt,TYPE_OU)
	this = AD_O(length(tt),TYPE_OU)
	return this
end

# Record a binary operation: right index, left index, opcode, tag.
function AD_O(tt::TT_TYPE,oc, lidx::UInt, ridx::UInt)
	push!(tt,ridx)
	push!(tt,lidx)
	push!(tt,oc)
	push!(tt,TYPE_OB)
	this = AD_O(length(tt),TYPE_OB)
	return this
end

# Compact display forms for the three placeholder kinds.
function Base.show(io::IO,m::AD_V)
	print("AD_V[",m.idx,"]")
end

function Base.show(io::IO,m::AD_P)
	print("AD_P[",m.idx,"]")
end

function Base.show(io::IO,m::AD_O)
	# print("(",m.l)
	# print(string(getOpSym(m)))
	# if(!isnull(m.r))
	# 	print(m.r.value)
	# end
	# print(")")
	print("AD_O[",m.idx,"]")
end
#Operator overloading function for AD types
# Opcode table: positions 1-4 are binary ops, 5-6 unary.
const OP = (:+,:-,:*,:/,:sin,:cos)

# Maps an opcode back to its operator symbol.
function getOpSym(oc)
	return OP[oc]
end

const B_OP_START = 1
const B_OP_END = 4

# Generate binary operator methods (+,-,*,/) on Placeholder pairs: each call
# appends an AD_O node to the global tape `tt` and returns its placeholder.
for oc = B_OP_START:1:B_OP_END
	o = OP[oc]
	debug("setup operator ",o)
	eval(quote
		($o)(l::Placeholder,r::Placeholder) =
			begin
				debug("at",OP[$(oc)])
				this = AD_O(tt,$(oc),l.idx,r.idx)
				return this
			end
	end)
end

const U_OP_START = 5
const U_OP_END = 6

# Generate unary operator methods (sin, cos) on a single Placeholder.
for oc = U_OP_START:1:U_OP_END
	o = OP[oc]
	debug("setup operator ",o)
	eval(quote
		($o)(l::Placeholder) =
			begin
				debug("at",$(oc))
				this = AD_O(tt,$(oc),l.idx)
				return this
			end
	end)
end
# Walks the tape backwards from `idx`, reconstructing and evaluating the
# recorded expression. Returns (value, index-after-consuming-this-subtree).
# The push order in the AD_O constructors dictates the pop order here.
function evaluate(tt::TT_TYPE,idx::IDX_TYPE)
	println("enter - ",idx)
	ret = NaN
	if(idx != 0)
		ntype = tt[idx] #type node
		idx -= 1
		if(ntype == TYPE_P)
			# Parameter: read its stored value.
			ret = pvals[tt[idx]]
			idx -=1
		elseif(ntype == TYPE_V)
			# Variable: read its stored value.
			ret = vvals[tt[idx]]
			idx -= 1
		elseif(ntype == TYPE_OB)
			# Binary op: opcode, then left and right operand tape indices.
			oc = tt[idx]
			assert(B_OP_START<= oc <= B_OP_END)
			idx -= 1
			lidx = tt[idx]
			idx -= 1
			ridx = tt[idx]
			idx -= 1
			println("before left - ",lidx)
			(lval,idx) = evaluate(tt,lidx)
			println("before right - ",idx)
			(rval,idx) = evaluate(tt,ridx)
			ret = evaluate(oc,lval,rval)
		else
			# Unary op: opcode then single operand index.
			oc = tt[idx]
			assert(U_OP_START <= oc <= U_OP_END)
			idx -= 1
			lidx = tt[idx]
			idx -= 1
			println("unary before - ",idx)
			(lval,idx) = evaluate(tt,lidx)
			ret = evaluate(oc,lval)
		end
		println("now - ",idx)
	end
	assert(!isnan(ret))
	println("exit - ",idx)
	return ret,idx
end

# Applies a binary opcode to two concrete values by building and eval'ing
# the corresponding call expression.
function evaluate(oc::UInt,lval::Real,rval::Real)
	assert(B_OP_START<= oc <= B_OP_END)
	ex = Expr(:call,OP[oc],lval,rval)
	println("evaluate:",ex)
	return eval(ex)
end

# Applies a unary opcode to one concrete value.
function evaluate(oc::UInt,lval::Real)
	assert(U_OP_START<= oc <= U_OP_END)
	ex = Expr(:call,OP[oc],lval)
	println("evaluate:",ex)
	return eval(ex)
end

## Interface method
# Evaluates the whole tape; a fully-consumed tape must end at index 0.
function feval(tt::TT_TYPE)
	(val,idx) = evaluate(tt,convert(UInt,length(tt)))
	assert(idx == 0)
	return val
end
#macro rewrite(expr)
#	print(expr)
#	debug("rewrite ",expr)
#	return :(4+3)
#end

# Logs the declared variable and bumps the global variable counter.
macro var(x)
	quote
		# Fix: was `$(esc(x)i)` — a stray `i` inside the interpolation made
		# this a syntax error at macro-expansion time.
		debug("var - ",$(esc(x)))
		#v = Var($global_idx)
		#$(esc(global_idx)) += 1
		global global_idx += 1
		#push!(s,v)
	end
end
# Scratch debug macro: prints the number of arguments and dumps the first.
macro test(args...)
	println("---",length(args))
	arg1 = args[1]
	arg2 = args[2]
	code = quote
		# Fix: was `$(esc(m))` — `m` is not defined anywhere, so expanding
		# the macro failed. Bind the second captured argument instead.
		local a = $(esc(arg2))
		#local b = $expr;
		local c = $(esc(arg1))
		dump($(esc(arg1)))
	end
	return code
end
# Sample expression used while experimenting with the macros below.
ex = :(3+3)

# Experimental: builds a quoted block; most of the body is commented-out
# scratch work exploring esc()/interpolation behavior.
macro test1(ex)
	code =:()
	code = :($code;ex=3
		#dump($(esc(ex)))
		#println($(esc(ex)))
		#a = $(esc(ex))
		#dump($ex)
		#println(eval($(ex)))
		#$(ex), $(esc(ex))
		)
	return code
end

# Splices the escaped varargs into a block that also binds a local x.
macro test2(ex...)
	quote
		x = 1
		$(esc(ex...))
		nothing
	end
end

# Prints the argument list of an Expr argument, then evaluates it at
# macro-expansion time (note: eval here runs during expansion, not at runtime).
macro p(n)
	if typeof(n) == Expr
		println(n.args)
	end
	eval(n)
end

# Demonstrates hygiene: returns the hygienic `s` alongside the escaped `s`.
macro f(x)
	quote
		s = 4
		(s, $(esc(s)))
	end
end

# Repeats `body` n times, escaping both into the caller's scope.
macro dotimes(n,body)
	quote
		for i=1:$(esc(n))
			$(esc(body))
		end
	end
end
|
@extends('master')

@section('content')
{{-- Form for submitting a new quote. --}}
{{-- NOTE(review): this POST form has no CSRF field; Laravel's VerifyCsrfToken
     middleware would reject it with a 419 unless disabled — confirm. --}}
<h2>New Quote</h2>
<form method="post">
    <div class="form-group">
        <label for="person">Person</label>
        <input type="text" class="form-control" id="person" name="person" placeholder="Name">
    </div>
    <div class="form-group">
        <label for="quote">Quote</label>
        <input type="text" class="form-control" id="quote" name="quote" placeholder="Quote...">
    </div>
    <button type="submit" class="btn btn-success">Submit</button>
</form>

{{-- Listing of all existing quotes, one row each. --}}
<h2>Quotes</h2>
<table class="table table-bordered table-striped">
    @foreach($quotes as $quote)
        <tr>
            <td>{{ $quote->date() }}</td>
            <td>{{ $quote->person }}</td>
            <td>{{ $quote->quote }}</td>
        </tr>
    @endforeach
</table>
@endsection
|
// DO WHATEVER YOU WANT HERE
// Identity helper: hands back the supplied property key unchanged.
const createEnumerableProperty = (index) => index;
// Kata solution: makes `index` a non-enumerable accessor on Object.prototype.
// WARNING: this deliberately pollutes Object.prototype, and all instances
// share the single backing slot Object.prototype._value.
const createNotEnumerableProperty = (index) => {
  Object.defineProperty(Object.prototype, index, {
    get: function(){
      return Object.prototype._value;
    },
    set: function(val){
      Object.prototype._value = val
    }
  })
  return index;
};
// Function is its own constructor (Function.constructor === Function), which
// satisfies the "proto magic" requirement — so simply return it.
const createProtoMagicObject = () => Function;
// Returns a callable counter. Calling the returned function increments the
// shared count and returns itself (so calls chain); coercing it to a number
// (via valueOf) also increments and yields the current count. State lives on
// incrementor.count, shared across all counters from this factory.
const incrementor = () => {
  incrementor.count = incrementor.count ? incrementor.count : 1;
  function counter() {
    incrementor.count++;
    return counter;
  }
  counter.valueOf = function() {
    return incrementor.count++;
  }
  return counter;
};
// Resolves with how many times this helper has been invoked so far; the
// counter persists on the function object itself.
const asyncIncrementor = () => {
  asyncIncrementor.count = asyncIncrementor.count ? asyncIncrementor.count : 0;
  return new Promise((resolve) => {
    asyncIncrementor.count += 1;
    resolve(asyncIncrementor.count);
  });
};
// Returns a generator yielding 1, 2, ..., n-1 (default n = 100).
// Fix: the loop variable was assigned without declaration, leaking an
// implicit global `i` (and throwing ReferenceError in strict mode).
const createIncrementer = () => {
  function *iterator(n = 100) {
    for (let i = 1; i < n; i++) {
      yield i;
    }
  }
  return iterator();
};
// return same argument not earlier than in one second, and not later, than in two
// Echoes `res` back asynchronously: a 1000ms timer lands inside the 1–2s window.
const returnBackInSecond = (res) =>
  new Promise((resolve) => {
    setTimeout(() => {
      resolve(res);
    }, 1000);
  });
// Counts own properties of `obj` recursively (properties of property values
// included). Fix: the original took no parameter but referenced a free `obj`,
// and its recursive call passed an argument that was then ignored — so it
// either threw ReferenceError or recursed forever on the same object.
const getDeepPropertiesCount = (obj) => {
  let count = 0;
  let arrProps = Object.getOwnPropertyNames(obj);
  // returns an array corresponding to the properties found directly in obj
  count += arrProps.length;
  for(let i = 0; i < arrProps.length; i++) {
    if (Object.getOwnPropertyNames(obj[arrProps[i]]).length > 0) {
      count += getDeepPropertiesCount(obj[arrProps[i]]);//recursive invoke
    }
  }
  return count;
};
// Kata hack: globally replaces JSON.stringify/JSON.parse with identity
// functions so any object "round-trips" unchanged. WARNING: this breaks real
// JSON handling for the rest of the process.
const createSerializedObject = () => {
  JSON.stringify = (obj) => obj;
  JSON.parse = (obj) => obj;
  return {};
};
// Unimplemented kata stub: intentionally returns undefined.
const toBuffer = () => {};
// Sorts (in place) by the length of each element's prototype chain, shortest
// first; ties keep their relative order (Array#sort is stable in modern V8).
const sortByProto = (array) => {
  const chainLength = (value) => {
    let depth = 0;
    let node = value;
    while (node.__proto__) {
      depth += 1;
      node = node.__proto__;
    }
    return depth;
  };
  return array.sort((left, right) => chainLength(left) - chainLength(right));
};
exports.createEnumerableProperty = createEnumerableProperty;
exports.createNotEnumerableProperty = createNotEnumerableProperty;
exports.createProtoMagicObject = createProtoMagicObject;
exports.incrementor = incrementor;
exports.asyncIncrementor = asyncIncrementor;
exports.createIncrementer = createIncrementer;
exports.returnBackInSecond = returnBackInSecond;
exports.getDeepPropertiesCount = getDeepPropertiesCount;
exports.createSerializedObject = createSerializedObject;
exports.sortByProto = sortByProto;
|
module Frikandel
  # Controller concern that binds a session to the client IP address it was
  # created from. A request whose IP no longer matches the one stored in the
  # session triggers +on_invalid_session+ (from SessionInvalidation) — a
  # defense against session hijacking.
  module BindSessionToIpAddress
    extend ActiveSupport::Concern
    include SessionInvalidation

    included do
      # Rails >= 4 exposes *_action callbacks; fall back to the legacy
      # *_filter API on older Rails.
      if respond_to?(:before_action)
        append_before_action :validate_session_ip_address
      else
        append_before_filter :validate_session_ip_address
      end
    end

  private

    # Three cases: stored IP mismatches -> invalidate; no stored IP yet ->
    # reset (which also stores the current IP); stored IP matches -> refresh.
    def validate_session_ip_address
      if session.key?(:ip_address) && !ip_address_match_with_current?
        on_invalid_session
      elsif !session.key?(:ip_address)
        reset_session
      else # session ip address is valid
        persist_session_ip_address
      end
    end

    # Writes the requester's IP into the session.
    def persist_session_ip_address
      session[:ip_address] = current_ip_address
    end

    # IP of the current request as Rails resolved it.
    def current_ip_address
      request.remote_ip
    end

    def ip_address_match_with_current?
      session[:ip_address] == current_ip_address
    end

    # Extends Rails' reset_session so every fresh session is immediately
    # bound to the current IP.
    def reset_session
      super
      persist_session_ip_address
    end
  end
end
|
<?php
namespace App\Http\Controllers;
use App\Helpers\Helper;
use App\Helpers\Status;
use App\Http\Requests\StoreBadgeRequest;
use App\Http\Requests\UpdateBadgeRequest;
use Illuminate\Http\JsonResponse;
use Illuminate\Http\Request;
class BadgeController extends Controller
{
    /**
     * Display a listing of the resource.
     *
     * Returns only ACTIVE badges, newest first, paginated.
     *
     * @return \Illuminate\Http\Response
     */
    public function index()
    {
        $badges = \App\Badge::where('state', Status::ACTIVE)
            ->orderByDesc('created_at')
            ->paginate();

        if ($badges->count()) {
            $badges->makeHidden(['updated_at']);
        }

        return response()->json($badges, JsonResponse::HTTP_OK);
    }

    /**
     * Resolve an encrypted route id to a Badge model.
     *
     * Extracted helper: show/update/destroy previously duplicated this
     * decrypt-and-validate sequence verbatim. Throws a generic \Exception
     * (handled by each caller's try/catch, as before) when the id does not
     * decrypt to a positive integer or no badge matches.
     *
     * @param  string $id Encrypted badge id from the route.
     * @return \App\Badge
     * @throws \Exception
     */
    private function findBadgeOrFail($id)
    {
        $message = 'Something went wrong please try again later!';

        $badgeId = decrypt($id) ?? null;
        if (!intval($badgeId)) {
            throw new \Exception($message);
        }

        $badge = \App\Badge::find($badgeId);
        if ($badge == null) {
            throw new \Exception($message);
        }

        return $badge;
    }

    /**
     * Display the specified resource.
     *
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function show($id)
    {
        try {
            $badge = $this->findBadgeOrFail($id);

            return response()->json(
                $badge->setHidden(['updated_at']),
                JsonResponse::HTTP_OK
            );
        } catch (\Throwable $th) {
            return response()->json(
                ['message' => $th->getMessage()],
                JsonResponse::HTTP_UNPROCESSABLE_ENTITY
            );
        }
    }

    /**
     * Store a newly created resource in storage.
     *
     * @param  \App\Http\Requests\StoreBadgeRequest  $request
     * @return \Illuminate\Http\Response
     */
    public function store(StoreBadgeRequest $request)
    {
        try {
            $badge = new \App\Badge();
            $badge->first_name = $request->firstName;
            $badge->last_name = $request->lastName;
            $badge->email = $request->email;
            $badge->job_title = $request->jobTitle;
            $badge->twitter = $request->twitter;
            // Fall back to a generated avatar when none was supplied.
            $badge->avatar_url = Helper::getAvatar(
                $request->input('avatarUrl', null),
                $request->email
            );
            $badge->save();

            // The badge has been created correctly!
            return response()->json(
                $badge->setHidden(['updated_at']),
                JsonResponse::HTTP_OK
            );
        } catch (\Throwable $th) {
            return response()->json(
                ['message' => $th->getMessage()],
                JsonResponse::HTTP_UNPROCESSABLE_ENTITY
            );
        }
    }

    /**
     * Update the specified resource in storage.
     *
     * @param  \App\Http\Requests\UpdateBadgeRequest  $request
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function update(UpdateBadgeRequest $request, $id)
    {
        try {
            $badge = $this->findBadgeOrFail($id);

            $badge->first_name = $request->firstName;
            $badge->last_name = $request->lastName;
            $badge->email = $request->email;
            $badge->job_title = $request->jobTitle;
            $badge->twitter = $request->twitter;
            // Only regenerate the avatar when the email actually changed.
            if ($badge->isDirty('email')) {
                $badge->avatar_url = Helper::getAvatar(
                    $request->input('avatarUrl', null),
                    $request->email
                );
            }
            if ($badge->isDirty()) {
                $badge->update();
            }

            return response()->json(
                $badge->setHidden(['updated_at']),
                JsonResponse::HTTP_OK
            );
        } catch (\Throwable $th) {
            return response()->json(
                ['message' => $th->getMessage()],
                JsonResponse::HTTP_UNPROCESSABLE_ENTITY
            );
        }
    }

    /**
     * Remove the specified resource from storage (soft removal: the badge is
     * flagged DISABLED rather than deleted).
     *
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function destroy($id)
    {
        try {
            $badge = $this->findBadgeOrFail($id);

            $badge->state = Status::DISABLED;
            $badge->update();

            return response()->json(
                ['message' => 'The badge has been removed successfully!'],
                JsonResponse::HTTP_OK
            );
        } catch (\Throwable $th) {
            return response()->json(
                ['message' => $th->getMessage()],
                JsonResponse::HTTP_UNPROCESSABLE_ENTITY
            );
        }
    }
}
|
PROGRAM decay_chain
! The following line below may be used to compile this program:
! gfortran -o decay_chain decay_chain.f -ffree-form
implicit none

integer, parameter :: max_size = 2000
integer, parameter :: output = 7
integer :: i
real :: hl_1,hl_2
real :: lambda_1,lambda_2
real :: N_0
real :: time(max_size)
real :: N_1_pop(max_size)
real :: N_2_pop(max_size)
real :: N_3_pop(max_size)
real, dimension(max_size,4) :: results

!Prompts the user for the half lives of parent and daughter and then reads them in
write (*,*) 'Enter half lives of parent and daughter nuclei (in days): '
read (*,*) hl_1, hl_2

!Prints back out user specified info for reference
write (*,*) 'The half lives are: ', hl_1, ' days and ', hl_2, ' days'

!Guard against degenerate input: the Bateman expressions below divide by
!(lambda_2 - lambda_1), so equal half lives (or non-positive ones, which make
!log(2)/hl meaningless) would silently produce Inf/NaN in the output file.
if (hl_1 <= 0. .or. hl_2 <= 0.) then
   write (*,*) 'Error: half lives must be positive.'
   stop
end if
if (hl_1 == hl_2) then
   write (*,*) 'Error: parent and daughter half lives must differ.'
   stop
end if

!Initializes initial population of parent nuclide
N_0 = 1E6

!Calulates the lambdas of the parent and daughter in inverse seconds
lambda_1 = log(2.)/(hl_1*24.*60.*60.)
lambda_2 = log(2.)/(hl_2*24.*60.*60.)

!Creates time array of 3600 second (1 hour) increments
do i = 1, max_size
   time(i) = (i-1)*3600.
end do

!Creates array of parent population using expression from README.md file
do i = 1, max_size
   N_1_pop(i) = N_0 * exp(-1.*lambda_1*time(i))
end do

!Creates array of daughter population using expression from README.md file
do i = 1, max_size
   N_2_pop(i) = ((lambda_1*N_0)/(lambda_2 - lambda_1)) * (exp(-1.*lambda_1*time(i)) - exp(-1.*lambda_2*time(i)))
end do

!Creates array of granddaughter population using expression from README.md file
do i = 1, max_size
   N_3_pop(i) = (N_0/(lambda_2 - lambda_1)) * ((lambda_2*(1. - exp(-1.*lambda_1*time(i)))) &
                -(lambda_1 *(1. - exp(-1.*lambda_2*time(i)))))
end do

!Fills each column of the results array with time and populations
results(:,1) = time(:)
results(:,2) = N_1_pop(:)
results(:,3) = N_2_pop(:)
results(:,4) = N_3_pop(:)

!Opening a CSV file for preparation of outputting the numbers from results array
open(unit=output,file='OUTPUT.csv')

!Writes out the first line of the CSV file which will just be the lables for the columns
write(output, '(a12,",",a17,",",a19,",",a24)') 'Time_(hours)', 'Parent_Population', 'Daughter_Population', &
      'Granddaughter_Population'

!Iterates through the results array and writes out each row
do i = 1, max_size
   write(output, '(f0.0,",",f0.4,",",f0.4,",",f0.4)') results(i,1)/3600., results(i,2), results(i,3), results(i,4)
end do

!Closes the CSV file
close (output)

!Ends the program
END PROGRAM decay_chain
|
package test
// test1 is an analysistest fixture: a naked return from a function with a
// named result. The trailing `// want ...` comments are harness directives —
// do not edit or move them.
func test1() (foo int) {
	return // want "return values not explicitly specified"
}
// test2 mixes an explicit return with naked returns; only the naked ones
// carry `want` directives for the analyzer harness.
func test2() (foo int) {
	if false {
		return 1 // not naked
	}
	if false {
		return // want "return values not explicitly specified"
	}
	return // want "return values not explicitly specified"
}
// test3 checks that the analyzer flags naked returns inside a returned
// function literal with a named result.
func test3() func() int {
	return func() (n int) {
		return // want "return values not explicitly specified"
	}
}
// test4 checks that a return from an inner literal without named results is
// not flagged, while the enclosing literal's naked return still is.
func test4() func() int {
	return func() (n int) {
		func() {
			return // not a naked return
		}()
		return // want "return values not explicitly specified"
	}
}
|
<?php
namespace MainSettings\Repositories;
use MainSettings\Models\MainSetting;
use File;
class MainSettingRepository implements MainSettingRepositoryInterface
{
    /**
     * Return every MainSetting row.
     *
     * @return \Illuminate\Database\Eloquent\Collection
     */
    public function allData(){
        return MainSetting::all();
    }

    /**
     * Fetch settings matching the given column => value pairs.
     *
     * The previous implementation concatenated the conditions into a string
     * ('->where("k","k")') and then dereferenced it as a *static property*
     * (MainSetting::$wheres), which can never run and also ignored the
     * condition values. Build the query with the query builder instead.
     *
     * @param array $conditions column => value map
     * @return \Illuminate\Database\Eloquent\Collection
     */
    public function dataWithConditions( $conditions){
        $query = MainSetting::query();
        foreach ($conditions as $column => $value) {
            $query->where($column, $value);
        }
        return $query->get();
    }

    /**
     * Fetch one settings row by id or throw ModelNotFoundException.
     */
    public function getDataId($id){
        return MainSetting::findOrFail($id);
    }

    /**
     * Persist the settings form onto the singleton row (id 1).
     *
     * Translatable fields arrive as arrays and are stored JSON-encoded;
     * uploaded logos are moved under public/uploads/backend/settings/.
     */
    public function saveData($request)
    {
        $title = json_encode($request->title);
        $content = json_encode($request->content);
        $mobile = ($request->mobile) ? $request->mobile : null;
        $email = ($request->email) ? $request->email : null;
        $address = ($request->address) ? $request->address : null;
        $meta_title = ($request->meta_title) ? json_encode($request->meta_title) : null;
        $meta_desc = ($request->meta_desc) ? json_encode($request->meta_desc) : null;
        $meta_keywords = ($request->meta_keywords) ? json_encode($request->meta_keywords) : null;
        // NOTE(review): the request field really is spelled "soicalmedia" —
        // renaming it would break the form, so the typo is kept on purpose.
        $socialmedia = ($request->soicalmedia) ? json_encode($request->soicalmedia) : null;
        $banner_title = ($request->banner_title) ? json_encode($request->banner_title) : null;
        $banner_content = ($request->banner_content) ? json_encode($request->banner_content) : null;
        $banner_button_name = ($request->banner_button_name) ? json_encode($request->banner_button_name) : null;
        $banner_button_link = ($request->banner_button_link) ? $request->banner_button_link : null;
        $mainsettings = MainSetting::findOrFail(1);
        $pathImage = public_path().'/uploads/backend/settings/';
        File::makeDirectory($pathImage, 0777, true, true);
        if ($request->logo) {
            // Initialise so an empty upload array cannot leave $logos undefined.
            $logos = [];
            foreach ($request->logo as $key => $value) {
                $imageName = $key.'_logo.'.$value->getClientOriginalExtension();
                $value->move($pathImage, $imageName);
                $logos[$key] = $imageName;
            }
            $mainsettings->logo = json_encode($logos);
        }
        $mainsettings->title = $title;
        $mainsettings->content = $content;
        $mainsettings->mobile = $mobile;
        $mainsettings->email = $email;
        $mainsettings->address = $address;
        $mainsettings->meta_title = $meta_title;
        $mainsettings->meta_desc = $meta_desc;
        $mainsettings->meta_keywords = $meta_keywords;
        $mainsettings->socialmedia = $socialmedia;
        $mainsettings->banner_title = $banner_title;
        $mainsettings->banner_content = $banner_content;
        $mainsettings->banner_button_name = $banner_button_name;
        $mainsettings->banner_button_link = $banner_button_link;
        $mainsettings->save();
    }
}
|
package games;
import heroes.Hero;
import java.util.ArrayList;
import java.util.List;
public class Game {
    // Heroes picked during this game; initial capacity 10 presumably matches
    // a 10-player lobby — TODO confirm.
    private List<Hero> heroesPlayed = new ArrayList<>(10);
    // Accumulated play time; units are not evident from this class
    // (NOTE(review): confirm seconds vs minutes against the caller).
    private int timePlayed = 0;
}
|
// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
import 'dart:ffi';
import 'dart:io';
import 'dart:typed_data';
import 'package:ffi/ffi.dart';
import 'package:ffigen/src/code_generator.dart';
import 'package:ffigen/src/header_parser/data.dart';
import 'package:ffigen/src/header_parser/includer.dart';
import 'package:ffigen/src/strings.dart' as strings;
import 'package:logging/logging.dart';
import 'package:path/path.dart' as p;
import '../clang_bindings/clang_bindings.dart' as clang_types;
import '../data.dart';
import '../utils.dart';
final _logger = Logger('ffigen.header_parser.macro_parser');

/// Adds a macro definition to be parsed later.
///
/// Only plain object-like macros are recorded: builtin and function-like
/// macros are skipped, as is anything excluded by the config filters.
void saveMacroDefinition(clang_types.CXCursor cursor) {
  final macroUsr = cursor.usr();
  final originalMacroName = cursor.spelling();

  if (clang.clang_Cursor_isMacroBuiltin(cursor) == 0 &&
      clang.clang_Cursor_isMacroFunctionLike(cursor) == 0 &&
      shouldIncludeMacro(macroUsr, originalMacroName)) {
    // Parse macro only if it's not builtin or function-like.
    _logger.fine(
        "++++ Saved Macro '$originalMacroName' for later : ${cursor.completeStringRepr()}");
    // Apply user-configured renaming before registering the macro.
    final prefixedName = config.macroDecl.renameUsingConfig(originalMacroName);
    bindingsIndex.addMacroToSeen(macroUsr, prefixedName);
    _saveMacro(prefixedName, macroUsr, originalMacroName);
  }
}
/// Saves a macro to be parsed later.
///
/// Macros are parsed later in [parseSavedMacros()].
///
/// [name] is the (possibly renamed) binding name used as the map key;
/// [usr] and [originalName] identify the macro in the source header.
void _saveMacro(String name, String usr, String originalName) {
  savedMacros[name] = Macro(usr, originalName);
}
List<Constant>? _bindings;

/// Macros cannot be parsed directly, so we create a new `.hpp` file in which
/// they are assigned to a variable after which their value can be determined
/// by evaluating the value of the variable.
///
/// Returns the collected [Constant] bindings (empty when no macros were
/// saved). The temporary file is always deleted before returning.
List<Constant>? parseSavedMacros() {
  _bindings = [];

  if (savedMacros.keys.isEmpty) {
    return _bindings;
  }

  // Create a file for parsing macros.
  final file = createFileForMacros();

  final index = clang.clang_createIndex(0, 0);
  final clangCmdArgs = createDynamicStringArray(config.compilerOpts);
  final cmdLen = config.compilerOpts.length;
  final tu = clang.clang_parseTranslationUnit(
    index,
    file.path.toNativeUtf8().cast(),
    clangCmdArgs.cast(),
    cmdLen,
    nullptr,
    0,
    clang_types.CXTranslationUnit_Flags.CXTranslationUnit_KeepGoing,
  );

  if (tu == nullptr) {
    _logger.severe('Unable to parse Macros.');
  } else {
    final rootCursor = clang.clang_getTranslationUnitCursor(tu);
    final resultCode = clang.clang_visitChildren(
      rootCursor,
      Pointer.fromFunction(_macroVariablevisitor, exceptional_visitor_return),
      nullptr,
    );
    visitChildrenResultChecker(resultCode);
    // Only dispose a translation unit that was actually created; the
    // original code passed nullptr here on parse failure, which libclang
    // does not document as safe.
    clang.clang_disposeTranslationUnit(tu);
  }
  clang.clang_disposeIndex(index);
  // NOTE(review): clangCmdArgs (and the native strings it points to) from
  // createDynamicStringArray are never freed here — confirm whether a
  // matching free helper exists and should be called.

  // Delete the temp file created for macros.
  file.deleteSync();

  return _bindings;
}
/// Child visitor invoked on translationUnitCursor for parsing macroVariables.
///
/// For each variable declaration that (a) originates from the generated
/// macro file, (b) matches a generated macro variable name, it evaluates the
/// variable with libclang and records an int/double/string [Constant] into
/// [_bindings]. Evaluation kinds other than those three are silently skipped.
int _macroVariablevisitor(clang_types.CXCursor cursor,
    clang_types.CXCursor parent, Pointer<Void> clientData) {
  Constant? constant;
  try {
    if (isFromGeneratedFile(cursor) &&
        _macroVarNames.contains(cursor.spelling()) &&
        cursor.kind == clang_types.CXCursorKind.CXCursor_VarDecl) {
      final e = clang.clang_Cursor_Evaluate(cursor);
      final k = clang.clang_EvalResult_getKind(e);
      _logger.fine('macroVariablevisitor: ${cursor.completeStringRepr()}');

      /// Get macro name, the variable name starts with '<macro-name>_'.
      final macroName = MacroVariableString.decode(cursor.spelling());
      switch (k) {
        case clang_types.CXEvalResultKind.CXEval_Int:
          constant = Constant(
            usr: savedMacros[macroName]!.usr,
            originalName: savedMacros[macroName]!.originalName,
            name: macroName,
            rawType: 'int',
            rawValue: clang.clang_EvalResult_getAsLongLong(e).toString(),
          );
          break;
        case clang_types.CXEvalResultKind.CXEval_Float:
          constant = Constant(
            usr: savedMacros[macroName]!.usr,
            originalName: savedMacros[macroName]!.originalName,
            name: macroName,
            rawType: 'double',
            // Infinity/NaN need special spelling in generated Dart.
            rawValue:
                _writeDoubleAsString(clang.clang_EvalResult_getAsDouble(e)),
          );
          break;
        case clang_types.CXEvalResultKind.CXEval_StrLiteral:
          // Escape control/special characters so the value survives
          // round-tripping through a Dart string literal.
          final rawValue = _getWrittenRepresentation(
            macroName,
            clang.clang_EvalResult_getAsStr(e),
          );
          constant = Constant(
            usr: savedMacros[macroName]!.usr,
            originalName: savedMacros[macroName]!.originalName,
            name: macroName,
            rawType: 'String',
            rawValue: "'$rawValue'",
          );
          break;
      }
      // Dispose the eval result before recording the constant.
      clang.clang_EvalResult_dispose(e);

      if (constant != null) {
        _bindings!.add(constant);
      }
    }
  } catch (e, s) {
    _logger.severe(e);
    _logger.severe(s);
    rethrow;
  }
  return clang_types.CXChildVisitResult.CXChildVisit_Continue;
}
/// Returns true if [cursor] originates from the temporary macro file
/// produced by [createFileForMacros].
bool isFromGeneratedFile(clang_types.CXCursor cursor) =>
    p.basename(cursor.sourceFileName()) == _generatedFileBaseName;
/// Base name of generated file.
///
/// Set by [createFileForMacros]; compared against cursor source files in
/// [isFromGeneratedFile].
String? _generatedFileBaseName;

/// Generated macro variable names.
///
/// Used to determine if macro should be included in bindings or not.
/// Populated by [createFileForMacros].
late Set<String> _macroVarNames;
/// Creates a temporary file for parsing macros in current directory.
///
/// The file includes every configured entry-point header and assigns each
/// saved macro to an `auto` variable so libclang can evaluate it. The
/// chosen base name is remembered in [_generatedFileBaseName] and all
/// generated variable names in [_macroVarNames].
File createFileForMacros() {
  const fileNameBase = 'temp_for_macros';
  const fileExt = 'hpp';

  // Find a filename which doesn't already exist.
  var file = File('$fileNameBase.$fileExt');
  var i = 0;
  while (file.existsSync()) {
    i++;
    // (The original split fileNameBase on '.', but it never contains one.)
    file = File('${fileNameBase}_$i.$fileExt');
  }

  // Create file.
  file.createSync();
  // Save generated name.
  _generatedFileBaseName = p.basename(file.path);

  // Write file contents.
  final sb = StringBuffer();
  for (final h in config.headers.entryPoints) {
    sb.writeln('#include "$h"');
  }

  _macroVarNames = {};
  for (final prefixedMacroName in savedMacros.keys) {
    // Write macro.
    final macroVarName = MacroVariableString.encode(prefixedMacroName);
    sb.writeln(
        'auto $macroVarName = ${savedMacros[prefixedMacroName]!.originalName};');
    // Add to _macroVarNames.
    _macroVarNames.add(macroVarName);
  }
  final macroFileContent = sb.toString();
  // Log this generated file for debugging purpose.
  // We use the finest log because this file may be very big.
  _logger.finest('=====FILE FOR MACROS====');
  _logger.finest(macroFileContent);
  _logger.finest('========================');

  file.writeAsStringSync(macroFileContent);
  return file;
}
/// Deals with encoding/decoding name of the variable generated for a Macro.
///
/// Encoded form: `_<len>_<name>_generated_macro_variable`, where `<len>` is
/// the character count of the original name. Carrying the length lets
/// [decode] recover names that themselves contain digits or underscores.
class MacroVariableString {
  static String encode(String s) =>
      '_${s.length}_${s}_generated_macro_variable';

  static String decode(String s) {
    // Drop the leading underscore, then read the decimal length prefix.
    final trimmed = s.substring(1);
    final lengthMatch = RegExp('[0-9]+').matchAsPrefix(trimmed)!;
    final nameLength = int.parse(trimmed.substring(0, lengthMatch.end));
    // The name begins right after the underscore that follows the length.
    final nameStart = lengthMatch.end + 1;
    return trimmed.substring(nameStart, nameStart + nameLength);
  }
}
/// Gets a written representation string of a C string.
///
/// E.g- For a string "Hello\nWorld", The new line character is converted to \n.
/// Note: The string is considered to be Utf8, but is treated as Extended ASCII,
/// if the conversion fails.
String _getWrittenRepresentation(String macroName, Pointer<Char> strPtr) {
  final sb = StringBuffer();
  try {
    // Consider string to be Utf8 encoded by default.
    sb.clear();
    // This throws a Format Exception if string isn't Utf8 so that we handle it
    // in the catch block.
    final result = strPtr.cast<Utf8>().toDartString();
    // Escape each rune (code point) so it can live in a Dart string literal.
    for (final s in result.runes) {
      sb.write(_getWritableChar(s));
    }
  } catch (e) {
    // Handle string if it isn't Utf8. String is considered to be
    // Extended ASCII in this case.
    _logger.warning(
        "Couldn't decode Macro string '$macroName' as Utf8, using ASCII instead.");
    sb.clear();
    // Re-read the raw bytes up to the NUL terminator and escape per byte.
    final length = strPtr.cast<Utf8>().length;
    final charList = Uint8List.view(
        strPtr.cast<Uint8>().asTypedList(length).buffer, 0, length);
    for (final char in charList) {
      sb.write(_getWritableChar(char, utf8: false));
    }
  }

  return sb.toString();
}
/// Creates a writable char from [char] code.
///
/// Control characters become their escape sequences (or `\xHH`), the
/// literal-breaking characters `$ ' \` are backslash-escaped, and — when
/// [utf8] is false — every remaining character is emitted as `\xHH`.
String _getWritableChar(int char, {bool utf8 = true}) {
  const controlEscapes = <int, String>{
    8: r'\b',
    9: r'\t',
    10: r'\n',
    11: r'\v',
    12: r'\f',
    13: r'\r',
  };
  const literalEscapes = <int, String>{
    36: r'\$',
    39: r"\'",
    92: r'\\',
  };

  String hexEscape(int c) =>
      '\\x${c.toRadixString(16).toUpperCase().padLeft(2, '0')}';

  // Control characters (including DEL) get named escapes where they exist.
  if (char >= 0 && char < 32 || char == 127) {
    return controlEscapes[char] ?? hexEscape(char);
  }

  // Characters that would break a generated Dart string literal.
  final escaped = literalEscapes[char];
  if (escaped != null) {
    return escaped;
  }

  // Non-Utf8 input is Extended ASCII; print everything else as `\xHH`.
  if (!utf8) {
    return hexEscape(char);
  }

  // In all other cases, simply convert to string.
  return String.fromCharCode(char);
}
/// Converts a double to a string, handling cases like Infinity and NaN.
///
/// Finite values use [double.toString]; the three non-finite values map to
/// the project's spellings in [strings].
String _writeDoubleAsString(double d) {
  if (!d.isFinite) {
    // The only non-finite doubles are Infinity, -Infinity and NaN.
    if (d.isInfinite) {
      if (d.isNegative) return strings.doubleNegativeInfinity;
      return strings.doubleInfinity;
    }
    return strings.doubleNaN;
  }
  return d.toString();
}
|
/**
* Atlas REST API
* Atlas exposes a variety of REST endpoints to work with types, entities, lineage and data discovery.
*
* OpenAPI spec version: 2.0.0-SNAPSHOT
*
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package io.swagger.client.model
// Swagger-generated model for an Atlas related-object reference.
// NOTE(review): field meanings below are inferred from names against the
// Atlas REST API — confirm before relying on them.
case class JsonAtlasRelatedObjectId (
  // GUID of the referenced entity
  guid: Option[String] = None,
  // Type name of the referenced entity
  typeName: Option[String] = None,
  // Unique attributes identifying the entity when no GUID is given
  uniqueAttributes: Option[Map[String, Any]] = None,
  // Display text for the referenced entity
  displayText: Option[String] = None,
  entityStatus: Option[JsonStatus] = None,
  relationshipAttributes: Option[JsonAtlasStruct] = None,
  // GUID of the relationship itself
  relationshipGuid: Option[String] = None,
  relationshipStatus: Option[JsonStatusAtlasRelationship] = None,
  // Type name of the relationship
  relationshipType: Option[String] = None
)
|
namespace {{$nameSpace}}
use {{$package}}AppController as Controller;

/**
 * Generator stub: {{$nameSpace}}, {{$package}} and {{$className}} are
 * placeholders filled in by the scaffolding command.
 *
 * NOTE(review): the namespace line above has no trailing semicolon —
 * confirm the {{$nameSpace}} placeholder supplies it.
 */
class {{$className}} extends Controller
{
    /**
     * Display a listing of the resource.
     *
     * @return \Illuminate\Http\Response
     */
    public function index()
    {
        //
    }
}
|
<?php
namespace mycryptocheckout;
/**
@brief Handles the setup of menus.
@since 2017-12-09 07:05:04
**/
trait menu_trait
{
	/**
		@brief		Init!
		@since		2017-12-07 19:34:05
	**/
	public function init_menu_trait()
	{
		$this->add_action( 'admin_menu' );
		$this->add_action( 'network_admin_menu' );
	}

	/**
		@brief		Admin menu callback.
		@since		2017-12-07 19:35:46
	**/
	public function admin_menu()
	{
		$this->enqueue_js();

		// For normal admin.
		add_submenu_page(
			'options-general.php',
			// Page heading
			__( 'MyCryptoCheckout Settings', 'mycryptocheckout' ),
			// Menu item name
			__( 'MyCryptoCheckout', 'mycryptocheckout' ),
			'manage_options',
			'mycryptocheckout',
			// Plain callable; the PHP4-era [ &$this, ... ] reference is unnecessary.
			[ $this, 'admin_menu_tabs' ]
		);
	}

	/**
		@brief		Render the tabbed settings page shown for the menu item.
	**/
	public function admin_menu_tabs()
	{
		$tabs = $this->tabs();

		if ( ! defined( 'MYCRYPTOCHECKOUT_DISABLE_WALLET_EDITOR' ) )
		{
			$tabs->tab( 'currencies' )
				->callback_this( 'admin_currencies' )
				// Tab heading
				->heading( __( 'MyCryptoCheckout Currencies', 'mycryptocheckout' ) )
				// Name of tab
				->name( __( 'Currencies', 'mycryptocheckout' ) );

			if ( $tabs->get_is( 'edit_wallet' ) )
			{
				// Guard against a missing query parameter instead of raising
				// an undefined index notice.
				// NOTE(review): wallet_id comes straight from the query string;
				// confirm wallets()->get() copes with arbitrary values.
				$wallet_id = isset( $_GET[ 'wallet_id' ] ) ? $_GET[ 'wallet_id' ] : '';
				$wallets = $this->wallets();
				$wallet = $wallets->get( $wallet_id );

				$tabs->tab( 'edit_wallet' )
					->callback_this( 'admin_edit_wallet' )
					// Editing BTC wallet
					->heading( sprintf( __( 'Editing %s wallet', 'mycryptocheckout' ), $wallet->get_currency_id() ) )
					// Name of tab
					->name( __( 'Edit wallet', 'mycryptocheckout' ) )
					->parameters( $wallet_id );
			}
		}

		$tabs->tab( 'account' )
			->callback_this( 'admin_account' )
			// Tab heading
			->heading( __( 'MyCryptoCheckout Account', 'mycryptocheckout' ) )
			// Name of tab
			->name( __( 'Account', 'mycryptocheckout' ) );

		$tabs->tab( 'autosettlements' )
			->callback_this( 'autosettlement_admin' )
			// Tab heading
			->heading( __( 'MyCryptoCheckout Autosettlement Settings', 'mycryptocheckout' ) )
			// Name of tab
			->name( __( 'Autosettlements', 'mycryptocheckout' ) );

		if ( $tabs->get_is( 'autosettlement_edit' ) )
		{
			// Same guarded lookup as for wallet editing above.
			$autosettlement_id = isset( $_GET[ 'autosettlement_id' ] ) ? $_GET[ 'autosettlement_id' ] : '';
			$autosettlements = $this->autosettlements();
			$autosettlement = $autosettlements->get( $autosettlement_id );

			$tabs->tab( 'autosettlement_edit' )
				->callback_this( 'autosettlement_edit' )
				// Editing autosettlement TYPE
				->heading( sprintf( __( 'Editing autosettlement %s', 'mycryptocheckout' ), $autosettlement->get_type() ) )
				// Name of tab
				->name( __( 'Edit autosettlement', 'mycryptocheckout' ) )
				->parameters( $autosettlement_id );
		}

		$tabs->tab( 'donations' )
			->callback_this( 'admin_donations' )
			// Tab heading
			->heading( __( 'MyCryptoCheckout Donations', 'mycryptocheckout' ) )
			// Name of tab
			->name( __( 'Donations', 'mycryptocheckout' ) );

		if ( $this->is_network )
			$tabs->tab( 'local_settings' )
				->callback_this( 'admin_local_settings' )
				// Tab heading
				->heading( __( 'MyCryptoCheckout Local Settings', 'mycryptocheckout' ) )
				// Name of tab
				->name( __( 'Local Settings', 'mycryptocheckout' ) );

		$tabs->tab( 'global_settings' )
			->callback_this( 'admin_global_settings' )
			// Tab heading
			->heading( __( 'MyCryptoCheckout Global Settings', 'mycryptocheckout' ) )
			// Name of tab
			->name( __( 'Global Settings', 'mycryptocheckout' ) );

		$tabs->tab( 'tools' )
			->callback_this( 'admin_tools' )
			// Tab heading
			->heading( __( 'MyCryptoCheckout Tools', 'mycryptocheckout' ) )
			// Name of tab
			->name( __( 'Tools', 'mycryptocheckout' ) );

		$tabs->tab( 'uninstall' )
			->callback_this( 'admin_uninstall' )
			// Tab heading
			->heading( __( 'Uninstall MyCryptoCheckout', 'mycryptocheckout' ) )
			// Name of tab
			->name( __( 'Uninstall', 'mycryptocheckout' ) )
			->sort_order( 90 ); // Always last.

		echo $tabs->render();
	}

	/**
		@brief		network_admin_menu
		@since		2017-12-30 20:51:49
	**/
	public function network_admin_menu()
	{
		add_submenu_page(
			'settings.php',
			// Page heading
			__( 'MyCryptoCheckout Settings', 'mycryptocheckout' ),
			// Menu item name
			__( 'MyCryptoCheckout', 'mycryptocheckout' ),
			'manage_options',
			'mycryptocheckout',
			// Plain callable; no by-reference $this needed.
			[ $this, 'admin_menu_tabs' ]
		);
	}
}
|
import { AvatarSizeValue, AvatarTokenSet } from './Avatar.types';
/**
 * Additional state properties needed by Avatar's styling
 */
export interface AvatarStyleProps {
  /** Size value the avatar renders at. */
  size?: AvatarSizeValue;
  /** Design tokens overriding the default Avatar styling. */
  tokens?: AvatarTokenSet;
  /** True when the avatar should render in its inactive treatment. */
  inactive?: boolean;
  /** Show a ring when active. */
  activeRing?: boolean;
  /** Show a shadow when active. */
  activeShadow?: boolean;
  /** Show a glow when active. */
  activeGlow?: boolean;
}
/**
 * Updates Avatar's state object with style-specific properties
 */
export declare const calcAvatarStyleProps: (state: Readonly<import("./Avatar.types").AvatarProps>) => AvatarStyleProps;
|
import Strategy from "./strategy/Strategy.js";
import JSONStrategy from './strategy/JSONStrategy.js';
import Config from "./Config.js";

// JSON support is always available.
Strategy.registerForFiles("json", JSONStrategy);

// YAML support is optional: only register the strategy when the "yaml"
// module is installed, otherwise register an error message for those
// extensions.
try {
    require.resolve("yaml");
    Strategy.registerForFiles(["yaml", "yml"], require("./strategy/YAMLStrategy.js").default)
} catch (e) {
    // NOTE(review): "refisterErrorForFiles" looks like a typo for
    // "registerErrorForFiles" — confirm against the Strategy class before
    // renaming, since the API itself may carry the misspelling.
    Strategy.refisterErrorForFiles(["yaml", "yml"], "Module YAML not installed.");
}

export default Config;
export { Strategy }
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace RecyclingStation.BusinestLayer.Factories
{
using System.Reflection;
using RecyclingStation.BusinestLayer.Contracts.Factories;
using RecyclingStation.WasteDisposal.Interfaces;
public class WasteFactory : IWasteFactory
{
    private const string GarbageSuffix = "Garbage";

    /// <summary>
    /// Creates a waste instance by reflecting over the executing assembly
    /// for a type named "{type}Garbage" (case-insensitive) and invoking its
    /// (string, double, double) constructor.
    /// </summary>
    /// <exception cref="ArgumentException">
    /// Thrown when no matching garbage type exists — previously this fell
    /// through to a NullReferenceException inside Activator.CreateInstance.
    /// </exception>
    public IWaste Create(string name, double weight, double volumePerKg, string type)
    {
        string fullTypeName = type + GarbageSuffix;
        Type garbageTypeToActivate = Assembly.GetExecutingAssembly()
            .GetTypes().FirstOrDefault(t => t.Name.Equals(fullTypeName, StringComparison.OrdinalIgnoreCase));

        if (garbageTypeToActivate == null)
        {
            throw new ArgumentException($"Unknown waste type: {type}", nameof(type));
        }

        object[] typeArgs = new object[] { name, weight, volumePerKg };
        IWaste waste = (IWaste)Activator.CreateInstance(garbageTypeToActivate, typeArgs);
        return waste;
    }
}
}
|
{ @abstract(This file is part of the KControls component suite for Delphi and Lazarus.)
@author(Tomas Krysl)
Copyright (c) 2020 Tomas Krysl<BR><BR>
<B>License:</B><BR>
This code is licensed under BSD 3-Clause Clear License, see file License.txt or https://spdx.org/licenses/BSD-3-Clause-Clear.html.
}
unit klabels; // lowercase name because of Lazarus/Linux
{$include kcontrols.inc}
{$WEAKPACKAGEUNIT ON}
interface
uses
{$IFDEF FPC}
LCLType, LCLIntf, LMessages, LCLProc, LResources,
{$ELSE}
Windows, Messages,
{$ENDIF}
Classes, Controls, Forms, Graphics, StdCtrls, KFunctions, KControls
{$IFDEF USE_THEMES}
, Themes
{$IFNDEF FPC}
, UxTheme
{$ENDIF}
{$ENDIF}
;
type
  { Label that paints a horizontal color gradient behind its caption, with an
    optional solid divider strip along the bottom edge. }
  TKGradientLabel = class(TKCustomControl)
  private
    BM: TBitmap; // off-screen buffer rendered by Paint and blitted to screen
    FLeftColor,
    FRightColor,
    FDividerColor: TColor;
    FDividerWidth: Integer;
    FColorStep: Integer;   // number of gradient bands, clamped to 1..255
    FCaptionWidth: Integer; // area the caption is centered within
    procedure SetLeftColor(Value: TColor);
    procedure SetRightColor(Value: TColor);
    procedure SetDividerColor(Value: TColor);
    procedure SetDividerWidth(Value: Integer);
    procedure SetColorStep(Value: Integer);
    procedure SetCaptionWidth(Value: Integer);
    procedure WMEraseBkGnd(var Msg: TLMessage); message LM_ERASEBKGND;
    procedure CMTextChanged(var Msg: TLMessage); message CM_TEXTCHANGED;
  protected
    procedure Paint; override;
    procedure Resize; override;
  public
    constructor Create(AOwner: TComponent); override;
    destructor Destroy; override;
  published
    property Align;
    property Anchors;
    property Caption;
    property CaptionWidth: Integer read FCaptionWidth write SetCaptionWidth default 50;
    property ColorStep: Integer read FColorStep write SetColorStep default 50;
    property Constraints;
    property DividerColor: TColor read FDividerColor write SetDividerColor default clBlack;
    property DividerWidth: Integer read FDividerWidth write SetDividerWidth default 2;
    property Font;
    property LeftColor: TColor read FLeftColor write SetLeftColor default clNavy;
    property RightColor: TColor read FRightColor write SetRightColor default clBlue;
  end;

  { TKLinkLabel }

  { Label behaving as a clickable hyperlink: changes color on hover and opens
    its URL with the system shell when clicked. }
  TKLinkLabel = class(TLabel)
  private
    FHotColor: TColor;      // color while the mouse hovers the label
    FLinkColor: TColor;     // normal link color
    FShowURLAsHint: Boolean;
    FURL: string;
    procedure CMMouseEnter(var Message: TLMessage); message CM_MOUSEENTER;
    procedure CMMouseLeave(var Message: TLMessage); message CM_MOUSELEAVE;
    procedure CMFontChanged(var Message: TLMessage); message CM_FONTCHANGED;
    procedure SetHotColor(Value: TColor);
    procedure SetLinkColor(const Value: TColor);
  protected
    FActiveColor: TColor;      // color currently used by Paint
    FMouseInControl: Boolean;
    procedure Loaded; override;
    procedure Paint; override;
  public
    constructor Create(AOwner: TComponent); override;
    procedure Click; override;
  published
    property HotColor: TColor read FHotColor write SetHotColor default clRed;
    property LinkColor: TColor read FLinkColor write SetLinkColor default clBlue;
    property ShowURLAsHint: Boolean read FShowURLAsHint write FShowURLAsHint;
    property URL: string read FURL write FURL;
  end;
implementation
uses
Math, SysUtils, KGraphics;
{ TKGradientLabel }
constructor TKGradientLabel.Create(AOwner: TComponent);
begin
  inherited;
  // Off-screen buffer; Paint renders into it and blits it to the canvas.
  BM := TBitmap.Create;
{$IFNDEF FPC}
  BM.IgnorePalette := True;
{$ENDIF}
  Caption := '';
  // Defaults matching the published property declarations.
  FLeftColor := clNavy;
  FRightColor := clBlue;
  FDividerColor := clBlack;
  FDividerWidth := 2;
  Font.Color := clWhite;
  Font.Name := 'Arial';
  Font.Height := 20;
  Font.Style := [fsBold];
  FColorStep := 50;
  Width := 50;
  Height := 30;
  FCaptionWidth := 50;
end;
destructor TKGradientLabel.Destroy;
begin
  // Free owned resources before tearing down the base class; the original
  // called inherited first, which inverts the conventional teardown order.
  BM.Free;
  inherited;
end;
procedure TKGradientLabel.Resize;
begin
  // Keep the caption centering area in sync with the control width.
  FCaptionWidth := Width;
  Invalidate;
  inherited;
end;
{ Property setters: bail out early when the value is unchanged, otherwise
  store it and schedule a repaint. }

procedure TKGradientLabel.SetDividerColor(Value: TColor);
begin
  if Value = FDividerColor then
    Exit;
  FDividerColor := Value;
  Invalidate;
end;

procedure TKGradientLabel.SetDividerWidth(Value: Integer);
begin
  if Value = FDividerWidth then
    Exit;
  FDividerWidth := Value;
  Invalidate;
end;

procedure TKGradientLabel.SetLeftColor(Value: TColor);
begin
  if Value = FLeftColor then
    Exit;
  FLeftColor := Value;
  Invalidate;
end;

procedure TKGradientLabel.SetRightColor(Value: TColor);
begin
  if Value = FRightColor then
    Exit;
  FRightColor := Value;
  Invalidate;
end;

procedure TKGradientLabel.SetCaptionWidth(Value: Integer);
begin
  if Value = FCaptionWidth then
    Exit;
  FCaptionWidth := Value;
  Invalidate;
end;

procedure TKGradientLabel.SetColorStep(Value: Integer);
begin
  // Clamp the band count to the valid 1..255 range before comparing.
  Value := EnsureRange(Value, 1, 255);
  if Value = FColorStep then
    Exit;
  FColorStep := Value;
  Invalidate;
end;
procedure TKGradientLabel.WMEraseBkGnd(var Msg: TLMessage);
begin
  // Claim the erase as handled; Paint covers the whole client area via the
  // off-screen bitmap, so default erasing would only cause flicker.
  Msg.Result := 1;
end;
procedure TKGradientLabel.Paint;
begin
  if Width > 0 then
  begin
    // Render gradient and caption into the off-screen bitmap first.
    BM.Width := Width;
    BM.Height := Max(Height - FDividerWidth, 1);
    with BM.Canvas do
    begin
      if FLeftColor <> FRightColor then
      begin
        DrawGradientRect(BM.Canvas, Rect(0, 0, BM.Width, BM.Height), FLeftColor, FRightColor, FColorStep, True);
      end else
      begin
        // Flat fill when both edge colors agree.
        Brush.Color := FLeftColor;
        FillRect(Rect(0, 0, BM.Width, BM.Height));
      end;
      Font := Self.Font;
      SetBkMode(Handle, TRANSPARENT);
      // Center the caption within the caption area, keeping a 10px minimum
      // left margin.
      TextOut(Max((FCaptionWidth - TextWidth(Caption)) div 2, 10),
        (Height - Font.Height) div 2, Caption);
    end;
    with Canvas do
    begin
      // Blit the buffer, then draw the solid divider strip at the bottom.
      Draw(0,0, BM);
      if FDividerWidth > 0 then
      begin
        Pen.Color := FDividerColor;
        Brush.Color := FDividerColor;
        Rectangle(0, Max(Height - FDividerWidth, 0), Width, Height);
      end;
    end;
  end;
end;
procedure TKGradientLabel.CMTextChanged(var Msg: TLMessage);
begin
  inherited;
  // Repaint so the new caption becomes visible.
  Invalidate;
end;
{ TKLinkLabel }
constructor TKLinkLabel.Create(AOwner: TComponent);
begin
  inherited;
  FMouseInControl := False;
  FShowURLAsHint := True;
  ShowHint := True;
  // Defaults matching the published property declarations.
  FHotColor := clRed;
  FLinkColor := clBlue;
  FActiveColor := FLinkColor;
  FURL := 'http://example.com';
  Caption := FURL;
  Cursor := crHandPoint;
end;
procedure TKLinkLabel.Paint;
var
  PaintColor: TColor;
begin
  // At design time always show the plain link color; at run time use the
  // hover-dependent active color.
  PaintColor := FActiveColor;
  if csDesigning in ComponentState then
    PaintColor := FLinkColor;
  Font.Color := PaintColor;
  inherited;
end;
procedure TKLinkLabel.Click;
begin
  inherited;
  // Open the configured URL with the system shell.
  OpenURLWithShell(FURL);
end;
{ Color setters: repaint only when the changed color is the one currently
  visible (hot color while hovered, link color otherwise). }

procedure TKLinkLabel.SetHotColor(Value: TColor);
begin
  if Value = FHotColor then
    Exit;
  FHotColor := Value;
  if FMouseInControl then
    Invalidate;
end;

procedure TKLinkLabel.SetLinkColor(const Value: TColor);
begin
  if Value = FLinkColor then
    Exit;
  FLinkColor := Value;
  if not FMouseInControl then
    Invalidate;
end;
procedure TKLinkLabel.Loaded;
begin
  inherited Loaded;
  // Re-sync the paint color after streamed properties (LinkColor) are set.
  FActiveColor := FLinkColor;
end;
procedure TKLinkLabel.CMMouseEnter(var Message: TLMessage);
begin
  inherited;
  { Don't draw a border if DragMode <> dmAutomatic since this button is meant to
    be used as a dock client. }
  // Switch to the hot color (and show the URL hint) when the mouse enters.
  if not (csDesigning in ComponentState) and not FMouseInControl
    and Enabled and (DragMode <> dmAutomatic) and (GetCapture = 0) then
  begin
    FMouseInControl := True;
    FActiveColor := FHotColor;
    if FShowURLAsHint then
      Hint := FURL;
    Invalidate;
  end;
end;
procedure TKLinkLabel.CMMouseLeave(var Message: TLMessage);
begin
  inherited;
  // Restore the normal link color and clear the URL hint on mouse exit.
  if not (csDesigning in ComponentState) and FMouseInControl and Enabled then
  begin
    FMouseInControl := False;
    FActiveColor := FLinkColor;
    if FShowURLAsHint then
      Hint := '';
    Invalidate;
  end;
end;
procedure TKLinkLabel.CMFontChanged(var Message: TLMessage);
begin
  // Let TLabel perform its default font-change handling (e.g. AutoSize
  // bounds adjustment) — the original handler swallowed the message.
  inherited;
  Invalidate;
end;
end.
|
{-# LANGUAGE TypeFamilies #-}

-- | Demonstrates data families: each instance of 'XList' chooses a
-- representation specialised to its element type.
module Books.HaskellInDepth.DataFamily () where

import Control.Conditional (if')
import Data.Word

data family XList a

-- | A list of units needs nothing beyond its length.
newtype instance XList () = XListUnit Word

-- | A list of booleans packed into a bit set plus an element count.
data instance XList Bool = XBits Word32 Word

class XListable a where
  xempty :: XList a
  xcons :: a -> XList a -> XList a
  xheadMay :: XList a -> Maybe a

instance XListable () where
  xempty = XListUnit 0
  xcons _ (XListUnit len) = XListUnit (succ len)
  xheadMay (XListUnit len)
    | len == 0 = Nothing
    | otherwise = Just ()

instance XListable Bool where
  xempty = XBits 0 0
  -- The freshest element lives in the least significant bit.
  xcons b (XBits bits len) =
    XBits (bits * 2 + (if b then 1 else 0)) (succ len)
  xheadMay (XBits _ 0) = Nothing
  xheadMay (XBits bits _) = Just (odd bits)
|
using System;
using System.Text.RegularExpressions;
class Program
{
    /// <summary>
    /// Reads a URL from stdin and prints its protocol, server and resource
    /// parts extracted via named regex groups.
    /// </summary>
    static void Main()
    {
        string url = Console.ReadLine();

        // The original pattern ended in "(?<resource>.+)*": the outer "*" on
        // an unbounded group is redundant (the group keeps only the last
        // iteration) and invites catastrophic backtracking on non-matching
        // input. "?" yields the same captures for well-formed URLs.
        Regex regex = new Regex(@"(?<protocol>.+)\:\/\/(?<server>\w+\.\w+)(?<resource>.+)?");
        Match urlElements = regex.Match(url);

        Console.WriteLine("[protocol] = " + urlElements.Groups["protocol"].Value);
        Console.WriteLine("[server] = " + urlElements.Groups["server"].Value);
        Console.WriteLine("[resource] = " + urlElements.Groups["resource"].Value);
    }
}
|
package com.example.util
import java.io.InputStream
import play.api.libs.json._
import play.api.libs.json.Json._
import org.scalactic._
package object json {

  /** [[play.api.libs.json.Json#parse(String)]] normally throws an exception if there's an error
    * during parsing. This function does the same thing without throwing exceptions,
    * returning the failure on the `Throwable` side of the `Or`.
    */
  def parseSafe(s: String): JsValue Or Throwable = attempt(parse(s))

  /** [[play.api.libs.json.Json#parse(Array[Byte])]] normally throws an exception if there's an
    * error during parsing. This function does the same thing without throwing exceptions.
    */
  def parseSafe(bytes: Array[Byte]): JsValue Or Throwable = attempt(parse(bytes))

  /** [[play.api.libs.json.Json#parse(InputStream)]] normally throws an exception if there's an
    * error during parsing. This function does the same thing without throwing exceptions.
    */
  def parseSafe(is: InputStream): JsValue Or Throwable = attempt(parse(is))
}
|
require_relative '../_lib'
class FilterTest < Critic::Unit::Test
  include Configatron::Integrations::Minitest

  # Minimal stand-in for a filter: only the method consumed by
  # Filter.upload_to_gmail is implemented.
  class FakeFilter
    def generate_xml_properties
      1
    end
  end

  # Filter set exposing a single fake filter.
  class FakeFilterSet
    def filters
      [FakeFilter.new]
    end
  end

  # Requestor whose create_filter always raises the injected error, while
  # counting how many times it was invoked (to observe retry behavior).
  class FakeRequestor
    attr_reader :create_filter_count

    def initialize(error)
      @error = error
      @create_filter_count = 0
    end

    def create_filter(fake_filter_text)
      @create_filter_count += 1
      raise @error
    end
  end

  # Lazily built user shared by the examples below.
  def a_user
    @user ||= Gaps::DB::User.new
  end

  describe "#self.upload_to_gmail" do
    before do
      Gaps::Filter.stubs(:translate_to_gmail_britta_filters)
      Gaps::Filter.stubs(:create_filterset).returns(FakeFilterSet.new)
    end

    it "does not retry on Google::APIClient::ServerError" do
      fake_requestor = FakeRequestor.new(Google::APIClient::ServerError.new("Backend Error"))
      a_user.stubs(:requestor).returns(fake_requestor)

      Gaps::Filter.upload_to_gmail(a_user)
      # Server errors are treated as permanent: exactly one attempt.
      assert_equal 1, fake_requestor.create_filter_count
    end

    it "retries on StandardError" do
      Gaps::Filter.stubs(:sleep) # sleeps during retry
      fake_requestor = FakeRequestor.new(StandardError.new)
      a_user.stubs(:requestor).returns(fake_requestor)

      Gaps::Filter.upload_to_gmail(a_user)
      # Generic errors are retried at least twice beyond the first attempt.
      assert fake_requestor.create_filter_count > 2
    end
  end
end
|
namespace Chushka.App.Controllers
{
using System.Linq;
using Chushka.Models;
using Models.BindingModels;
using SoftUni.WebServer.Common;
using SoftUni.WebServer.Mvc.Attributes.HttpMethods;
using SoftUni.WebServer.Mvc.Attributes.Security;
using SoftUni.WebServer.Mvc.Interfaces;
public class UserController : BaseController
{
    // Role ids as stored in the database: the very first registered user
    // becomes an administrator, everybody else a regular user.
    private const int AdminRoleId = 1;
    private const int DefaultRoleId = 2;

    private int roleId = DefaultRoleId;

    [HttpGet]
    public IActionResult Login()
    {
        this.ViewData["error"] = string.Empty;
        return this.View();
    }

    [HttpPost]
    public IActionResult Login(LoginUserBindingModel model)
    {
        using (this.Context)
        {
            var passHash = PasswordUtilities.GetPasswordHash(model.Password);
            var user = this.Context.Users.SingleOrDefault(u =>
                u.Username == model.Username && u.PasswordHash == passHash);
            if (user == null)
            {
                this.ViewData["error"] = "Invalid username or password";
                return this.View();
            }

            // Roles are transported as role-id strings.
            // NOTE(review): confirm SignIn expects ids rather than role names.
            var roles = new string[] { user.RoleId.ToString() };
            this.SignIn(user.Username, user.Id, roles);
        }
        return this.RedirectToAction("/");
    }

    [HttpGet]
    public IActionResult Register()
    {
        this.ViewData["error"] = string.Empty;
        return this.View();
    }

    [HttpPost]
    public IActionResult Register(RegisterUserBindingModel model)
    {
        if (!this.IsValidModel(model) || model.Password != model.ConfirmPassword)
        {
            this.ViewData["error"] = "Invalid credentials";
            return this.View();
        }

        using (this.Context)
        {
            if (this.Context.Users.Any(u => u.Username == model.Username))
            {
                this.ViewData["error"] = "User already exist";
                return this.View();
            }

            // The first account ever created is promoted to administrator.
            if (!this.Context.Users.Any())
            {
                this.roleId = AdminRoleId;
            }

            var user = new User()
            {
                Username = model.Username,
                Email = model.Email,
                FullName = model.FullName,
                PasswordHash = PasswordUtilities.GetPasswordHash(model.Password),
                RoleId = this.roleId
            };

            this.Context.Add(user);
            this.Context.SaveChanges();
        }

        return this.RedirectToAction("/");
    }

    [HttpGet]
    [Authorize]
    public IActionResult Logout()
    {
        this.SignOut();
        return this.RedirectToAction("/");
    }
}
}
|
package jp.numero.dagashiapp.data
import android.content.Context
import androidx.datastore.core.DataStore
import androidx.datastore.preferences.core.Preferences
import androidx.datastore.preferences.core.booleanPreferencesKey
import androidx.datastore.preferences.core.edit
import androidx.datastore.preferences.core.stringPreferencesKey
import androidx.datastore.preferences.preferencesDataStore
import dagger.hilt.android.qualifiers.ApplicationContext
import jp.numero.dagashiapp.model.Theme
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.map
import javax.inject.Inject
import javax.inject.Singleton
// Name of the Preferences DataStore file backing the app configuration.
private const val preferenceName = "config"

// Process-wide DataStore instance bound to the application Context.
private val Context.dataStore: DataStore<Preferences> by preferencesDataStore(name = preferenceName)
@Singleton
class ConfigDataSource(
    private val dataStore: DataStore<Preferences>
) {
    @Inject
    constructor(
        @ApplicationContext context: Context
    ) : this(context.dataStore)

    private val themeKey = stringPreferencesKey(themeKeyName)
    private val applyDynamicColorKey = booleanPreferencesKey(applyDynamicColorKeyName)

    /**
     * Currently selected [Theme].
     *
     * Falls back to [Theme.default] when nothing is stored yet, or when the
     * persisted name no longer matches a known theme (e.g. a theme was
     * removed/renamed in an app update) — the previous `checkNotNull` made
     * that situation crash on every read.
     */
    val theme: Flow<Theme> = dataStore.data.map { preferences ->
        val themeName = preferences[themeKey] ?: Theme.default.name
        Theme.toList().find { theme -> theme.name == themeName } ?: Theme.default
    }

    /** Whether Material dynamic color is applied; defaults to true. */
    val applyDynamicColor: Flow<Boolean> = dataStore.data.map { it[applyDynamicColorKey] ?: true }

    /** Persists the selected [theme] by name. */
    suspend fun updateTheme(theme: Theme) {
        dataStore.edit {
            it[themeKey] = theme.name
        }
    }

    /** Persists the dynamic-color preference. */
    suspend fun updateApplyDynamicColor(applyDynamicColor: Boolean) {
        dataStore.edit {
            it[applyDynamicColorKey] = applyDynamicColor
        }
    }

    companion object {
        private const val themeKeyName = "theme"
        private const val applyDynamicColorKeyName = "applyDynamicColor"
    }
}
|
const readFile = require('fs-readfile-promise');
var fs = require('fs');
var readline = require('readline');
var google = require('googleapis');
var googleAuth = require('google-auth-library');
var characterBuilder = require('./characterBuilder.js');
var SCOPES = ['https://www.googleapis.com/auth/spreadsheets.readonly'];
var TOKEN_DIR = (process.env.HOME || process.env.HOMEPATH || process.env.USERPROFILE) + '/.credentials/';
var TOKEN_PATH = TOKEN_DIR + 'sheets.googleapis.com-nodejs-readedit.json';
var exports = module.exports = {};
var auth;
exports.readSheet = async function(sheet) {
const secret = await getClientSecret();
auth = await authorize(secret);
return await getCharacter(auth, sheet);
};
// Read and parse the OAuth client secret from disk.
// Fix: rethrow after logging so callers fail fast with the real error
// instead of receiving `undefined` and crashing later in authorize() with
// a confusing "cannot read property of undefined".
async function getClientSecret() {
  try {
    const content = await readFile('client_secret.json');
    return JSON.parse(content);
  } catch (err) {
    console.log("Error handling client secret file: " + err);
    throw err;
  }
}
// Build an OAuth2 client from the downloaded credentials. Reuses a cached
// token from TOKEN_PATH when one exists; otherwise falls back to the
// interactive consent flow in getNewToken().
async function authorize(credentials) {
  var clientSecret = credentials.installed.client_secret;
  var clientId = credentials.installed.client_id;
  var redirectUrl = credentials.installed.redirect_uris[0];
  var auth = new googleAuth();
  var oauth2Client = new auth.OAuth2(clientId, clientSecret, redirectUrl);
  try {
    // Cached token available: use it directly.
    const token = await readFile(TOKEN_PATH);
    oauth2Client.credentials = JSON.parse(token);
    return oauth2Client;
  } catch (err) {
    // No cached token (or unreadable): run the interactive flow.
    return getNewToken(oauth2Client);
  }
}
// Run the interactive OAuth flow: print the consent URL, read the code from
// stdin, exchange it for a token, persist it, and resolve the authorised
// client.
// Fix: the original `return`ed from inside the getToken callback, so this
// function — and therefore authorize() — resolved to `undefined`. It now
// returns a Promise that resolves with the client (or rejects on error).
function getNewToken(oauth2Client) {
  var authUrl = oauth2Client.generateAuthUrl({
    access_type: 'offline',
    scope: SCOPES
  });
  console.log('Authorise this app by visiting this url: ', authUrl);
  var rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });
  return new Promise(function(resolve, reject) {
    rl.question('Enter the code from that page here: ', function(code) {
      rl.close();
      oauth2Client.getToken(code, function(err, token) {
        if (err) {
          console.log('Error while trying to retrieve access token', err);
          reject(err);
          return;
        }
        oauth2Client.credentials = token;
        storeToken(token);
        resolve(oauth2Client);
      });
    });
  });
}
// Persist the OAuth token to TOKEN_PATH, creating TOKEN_DIR if needed.
function storeToken(token) {
  try {
    fs.mkdirSync(TOKEN_DIR);
  } catch (err) {
    // The directory already existing is fine; anything else is fatal.
    if (err.code != 'EEXIST') {
      throw err;
    }
  }
  // Fix: fs.writeFile without a callback is deprecated (and throws on
  // modern Node); write synchronously so the success log below is truthful
  // and errors surface to the caller.
  fs.writeFileSync(TOKEN_PATH, JSON.stringify(token));
  console.log('Token stored to ' + TOKEN_PATH);
}
// Fetch the raw value grid for the character sheet and hand it to the
// character builder.
// NOTE(review): range 'Data!A1:B241' is presumably the fixed layout of the
// sheet template — confirm against the spreadsheet.
async function getCharacter(auth, sheet) {
  const response = await getSheetsValues({
    auth: auth,
    spreadsheetId: sheet.id,
    range: 'Data!A1:B241',
  });
  return characterBuilder.buildCharacter(response.values, sheet);
}
// Promise wrapper around sheets.spreadsheets.values.get.
// Fix: reject with the actual error instead of rejecting with `undefined`,
// so callers can see what went wrong.
function getSheetsValues(params) {
  return new Promise((resolve, reject) => {
    var sheets = google.sheets('v4');
    sheets.spreadsheets.values.get(params, (err, response) => {
      if (err) {
        reject(err);
      } else {
        resolve(response);
      }
    });
  });
}
|
/**
* @Author : yantodev
* mailto: [email protected]
* link : http://yantodev.github.io/
*/
/**
 * Show an update dialog for the given salary row and POST the edited
 * qty/price to the backend.
 *
 * Fix: preConfirm now *returns* the jQuery ajax promise, so
 * showLoaderOnConfirm actually waits for the request to complete instead
 * of resolving immediately and showing the success dialog regardless of
 * whether the request was even sent.
 */
function updateSalary(title,id, qty, price) {
    Swal.fire({
        title: `Update ${title}`,
        html: `
<div>
<input type="hidden" id="id" class="swal2-input" placeholder="id" value="${id}">
QTY<input type="number" id="qty" class="swal2-input" placeholder="qty" value="${qty}">
PRICE<input type="number" id="price" class="swal2-input" placeholder="price" value="${price}">
</div>
`,
        focusConfirm: false,
        showCancelButton: true,
        confirmButtonText: 'Update',
        showLoaderOnConfirm: true,
        preConfirm: () => {
            // Read the edited values back out of the dialog's inputs.
            const id = Swal.getPopup().querySelector('#id').value
            const qty = Swal.getPopup().querySelector('#qty').value
            const price = Swal.getPopup().querySelector('#price').value
            // Returning the request makes Swal's loader wait for it.
            return $.ajax({
                type: 'POST',
                url: url + `/salary/update_salary`,
                data: {
                    id: id,
                    qty: qty,
                    price: price
                },
                dataType: 'json',
                beforeSend: function(e) {
                    if (e && e.overrideMimeType) {
                        e.overrideMimeType("application/json;charset=UTF-8");
                    }
                },
                success: function () {
                }
            })
        },
        allowOutsideClick: () => !Swal.isLoading()
    }).then((result) => {
        if (result.isConfirmed) {
            Swal.fire({
                icon: "success",
                title: "Data berhasil ditambahkan!!!",
            })
            // Reload shortly after so the table reflects the update.
            setTimeout(function () {
                window.location.reload(1)
            }, 1000);
        }
    })
}
|
# frozen_string_literal: true

source "https://rubygems.org"

# Pull in this gem's own runtime/development dependencies from the .gemspec.
gemspec

# NOTE(review): rails is capped below 4 — presumably for legacy
# compatibility; confirm whether the cap is still required.
gem 'rails', '< 4'
gem 'test-unit', '~> 3.0'
|
# git - the simple guide
- - -
## git - the simple guide
### just a simple guide for getting started with git
- - -
## download and install
* [download git for OSX](http://code.google.com/p/git-osx-installer/downloads/list?can=3)
* or use [homebrew](http://mxcl.github.com/homebrew/):
* install brew: `$> /usr/bin/ruby -e "$(curl -fsSL https://raw.github.com/gist/323731)"`
* install git through brew: `$> brew install git`
* [download git for Linux](http://book.git-scm.com/2_installing_git.html)
* [download git for Windows](http://code.google.com/p/msysgit/downloads/list?can=3)
- - -
## setup
* [setup git for OSX](http://help.github.com/mac-set-up-git/)
* [setup git for Linux](http://help.github.com/linux-set-up-git/)
* [setup git for Windows](http://help.github.com/win-set-up-git/)
- - -
## create a new repository
To create a new git repository,
create a new directory, open it and perform a
```
git init
```
- - -
## checkout a repository
Create a working copy of a local repository by running the command
```
git clone /path/to/repository
```
When using a remote server, the command will be
```
git clone username@host:/path/to/repository
```
- - -
## workflow
Your local repository consists of three "trees" maintained by git:
* your `Working Directory` which holds the actual files
* the `Index` which acts as a staging area
* the `HEAD` which points to the last commit you've made

- - -
## add & commit
To propose changes (add it to the **Index**) use
```
git add <filename>
```
```
git add . -A
```
This is the first step in the basic git workflow.
To actually commit these changes use
```
git commit -a -m "Commit message"
```
Now the file is committed to the **HEAD**, but not in your remote repository yet.
- - -
## pushing changes
Your changes are now in the **HEAD** of your local working copy.
To send those changes to your remote repository, execute
```
git push origin master
```
Change master to whatever branch you want to push your changes to.
If you have not cloned an existing repository and want to connect your repository to a remote server, add it with
```
git remote add origin <server>
```
Now you are able to push your changes to the selected remote server.
- - -
## branching
Branches are used to develop features isolated from each other.
The master branch is the "default" branch when you create a repository.
Use other branches for development and merge them back to the master branch upon completion.

To create a new branch named `feature_x` and switch to it use
```
git checkout -b feature_x
```
switch back to master
```
git checkout master
```
and delete the branch again
```
git branch -d feature_x
```
A branch is not available to others unless you push the branch to your remote repository
```
git push origin <branch>
```
- - -
## update & merge
To update your local repository to the newest commit, execute
```
git pull
```
in your working directory to fetch and merge remote changes.
To merge another branch into your active branch (e.g. master), use
```
git merge <branch>
```
In both cases git tries to auto-merge changes.
Unfortunately, this is not always possible and results in conflicts.
You are responsible to merge those conflicts manually by editing the files shown by git.
After changing, mark them as merged with
```
git add <filename>
```
and then commit.
Before merging changes, you can also preview them by using
```
git diff <source_branch> <target_branch>
```
- - -
## tagging
It's recommended to create tags for software releases.
This is a known concept, which also exists in SVN.
To create a new tag named 1.0.0 execute
```
git tag 1.0.0 1b2e1d63ff
```
The *1b2e1d63ff* stands for the first 10 characters of the commit id you want to reference with your tag.
To get the commit id use
```
git log
```
You can also use fewer characters of the commit id, it just has to be unique.
- - -
## replace local changes
In case you did something wrong (which for sure never happens ;) you can replace local changes using the command
```
git checkout -- <filename>
```
this replaces the changes in your working tree with the last content in HEAD.
Changes already added to the index, as well as new files, will be kept.
If you instead want to drop all your local changes and commits,
fetch the latest history from the server and point your local master branch at it like this
```
git fetch origin
git reset --hard origin/master
```
- - -
## useful hints
built-in git GUI
```
gitk
```
use colorful git output
```
git config color.ui true
```
show log on just one line per commit
```
git config format.pretty oneline
```
use interactive adding
```
git add -i
```
- - -
## links & resources
graphical clients:
* [GitX (L) (OSX, open source)](http://gitx.laullon.com/)
* [Tower (OSX)](http://www.git-tower.com/)
* [Source Tree (OSX, free)](http://www.sourcetreeapp.com/)
* [GitHub for Mac (OSX, free)](http://mac.github.com/)
guides:
* [Git Community Book](http://book.git-scm.com/)
* [Pro Git](http://progit.org/book/)
* [Think like a git](http://think-like-a-git.net/)
* [GitHub Help](http://help.github.com/)
* [A Visual Git Guide](http://marklodato.github.com/visual-git-guide/index-en.html)
more on [GitHub cheat sheets](http://help.github.com/git-cheat-sheets/).
- - -
## credits
Roger Dudler, @tfnico, @fhd and Namics
Please report issues on [GitHub](https://github.com)
|
package panda.mvc.view.ftl;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map.Entry;
import java.util.Properties;
import javax.servlet.ServletContext;
import freemarker.cache.TemplateLoader;
import freemarker.ext.jsp.TaglibFactory;
import freemarker.template.Configuration;
import freemarker.template.ObjectWrapper;
import freemarker.template.TemplateException;
import freemarker.template.TemplateExceptionHandler;
import panda.io.Streams;
import panda.ioc.annotation.IocBean;
import panda.ioc.annotation.IocInject;
import panda.lang.Charsets;
import panda.log.Log;
import panda.log.Logs;
import panda.mvc.ActionContext;
import panda.mvc.MvcConstants;
import panda.mvc.view.taglib.TagLibrary;
import panda.mvc.view.taglib.TagLibraryManager;
/**
 * Configures and exposes the Freemarker {@link Configuration} used to render
 * MVC views: sets up the object wrapper, the template loader, optional
 * settings from a properties file, JSP taglib support, and the per-request
 * template model.
 */
@IocBean(create="initialize")
public class FreemarkerManager {
	private static final Log LOG = Logs.getLog(FreemarkerManager.class);

	/** Key under which the JSP taglib factory is exposed to templates. */
	public static final String KEY_JSP_TAGLIBS = "JspTaglibs";

	/** Classpath location of the optional Freemarker settings file. */
	@IocInject(value=MvcConstants.FREEMARKER_SETTINGS, required=false)
	protected String settings = "freemarker.properties";

	@IocInject(required=false)
	protected ServletContext servletContext;

	@IocInject
	protected TemplateLoader templateLoader;

	@IocInject(value = MvcConstants.FREEMARKER_WRAPPER_ALT_MAP, required = false)
	protected boolean altMapWrapper = true;

	@IocInject(value = MvcConstants.FREEMARKER_BEANWRAPPER_CACHE, required = false)
	protected boolean cacheBeanWrapper;

	// NOTE(review): `statics` is declared but never used in this class as
	// shown — confirm whether it is injected or used elsewhere.
	protected String statics;

	@IocInject
	protected TagLibraryManager taglibManager;

	protected Configuration config;
	protected ObjectWrapper wrapper;
	protected TaglibFactory jspTaglibs;

	public Configuration getConfig() {
		return config;
	}

	public ObjectWrapper getWrapper() {
		return wrapper;
	}

	/** Alias of {@link #getConfig()} — both return the same field. */
	public Configuration getConfiguration() {
		return config;
	}

	/** Builds the template data model for a request using the default wrapper. */
	public ActionHash buildTemplateModel(ActionContext ac) {
		return buildTemplateModel(ac, wrapper);
	}

	/**
	 * Builds the template data model: the action context, registered tag
	 * libraries, JSP taglibs (when running in a servlet container) and the
	 * safe-include macro.
	 */
	public ActionHash buildTemplateModel(ActionContext ac, ObjectWrapper wrapper) {
		ActionHash model = new ActionHash(wrapper, ac);
		if (taglibManager != null) {
			for (Entry<String, TagLibrary> en : taglibManager.getTagLibraries().entrySet()) {
				model.put(en.getKey(), en.getValue().getModels(ac));
			}
		}
		if (jspTaglibs != null) {
			model.put(KEY_JSP_TAGLIBS, jspTaglibs);
		}
		model.put(SafeInclude.MACRO_NAME, new SafeInclude());
		return model;
	}

	/**
	 * IOC entry point (see {@code @IocBean(create="initialize")}): wires the
	 * object wrapper, the configuration and JSP taglib support.
	 */
	public void initialize() throws IOException, TemplateException {
		// Process object_wrapper init-param out of order:
		initObjectWrapper();
		if (LOG.isDebugEnabled()) {
			LOG.debug("Using object wrapper of class " + wrapper.getClass().getName());
		}
		// create configuration
		initConfiguration();
		// jsp taglib support (only available inside a servlet container)
		if (servletContext != null) {
			jspTaglibs = new TaglibFactory(servletContext);
		}
	}

	/**
	 * Create the instance of the freemarker Configuration object.
	 * <p/>
	 * this implementation
	 * <ul>
	 * <li>obtains the default configuration from Configuration.getDefaultConfiguration()
	 * <li>sets up template loading from a ClassTemplateLoader and a WebappTemplateLoader
	 * <li>sets up the object wrapper to be the BeansWrapper
	 * <li>loads settings from the classpath file /freemarker.properties
	 * </ul>
	 * @throws IOException if an IO error occurred
	 * @throws TemplateException if a template error occurred
	 */
	@SuppressWarnings("deprecation")
	protected void initConfiguration() throws IOException, TemplateException {
		config = new Configuration();
		config.setObjectWrapper(wrapper);
		config.setTemplateLoader(templateLoader);
		config.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
		config.setDefaultEncoding(Charsets.UTF_8);
		config.setLocalizedLookup(true);
		config.setWhitespaceStripping(true);
		loadSettings();
	}

	/** Creates the bean wrapper used to expose Java objects to templates. */
	protected void initObjectWrapper() {
		wrapper = new AltBeanWrapper(altMapWrapper);
		((AltBeanWrapper)wrapper).setUseCache(cacheBeanWrapper);
	}

	/**
	 * Load the settings from the /freemarker.properties file on the classpath
	 *
	 * @throws IOException if an IO error occurred
	 * @throws TemplateException if a template error occurred
	 * @see freemarker.template.Configuration#setSettings for the definition of valid settings
	 */
	protected void loadSettings() throws IOException, TemplateException {
		InputStream in = null;
		try {
			in = Streams.openInputStream(settings);
			if (in != null) {
				Properties p = new Properties();
				p.load(in);
				for (Object o : p.keySet()) {
					String name = (String)o;
					String value = (String)p.get(name);
					if (name == null) {
						throw new IllegalArgumentException(
							"init-param without param-name. Maybe the " + settings + " is not well-formed?");
					}
					if (value == null) {
						throw new IllegalArgumentException(
							"init-param without param-value. Maybe the " + settings + " is not well-formed?");
					}
					config.setSetting(name, value);
				}
			}
		}
		catch (FileNotFoundException e) {
			// skip: the settings file is optional
		}
		finally {
			Streams.safeClose(in);
		}
	}
}
|
"""
Arguments for configuring loss surface computation.
# Parameters
- `xmin::Float32`, `xmax::Float32`, `xnum::Int32`: define span of the surface
and amount of point in the `x` direction.
- `ymin::Float32`, `ymax::Float32`, `ynum::Int32`: define span of the surface
and amount of point in the `y` direction.
- `use_gpu::Bool`: Whether to use gpu. If `true` then it is up to you to
transfer mini-batch in `evaluation_step` function to the gpu.
- `x_directions_file::Union{Nothing, String}`: If provided, directions
for `x` axis will be loaded from it. Otherwise, random initialized.
Should match in shape with model's weights.
- `y_directions_file::Union{Nothing, String}`: If provided, directions
for `y` axis will be loaded from it. Otherwise, random initialized.
Should match in shape with model's weights.
!!! note
If `use_gpu = true` then it is up to you to transfer
mini-batch in `evaluation_step` function to the gpu.
!!! note
Both x- and y- directions files in should contain directions
under `checkpoint_weights` key.
Saving weights like this in code would look like
`BSON.@save file checkpoint_weights`.
"""
@with_kw struct SurfaceArgs
xmin::Float32 = -1
xmax::Float32 = 1
xnum::Int32 = 20
ymin::Float32 = -1
ymax::Float32 = 1
ynum::Int32 = 20
batch_size::Int32 = 64
use_gpu::Bool = true
x_directions_file::Union{Nothing, String} = nothing
y_directions_file::Union{Nothing, String} = nothing
end
"""
Compute the average loss of `model` over all batches in `dataloader`,
using the caller-supplied `evaluation_step` for each mini-batch.

NOTE(review): `total` assumes every batch contains exactly
`dataloader.batchsize` samples — a smaller final batch would bias the
average; confirm the loader drops partial batches.
"""
function evaluation_loop(
    model, dataloader::DataLoader, evaluation_step::Function, args::SurfaceArgs,
)::Float32
    total = Float32(length(dataloader) * dataloader.batchsize)
    total_loss = 0.0f0
    for batch in dataloader
        batch_loss = evaluation_step(model, batch, args) * dataloader.batchsize
        # Move the scalar off the device before accumulating on the host.
        total_loss += batch_loss |> cpu
    end
    total_loss / total
end
"""
Build the pair of direction vectors (x, y) that span the loss surface.

For each axis, directions are loaded from the corresponding BSON file when
one is given and exists (under the `checkpoint_weights` key); otherwise they
are randomly initialized from the model's parameters.
"""
function get_directions(
    model_parameters::Params, args::SurfaceArgs,
)::Tuple{Params, Params}
    directions = Array{Params}(undef, 2)
    directions_files = (args.x_directions_file, args.y_directions_file)
    is_targets = [isa(df, String) && isfile(df) for df in directions_files]
    @inbounds for (i, (is_target, target_file)) in enumerate(zip(is_targets, directions_files))
        if is_target
            # Saved directions: load and align them with the current parameters.
            @load target_file checkpoint_weights
            i_directions = create_directions(model_parameters, params(checkpoint_weights))
        else
            # No file: random directions matching the parameter shapes.
            i_directions = create_directions(model_parameters)
        end
        directions[i] = i_directions
    end
    tuple(directions...)
end
"""
```julia
function create_surface(
model, dataloader::DataLoader, evaluation_step::Function, args::SurfaceArgs,
)
```
Create loss surface.
# Parameters
- `model`: Model to use in loss function.
- `dataloader::DataLoader`: Dataset on which to evaluate loss function.
- `evaluation_step`:
Custom-defined function which given model, mini-batch and args,
computes loss on that mini-batch.
- `args::SurfaceArgs`: Parameters used when computing surface.
!!! note
If you specified `use_gpu` in `args`, then it is up to you,
to transfer mini-batch in `evaluation_step` function to gpu.
"""
function create_surface(
model, dataloader::DataLoader, evaluation_step::Function, args::SurfaceArgs,
)
model_parameters = params(model)
directions = get_directions(model_parameters, args)
if args.use_gpu
model = model |> gpu
end
testmode!(model)
x_coordinates = range(args.xmin, stop=args.xmax, length=args.xnum) |> collect
y_coordinates = range(args.ymin, stop=args.ymax, length=args.ynum) |> collect
loss_surface = fill(-1.0f0, args.xnum, args.ynum)
it = product(enumerate(y_coordinates), enumerate(x_coordinates))
@showprogress 2 "Creating surface " for ((j, y), (i, x)) in it
shifted = shift_weights(model_parameters, directions, (x, y))
loadparams!(model, shifted)
eval_loss = evaluation_loop(model, dataloader, evaluation_step, args)
loss_surface[i, j] = eval_loss
end
(x_coordinates, y_coordinates), loss_surface
end
|
;; EDN request/response formats for cljs-ajax, with reader conditionals
;; covering both Clojure (JVM streams) and ClojureScript (strings).
(ns ajax.edn
  (:require [ajax.core :refer [map->ResponseFormat]]
            [ajax.protocols :refer [-body]]
            #?@ (:cljs [[cljs.reader :as edn]]
                 :clj [[clojure.edn :as edn]
                       [clojure.java.io :refer [reader]]]))
  #? (:clj (:import (java.io
                     ByteArrayInputStream OutputStreamWriter
                     PushbackReader InputStreamReader
                     InputStream OutputStream))))

;; Read the response body as EDN. On the JVM the body is an InputStream
;; decoded as UTF-8; in ClojureScript the body string is parsed directly.
(defn edn-read [xhrio]
  #? (:cljs (-> xhrio -body edn/read-string)
      :clj (-> ^InputStream (-body xhrio)
               (InputStreamReader. "UTF-8")
               PushbackReader.
               edn/read)))

;; Response format record parsing `application/edn` bodies.
;; The single-argument arity ignores its argument (API compatibility).
(defn edn-response-format
  ([] (map->ResponseFormat {:read edn-read
                            :description "EDN"
                            :content-type ["application/edn"]}))
  ([_] (edn-response-format)))

;; JVM-only writer: prints `params` as EDN to the output stream as UTF-8.
#? (:clj (defn edn-write
           [^OutputStream writer params]
           (binding [*out* (OutputStreamWriter. writer "UTF-8")]
             (pr params)
             (flush))))

;; Request format: serializes request params as EDN (pr-str in cljs,
;; streaming writer on the JVM).
(defn edn-request-format
  ([] {:write #? (:cljs pr-str
                  :clj edn-write)
       :content-type ["application/edn"]})
  ([_] (edn-request-format)))
|
(ns vip.data-processor.validation.db.v3-0.jurisdiction-references
(:require [vip.data-processor.validation.db.util :as util]
[korma.core :as korma]
[vip.data-processor.errors :as errors]))
;; Select rows of `from-table` whose jurisdiction_id is non-empty but does
;; not match the id of any state, locality, precinct, precinct split or
;; electoral district. Five left joins plus the all-nil where clause
;; implement the anti-join.
(defn unmatched-jurisdiction-references [tables from-table]
  (let [table (tables from-table)
        table-name (:alias table)
        jurisdiction-id (util/column-name table-name "jurisdiction_id")]
    (korma/select table
      (korma/join :left
                  [(:states tables) :states]
                  (= :states.id jurisdiction-id))
      (korma/join :left
                  [(:localities tables) :localities]
                  (= :localities.id jurisdiction-id))
      (korma/join :left
                  [(:precincts tables) :precincts]
                  (= :precincts.id jurisdiction-id))
      (korma/join :left
                  [(:precinct-splits tables) :precinct_splits]
                  (= :precinct_splits.id jurisdiction-id))
      (korma/join :left
                  [(:electoral-districts tables) :electoral_districts]
                  (= :electoral_districts.id jurisdiction-id))
      (korma/where (and (not= jurisdiction-id "")
                        (= :states.id nil)
                        (= :localities.id nil)
                        (= :precincts.id nil)
                        (= :precinct_splits.id nil)
                        (= :electoral_districts.id nil))))))

;; Add an :unmatched-reference error for every bad jurisdiction_id found in
;; `table`. NOTE(review): `filename` is destructured but unused here —
;; confirm whether it should appear in the error payload.
(defn validate-jurisdiction-reference [ctx {:keys [filename table]}]
  (let [unmatched-references (unmatched-jurisdiction-references
                              (:tables ctx) table)]
    (reduce (fn [ctx unmatched-reference]
              (errors/add-errors ctx :errors table (:id unmatched-reference) :unmatched-reference
                                 (select-keys unmatched-reference [:jurisdiction_id])))
            ctx unmatched-references)))

;; Run the jurisdiction-reference validation over every data spec that has
;; a jurisdiction_id column.
(defn validate-jurisdiction-references [{:keys [data-specs] :as ctx}]
  (let [jurisdiction-tables (filter
                             (fn [spec] (some #{"jurisdiction_id"}
                                              (map :name (:columns spec))))
                             data-specs)]
    (reduce validate-jurisdiction-reference ctx jurisdiction-tables)))
|
// Maps action names to delegates so an operation can be selected by key
// instead of a switch statement.
var acoes = new Dictionary<string, Action<int>> {
    {"Criar", (parametro) => Criar(parametro) },
    {"Editar", (parametro) => Editar(parametro) },
    {"Apagar", (parametro) => Apagar(parametro) },
    {"Imprimir", (parametro) => Imprimir(parametro) }
};
acoes["Criar"](1); // will execute the Criar method
// Source: https://pt.stackoverflow.com/q/90058/101
|
// Straight-line fit y = a + b*x by chi-square minimization (Numerical
// Recipes-style "fitab" object). Results live in the members: intercept a,
// slope b, their standard errors siga/sigb, chi-square chi2, goodness-of-fit
// q, and (unweighted fit only) the estimated data scatter sigdat.
struct Fitab {
	Int ndata;
	Doub a, b, siga, sigb, chi2, q, sigdat;
	// References to the caller's data; they must outlive this object.
	VecDoub_I &x, &y, &sig;
	// Weighted fit: ssig holds the per-point measurement standard deviations.
	// q is the probability that chi2 this large arises by chance, from the
	// incomplete gamma function gammq.
	Fitab(VecDoub_I &xx, VecDoub_I &yy, VecDoub_I &ssig)
		: ndata(xx.size()), x(xx), y(yy), sig(ssig), chi2(0.), q(1.), sigdat(0.) {
		Gamma gam;
		Int i;
		Doub ss=0.,sx=0.,sy=0.,st2=0.,t,wt,sxoss;
		b=0.0;
		// Accumulate weighted sums (weight = 1/sigma^2).
		for (i=0;i<ndata;i++) {
			wt=1.0/SQR(sig[i]);
			ss += wt;
			sx += x[i]*wt;
			sy += y[i]*wt;
		}
		sxoss=sx/ss;
		// Solve for the slope using the shifted variable t = (x - <x>)/sigma.
		for (i=0;i<ndata;i++) {
			t=(x[i]-sxoss)/sig[i];
			st2 += t*t;
			b += t*y[i]/sig[i];
		}
		b /= st2;
		a=(sy-sx*b)/ss;
		siga=sqrt((1.0+sx*sx/(ss*st2))/ss);
		sigb=sqrt(1.0/st2);
		for (i=0;i<ndata;i++) chi2 += SQR((y[i]-a-b*x[i])/sig[i]);
		if (ndata>2) q=gam.gammq(0.5*(ndata-2),0.5*chi2);
	}
	// Unweighted fit: all points get unit weight. `sig` is bound to `xx`
	// only to satisfy the reference member and is never read here. The
	// scatter sigdat is estimated from chi2 and used to scale siga/sigb;
	// q stays at 1.0 since no goodness-of-fit can be computed without sigmas.
	Fitab(VecDoub_I &xx, VecDoub_I &yy)
		: ndata(xx.size()), x(xx), y(yy), sig(xx), chi2(0.), q(1.), sigdat(0.) {
		Int i;
		Doub ss,sx=0.,sy=0.,st2=0.,t,sxoss;
		b=0.0;
		for (i=0;i<ndata;i++) {
			sx += x[i];
			sy += y[i];
		}
		ss=ndata;
		sxoss=sx/ss;
		for (i=0;i<ndata;i++) {
			t=x[i]-sxoss;
			st2 += t*t;
			b += t*y[i];
		}
		b /= st2;
		a=(sy-sx*b)/ss;
		siga=sqrt((1.0+sx*sx/(ss*st2))/ss);
		sigb=sqrt(1.0/st2);
		for (i=0;i<ndata;i++) chi2 += SQR(y[i]-a-b*x[i]);
		if (ndata > 2) sigdat=sqrt(chi2/(ndata-2));
		siga *= sigdat;
		sigb *= sigdat;
	}
};
|
const shim = require('./shim');
const Buffer = require('./buffer')
const parse = require('./parse')
const { pbkdf2 } = require('./settings')
// Internal helper: returns a Buffer with the SHA-256 digest of the parsed
// message, used as the data to be signed.
// NOTE(review): the digest algorithm name comes from `pbkdf2.hash` in
// settings — presumably 'SHA-256'; confirm.
const sha256hash = async (mm) => {
  const m = parse(mm)
  const hash = await shim.subtle.digest({name: pbkdf2.hash}, new shim.TextEncoder().encode(m))
  return Buffer.from(hash)
}
module.exports = sha256hash
|
const MobileAppNav = () => {
return <nav className="mt-5 flex flex-wrap lg:flex-nowrap justify-center gap-2 gap-x-2">
<a href="/" className="transition-all transform hover:scale-105">
<img
src="https://getir.com/_next/static/images/appstore-tr-141ed939fceebdcee96af608fa293b31.svg"
className="md:h-8 lg:h-auto"
alt="App Store"
/>
</a>
<a href="/" className="transition-all transform hover:scale-105">
<img
src="https://getir.com/_next/static/images/googleplay-tr-6b0c941b7d1a65d781fb4b644498be75.svg"
className="md:h-8 lg:h-auto"
alt="Google Play Store"
/>
</a>
<a href="/" className="transition-all transform hover:scale-105">
<img
src="https://getir.com/_next/static/images/huawei-appgallery-tr-4b890fa3167bc62f9069edaf45aa7f30.svg"
className="md:h-8 lg:h-auto"
alt="App Galery"
/>
</a>
</nav>;
};
export default MobileAppNav;
|
// Bounded blocking queue built on intrinsic wait/notify over a single
// monitor. NOTE(review): the BlockingQueue interface implemented here is
// presumably project-local (java.util.concurrent.BlockingQueue has a much
// larger contract) — confirm.
public class BlockingQueueImpl<T> implements BlockingQueue<T>
{
    // FIFO storage; accessed only while holding `mutex`.
    private final Queue<T> q = new LinkedList<T>();
    // Capacity bound; put() blocks while the queue is at this size.
    private final int maxSize;
    // Single monitor serving both "not empty" and "not full" conditions.
    private final Object mutex = new Object();

    // maxSize must be positive.
    public BlockingQueueImpl(int maxSize) {
        if (maxSize <= 0)
            throw new IllegalArgumentException("maxSize=" + maxSize);
        this.maxSize = maxSize;
    }

    // Removes and returns the head, waiting while the queue is empty.
    // Notifying before remove() is safe: woken threads cannot proceed until
    // this thread releases the monitor at the end of the block.
    public T take() throws InterruptedException {
        synchronized (mutex) {
            while (q.isEmpty()) mutex.wait();
            mutex.notifyAll(); // Wake up threads waiting to put (or take)
            return q.remove();
        }
    }

    // Appends obj, waiting while the queue is at capacity.
    public void put(T obj) throws InterruptedException {
        synchronized (mutex) {
            while (q.size() >= maxSize) mutex.wait();
            mutex.notifyAll(); // Wake up threads waiting to take (or put)
            q.add(obj);
        }
    }
}
|
# Smoke tests for IntervalRootFinding.roots over 1D, 2D, complex and 3D
# problems, plus stationary points of a 2D function.
using IntervalArithmetic, IntervalRootFinding, StaticArrays
using Base.Test  # NOTE(review): pre-1.0 test module; Julia >= 0.7 would use `using Test` — confirm target version.

@testset "1D roots" begin
    rts = roots(sin, -5..5)
    @test length(rts) == 4
    @test length(find(x->x==:unique, [root.status for root in rts])) == 2
    # Bisection alone cannot certify uniqueness; Newton refines afterwards.
    rts = roots(sin, -5..6, Bisection)
    @test length(rts) == 3
    rts = roots(sin, rts, Newton)
    @test all(root.status == :unique for root in rts)
end

@testset "2D roots" begin
    # Circle intersected with the line y = 2x.
    f(x, y) = SVector(x^2 + y^2 - 1, y - 2x)
    f(X) = f(X...)
    rts = roots(f, (-6..6) × (-6..6), Bisection, 1e-3)
    @test length(rts) == 4
    rts = roots(f, rts, Newton)
    @test length(rts) == 2
end

@testset "Complex roots" begin
    # Cube roots of unity inside a complex box.
    x = -5..6
    Xc = Complex(x, x)
    f(z) = z^3 - 1
    rts = roots(f, Xc, Bisection, 1e-3)
    @test length(rts) == 7
    rts = roots(f, rts, Newton)
    @test length(rts) == 3
    rts = roots(f, Xc)
    @test length(rts) == 3
end

# From R docs:
# https://www.rdocumentation.org/packages/pracma/versions/1.9.9/topics/broyden
@testset "3D roots" begin
    function g(x)
        (x1, x2, x3) = x
        SVector(    x1^2 + x2^2 + x3^2 - 1,
                    x1^2 + x3^2 - 0.25,
                    x1^2 + x2^2 - 4x3
                )
    end
    X = (-5..5)
    rts = roots(g, IntervalBox(X, 3))
    @test length(rts) == 4
end

@testset "Stationary points" begin
    # Stationary points of sin(x)sin(y) found as roots of its gradient.
    f(xx) = ( (x, y) = xx; sin(x) * sin(y) )
    rts = roots(∇(f), IntervalBox(-5..6, 2), Newton, 1e-5)
    @test length(rts) == 25
end
|
package com.twitter.chill
import scala.collection.immutable.{HashMap, HashSet, ListMap, ListSet, Queue, TreeMap, TreeSet, WrappedString}
import scala.collection.mutable
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
/**
 * Verifies that the ScalaKryoInstantiator provides registrations for the
 * common Scala data structures, so that projects persisting serialized data
 * can run Kryo with setRegistrationRequired(true).
 */
class StandardDataRegistrationsSpec extends AnyWordSpec with Matchers {
  s"""
     |For projects using chill to persist serialized data (for example in event
     |sourcing scenarios), it can be beneficial or even required to turn on the
     |Kryo.setRegistrationRequired setting. For such projects, chill should provide
     |registrations for the most common data structures that are likely to be
     |persisted.
     |
     |Note that for sorted sets and maps, only the natural orderings for Byte, Short,
     |Int, Long, Float, Double, Boolean, Char, and String are registered (and not for
     |example the reverse orderings).
     |
     |In addition to what is ensured by ${classOf[KryoSpec].getSimpleName},
     |the ScalaKryoInstantiator with setRegistrationRequired(true)""".stripMargin
    .should {
      // Instantiator that makes Kryo refuse any unregistered class, so a
      // missing registration fails the round trip below.
      def registrationRequiredInstantiator = new ScalaKryoInstantiator() {
        override def newKryo: KryoBase = {
          val k = super.newKryo
          k.setRegistrationRequired(true)
          k
        }
      }
      val kryo = KryoPool.withByteArrayOutputStream(4, registrationRequiredInstantiator)
      // Serialize then deserialize `original` and assert the round trip is
      // lossless. Arrays are compared element-wise because Array equality
      // is reference equality.
      // NOTE(review): roundtrip is invoked with multiple arguments below —
      // the parameter is presumably varargs (AnyRef*); confirm.
      def roundtrip(original: AnyRef): Unit =
        try {
          val serde = kryo.fromBytes(kryo.toBytesWithClass(original))
          (original, serde) match {
            case (originalArray: Array[_], serdeArray: Array[_]) =>
              assert(originalArray.toSeq == serdeArray.toSeq)
            case _ =>
              assert(serde == original)
          }
        } catch {
          case e: Throwable =>
            val message =
              s"exception during serialization round trip for $original of ${original.getClass}:\n" +
                e.toString.linesIterator.next
            assert(false, message)
        }
      // (n, n+1) pairs used to build maps of several sizes.
      def tuples(count: Int): Seq[(Int, Int)] = Seq.range(0, count).map(n => (n, n + 1))
      "serialize the empty map" in { roundtrip(Map()) }
      "serialize the one-element map" in { roundtrip(Map(1 -> 2)) }
      "serialize a filtered map" in { roundtrip(Map(1 -> 2).filterKeys(_ != 2).toMap) }
      "serialize a mapped values map" in { roundtrip(Map(1 -> 2).mapValues(_ + 1).toMap) }
      "serialize larger maps" in {
        roundtrip(Map(tuples(2): _*), Map(tuples(3): _*), Map(tuples(4): _*), Map(tuples(5): _*))
      }
      "serialize the empty hash map" in { roundtrip(HashMap()) }
      "serialize the one-element hash map" in { roundtrip(HashMap(1 -> 2)) }
      "serialize larger hash maps" in {
        roundtrip(
          HashMap(tuples(2): _*),
          HashMap(tuples(3): _*),
          HashMap(tuples(4): _*),
          HashMap(tuples(5): _*)
        )
      }
      "serialize the empty list map" in { roundtrip(ListMap()) }
      "serialize the one-element list map" in { roundtrip(ListMap(1 -> 2)) }
      "serialize larger list maps" in {
        roundtrip(
          ListMap(tuples(2): _*),
          ListMap(tuples(3): _*),
          ListMap(tuples(4): _*),
          ListMap(tuples(5): _*)
        )
      }
      "serialize the empty tree map" in { roundtrip(TreeMap.empty[Int, Int]) }
      "serialize the one-element tree map" in { roundtrip(TreeMap(1 -> 2)) }
      "serialize larger tree maps" in {
        roundtrip(
          TreeMap(tuples(2): _*),
          TreeMap(tuples(3): _*),
          TreeMap(tuples(4): _*),
          TreeMap(tuples(5): _*)
        )
      }
      "serialize the empty set" in { roundtrip(Set()) }
      "serialize larger sets" in {
        roundtrip(Set(1), Set(1, 2), Set(1, 2, 3), Set(1, 2, 3, 4), Set(1, 2, 3, 4, 5))
      }
      "serialize the empty hash set" in { roundtrip(HashSet()) }
      "serialize the one-element hash set" in { roundtrip(HashSet(1)) }
      "serialize larger hash sets" in {
        roundtrip(HashSet(1, 2), HashSet(1, 2, 3), HashSet(1, 2, 3, 4), HashSet(1, 2, 3, 4, 5))
      }
      "serialize the empty list set" in { roundtrip(ListSet()) }
      "serialize the one-element list set" in { roundtrip(ListSet(1)) }
      "serialize larger list sets" in {
        roundtrip(ListSet(1, 2), ListSet(1, 2, 3), ListSet(1, 2, 3, 4), ListSet(1, 2, 3, 4, 5))
      }
      "serialize the empty tree set" in { roundtrip(TreeSet.empty[Int]) }
      "serialize the one-element tree set" in { roundtrip(TreeSet(1)) }
      "serialize larger tree sets" in {
        roundtrip(TreeSet(1, 2), TreeSet(1, 2, 3), TreeSet(1, 2, 3, 4), TreeSet(1, 2, 3, 4, 5))
      }
      "serialize a map's key set" in { roundtrip(Map(1 -> 2).keySet) }
      "serialize the empty list" in { roundtrip(Nil) }
      "serialize the one-element list" in { roundtrip(List(1)) }
      "serialize alternative ways to instantiate lists" in { roundtrip(List.empty[Int], 1 :: Nil) }
      "serialize larger lists" in {
        roundtrip(List(1, 2), List(1, 2, 3), List(1, 2, 3, 4), List(1, 2, 3, 4, 5))
      }
      "serialize the empty queue" in { roundtrip(Queue.empty[Int]) }
      "serialize the no-elements queue" in { roundtrip(Queue()) }
      "serialize larger queues" in {
        roundtrip(Queue(1), Queue(1, 2), Queue(1, 2, 3), Queue(1, 2, 3, 4), Queue(1, 2, 3, 4, 5))
      }
      "serialize a range" in { roundtrip(Range(2, 10, 3)) }
      "serialize vectors" in {
        roundtrip(
          Vector(),
          Vector(1),
          Vector(1, 2),
          Vector(1, 2, 3),
          Vector(1, 2, 3, 4),
          Vector(1, 2, 3, 4, 5)
        )
      }
      "serialize the empty stream" in { roundtrip(Stream()) }
      "serialize the one-element stream" in { roundtrip(Stream(1)) }
      "serialize larger streams" in {
        roundtrip(Stream(1, 2), Stream(1, 2, 3), Stream(1, 2, 3, 4), Stream(1, 2, 3, 4, 5))
      }
      "serialize the options" in { roundtrip(None, Some(1), Option.empty[Int], Option(3)) }
      "serialize the eithers" in { roundtrip(Left(2), Right(4), Left.apply[Int, Int](3)) }
      // Primitive, reference and erased-element arrays each exercise a
      // distinct Kryo registration.
      "serialize the empty array" in { roundtrip(Array()) }
      "serialize empty Int arrays" in { roundtrip(Array.empty[Int]) }
      "serialize Int arrays" in { roundtrip(Array(4, 2)) }
      "serialize empty Short arrays" in { roundtrip(Array.empty[Short]) }
      "serialize Short arrays" in { roundtrip(Array(3.toShort, 4.toShort)) }
      "serialize empty Byte arrays" in { roundtrip(Array.empty[Byte]) }
      "serialize Byte arrays" in { roundtrip(Array(3.toByte, 4.toByte)) }
      "serialize empty Long arrays" in { roundtrip(Array.empty[Long]) }
      "serialize Long arrays" in { roundtrip(Array(3L, 5L)) }
      "serialize empty Float arrays" in { roundtrip(Array.empty[Float]) }
      "serialize Float arrays" in { roundtrip(Array(3f, 5.3f)) }
      "serialize empty Double arrays" in { roundtrip(Array.empty[Double]) }
      "serialize Double arrays" in { roundtrip(Array(4d, 3.2d)) }
      "serialize empty Boolean arrays" in { roundtrip(Array.empty[Boolean]) }
      "serialize Boolean arrays" in { roundtrip(Array(true, false)) }
      "serialize empty Char arrays" in { roundtrip(Array.empty[Char]) }
      "serialize Char arrays" in { roundtrip(Array('a', 'b')) }
      "serialize empty String arrays" in { roundtrip(Array.empty[String]) }
      "serialize String arrays" in { roundtrip(Array("a", "")) }
      "serialize empty Object arrays" in { roundtrip(Array.empty[Object]) }
      "serialize Object arrays" in { roundtrip(Array("a", List())) }
      "serialize empty Any arrays" in { roundtrip(Array.empty[Any]) }
      "serialize Any arrays" in { roundtrip(Array("a", 3, Nil)) }
      "serialize the empty wrapped array" in { roundtrip(mutable.WrappedArray.empty[Object]) }
      "serialize empty Int wrapped arrays" in { roundtrip(mutable.WrappedArray.make(Array[Byte]())) }
      "serialize Int wrapped arrays" in { roundtrip(mutable.WrappedArray.make(Array[Byte](1, 3))) }
      "serialize empty Byte wrapped arrays" in { roundtrip(mutable.WrappedArray.make(Array[Short]())) }
      "serialize empty Short wrapped arrays" in { roundtrip(mutable.WrappedArray.make(Array[Int]())) }
      "serialize empty Long wrapped arrays" in { roundtrip(mutable.WrappedArray.make(Array[Long]())) }
      "serialize empty Float wrapped arrays" in { roundtrip(mutable.WrappedArray.make(Array[Float]())) }
      "serialize empty Double wrapped arrays" in { roundtrip(mutable.WrappedArray.make(Array[Double]())) }
      "serialize empty Boolean wrapped arrays" in { roundtrip(mutable.WrappedArray.make(Array[Boolean]())) }
      "serialize empty Char wrapped arrays" in { roundtrip(mutable.WrappedArray.make(Array[Char]())) }
      "serialize empty String wrapped arrays" in { roundtrip(mutable.WrappedArray.make(Array[String]())) }
      "serialize wrapped strings" in { roundtrip(new WrappedString("abc")) }
    }
}
|
// Layout helper: exposes reserveSpace over the given regions.
// NOTE(review): `footer` is accepted but never used — confirm whether
// footer.reserveSpace() should also be called here.
module.exports = function (center, footer) {
  return {
    reserveSpace: function () {
      center.reserveSpace();
    }
  }
};
|
use std::fmt::Display;
use askama::Template;
use graphql::{Query as GraphQLQuery, Pageable, HasPageInfo, HasTotalCount, PageInfo, Paged};
use author::Author;
/// Askama-templated GraphQL query that lists a repository's pull requests.
/// The template at `pull_requests.graphql` interpolates all three fields.
#[derive(Template)]
#[template(path = "pull_requests.graphql")]
pub struct Query<S> where S : AsRef<str> + Display {
/// Repository owner (user or organization login).
pub owner: S,
/// Repository name.
pub repository: S,
/// Pagination cursor: fetch results after this point; `None` means first page.
pub after: Option<String>,
}
impl<S> Query<S> where S : AsRef<str> + Display {
pub fn new(owner: S, repository: S) -> Self {
Query { owner, repository, after: None }
}
}
/// Renders the Askama template into the GraphQL query string sent to the API.
impl<S> GraphQLQuery for Query<S> where S : AsRef<str> + Display + Copy {
type Result = Response;
fn query(&self) -> String {
// Rendering only fails on template errors, which are build-time bugs here,
// so the unwrap is considered acceptable.
self.render().unwrap()
}
}
/// Pagination support: `after` yields a copy of this query positioned after
/// the given cursor. The `Copy` bound lets `owner`/`repository` be reused
/// without cloning.
impl<S> Pageable for Query<S> where S : AsRef<str> + Display + Copy {
type Item = PullRequest;
fn after(&self, cursor: String) -> Self {
Query{ owner: self.owner, repository: self.repository, after: Some(cursor) }
}
}
/// A single pull request as deserialized from the GraphQL API response.
/// Timestamp fields are kept as the raw strings delivered by the API.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct PullRequest {
pub number: usize,
pub url: String,
pub state: String,
pub title: String,
pub body: String,
pub created_at: String,
pub updated_at: String,
pub closed_at: Option<String>,
// `None` when the API returns no author (presumably deleted accounts) — TODO confirm.
pub author: Option<Author>,
pub merged: bool,
pub merged_at: Option<String>,
}
/// Intermediate node mirroring the GraphQL response shape: the repository's
/// paged `pullRequests` connection.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct PullRequestNode {
pull_requests: Paged<PullRequest>,
}
/// Top-level GraphQL response payload for the pull-request query.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Response {
repository: PullRequestNode,
}
/// Exposes the connection's page info so callers can drive pagination.
impl HasPageInfo for Response {
fn page_info(&self) -> &PageInfo {
&self.repository.pull_requests.page_info
}
}
/// Exposes the connection's total item count as reported by the API.
impl HasTotalCount for Response {
fn total_count(&self) -> usize {
self.repository.pull_requests.total_count
}
}
/// Consuming iteration over the pull requests contained in this page.
impl IntoIterator for Response {
type Item = PullRequest;
type IntoIter = ::std::vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter {
self.repository.pull_requests.into_iter()
}
}
|
import { useInfiniteQuery } from "react-query"
import { request } from "../utils/axios"
// Fetches one page (20 items) of Unsplash photo search results.
// `pageParam` is the 1-based page index driven by useInfiniteQuery below.
const fetchImages = (input: string, pageParam: number) => {
  // Encode the user-supplied query so spaces, '&', '#', etc. cannot corrupt
  // the query string (previously the raw input was interpolated verbatim).
  return request({
    url: `/search/photos?per_page=20&page=${pageParam}&query=${encodeURIComponent(input)}`,
  })
}
// Infinite-scroll search over photos, keyed by the query string so a new
// `input` restarts pagination from page 1.
export const useSearchImages = (input: string) => {
return useInfiniteQuery(["img-query", input], ({ pageParam = 1 }) => fetchImages(input, pageParam), {
getNextPageParam: (lastPage, pages) => {
// Pages are 1-based; the total page count is read from the first fetched
// page's payload. Returning undefined tells react-query there is no more.
if (pages.length < pages[0].data.total_pages) {
return pages.length + 1
} else {
return undefined
}
},
})
}
|
require "active_support/core_ext/class/subclasses"
module Muhimbi
  # Thin DSL around the Muhimbi SOAP conversion service. The converter reads a
  # file, Base64-encodes it, and assembles the SOAP message options; whitelisted
  # unknown methods (see #allowed_methods) act as option setters.
  class Converter
    attr_accessor :file_content,
                  :file,
                  :options

    # opts may contain :file (an object responding to #read, and usable by
    # File.extname/File.basename — e.g. a File or a path string) plus any
    # option overrides. Yields the configured converter when a block is given.
    def initialize(opts={})
      unless opts[:file].blank?
        self.file_content = encode_file(opts[:file])
        self.file = opts[:file]
      end
      self.tap do |client|
        client.options ||= {}
        client.defaults_options(opts)
        # NOTE(review): `options` is always non-nil at this point (assigned
        # above), so this `||=` never takes effect; confirm whether a
        # `merge!` of the remaining opts was intended here.
        client.options ||= opts
        yield client if block_given?
      end
    end

    # Invokes the SOAP :convert operation and returns the conversion result
    # extracted from the response hash.
    def convert
      wsdl_res = Muhimbi::Client.client.call(:convert , message: self.options )
      wsdl_res.to_hash[:convert_response][:convert_result]
    end

    # DSL entry point: calls such as `sourceFile(...)` or `openOptions(...)`
    # are folded into the SOAP options hash (see #merge_options).
    def method_missing(meth, opts = {})
      merge_options meth, opts
    end

    # Keep #respond_to? truthful for the method_missing-based DSL above.
    def respond_to_missing?(meth, include_private = false)
      allowed_methods.include?(meth.to_s.camelize(:lower)) || super
    end

    # Seeds the standard options: the encoded source file, open options derived
    # from the file name, and PDF/Full conversion settings.
    def defaults_options(opts={})
      self.sourceFile(self.file_content)
      self.openOptions({
        "FileExtension" => File.extname( self.file ).gsub(".", ""),
        "OriginalFileName"=> File.basename( self.file)
      })
      self.conversionSettings({
        "Format" => "PDF",
        "Fidelity"=> "Full"
      })
    end

    private

    # Whitelist of DSL option setters accepted by method_missing.
    def allowed_methods
      ["sourceFile", "openOptions", "conversionSettings"]
    end

    # Stores an "ns:"-prefixed option entry when `name` is whitelisted.
    def merge_options(name, opts)
      name = name.to_s.camelize(:lower)
      @options.merge! "ns:#{name}" => redo_opts(opts) if allowed_methods.include?(name)
    end

    # Prefixes hash keys with the "ns1:" SOAP namespace; non-hash values pass
    # through untouched.
    def redo_opts(opts)
      return opts unless opts.class == Hash
      Hash[opts.map {|k, v| ["ns1:#{k}", v] }]
    end

    # Reads the file and returns its strict-Base64 encoding.
    def encode_file(file)
      Base64.strict_encode64(file.read)
    end
  end
end
|
require_relative 'helper'
require 'sidekiq/api'
# Tests for Sidecloq::Scheduler: run/stop lifecycle and job enqueueing.
class TestScheduler < Sidecloq::Test
  describe 'scheduler' do
    let(:specs) do
      {test: {'cron' => '1 * * * *', 'class' => 'DummyJob', 'args' => []}}
    end
    let(:schedule) { Sidecloq::Schedule.new(specs) }
    let(:scheduler) { Sidecloq::Scheduler.new(schedule) }

    # Start each test from an empty Redis so queue counts are deterministic.
    before { Sidekiq.redis(&:flushdb) }

    it 'blocks when calling run' do
      # initialization on this thread seems to prevent some kind of testing
      # deadlock
      # TODO: investigate why....
      scheduler
      @unblocked = false
      t = Thread.new do
        scheduler.run
        raise 'Did not block' unless @unblocked
      end
      # for some reason rbx doesn't seem to allow the thread to run
      # appropriately without a small sleep here. would be nice to remove
      # this, but for now is necessary for test to work
      if RUBY_ENGINE == 'rbx'
        sleep(1)
      end
      scheduler.wait_for_loaded
      @unblocked = true
      scheduler.stop(1)
      t.join
    end

    it 'pushes jobs through sidekiq client' do
      scheduler.safe_enqueue_job('test', specs[:test])
      assert_equal 1, Sidekiq::Stats.new.enqueued
    end

    it 'does not raise errors when job spec is bad' do
      scheduler.safe_enqueue_job('bad', {})
      assert_equal 0, Sidekiq::Stats.new.enqueued
    end

    it 'has a unique JID for each enqueue call' do
      jid_1 = scheduler.safe_enqueue_job('test', specs[:test])
      jid_2 = scheduler.safe_enqueue_job('test', specs[:test])
      refute_nil jid_1
      refute_nil jid_2
      refute_equal jid_1, jid_2
    end
  end
end
|
# 悟空笔记 <!-- {docsify-ignore-all} -->
<p align='center'>同学们,</p>
<p align='center'>该文档定位<span>Spring Boot</span>进阶篇,</p>
<p align='center'>上一篇为「青铜篇」,故该篇称为「白银篇」。</p>
## 开发环境
- jdk 8.0
- Spring Boot 2.x
- Maven 3.x
- JetBrains IDEA 旗舰版
## 笔记目录
- [MD 目录结构-Github专属](SUMMARY.md)
## 参考文献
<br>
<br>
<br>
## 关于作者
<br>
<br>
<br>
## 贡献
- 金丝猴们,点击「编辑本页」可以参与悟空笔记修改和补充
- 欢迎大家指点修正
<br>
<br>
<br>
|
// +build js
package fs
import (
"os"
"syscall/js"
"github.com/hack-pad/hackpad/internal/process"
"github.com/pkg/errors"
)
// open adapts openSync to the callback-style JS API: the resolved file
// descriptor is returned as the single callback result value.
func open(args []js.Value) ([]interface{}, error) {
fd, err := openSync(args)
return []interface{}{fd}, err
}
// openSync opens a file for the current process, mirroring Node's
// fs.openSync(path[, flags[, mode]]) argument convention: flags default to
// read-only and mode to 0666 when omitted.
func openSync(args []js.Value) (interface{}, error) {
	if len(args) == 0 {
		return nil, errors.Errorf("Expected path, received: %v", args)
	}
	path := args[0].String()

	openFlags := os.O_RDONLY
	if len(args) > 1 {
		openFlags = args[1].Int()
	}
	openMode := os.FileMode(0666)
	if len(args) > 2 {
		openMode = os.FileMode(args[2].Int())
	}

	fd, err := process.Current().Files().Open(path, openFlags, openMode)
	return fd, err
}
|
import { IUser } from "./user";
import { ITweet } from "./tweet";
/**
 * Shape of a user-lookup response: the matched users plus any expanded
 * objects (here, tweets) returned under `includes`.
 */
export interface IUserLookup {
  // Fix: `[IUser]` declared a one-element tuple; the lookup endpoint returns
  // an arbitrary-length array of users.
  data: IUser[],
  includes: {
    tweets: ITweet[],
  }
}
|
(ns json-rpc.client
(:refer-clojure :exclude [send]))
(defprotocol Client
  "A JSON-RPC client."
  ;; Fixed the misspelled `conneciton` parameter names (local to the
  ;; protocol signatures, so no callers are affected).
  (open [this url] "Opens a connection to the given URL")
  (send [this connection message] "Sends a JSON-RPC request to the open connection")
  (close [this connection] "Closes the connection"))
|
import React from "react";
import { NodeProps } from "react-flow-renderer";
import Node from "components/Node";
import useConstantSourceNode from "hooks/nodes/useConstantSourceNode";
// Graph-editor node for a constant-signal source: feeds the fixed `offset`
// value into the audio graph via useConstantSourceNode.
// NOTE(review): assumes data.onChange is always supplied — editing the input
// would throw otherwise; confirm with the node factory upstream.
function ConstantSource({ data, id, selected, type }: NodeProps) {
// Fall back to an offset of 1 when the node data carries none.
const { offset = 1, onChange } = data;
useConstantSourceNode(id, { offset });
return (
<Node id={id} outputs={["output"]} title={`Constant: ${offset}`} type={type}>
{/* The numeric editor is only rendered while the node is selected. */}
{selected && (
<div className="customNode_editor nodrag">
<div className="customNode_item">
<input
onChange={e => onChange({ offset: +e.target.value })}
style={{ width: "100%" }}
title="Offset"
type="number"
value={offset}
/>
</div>
</div>
)}
</Node>
);
}
export default React.memo(ConstantSource);
|
import { Repository, getConnection } from 'typeorm';
import { Arg, Mutation, Query, Resolver, Int } from 'type-graphql';
import { InjectRepository } from 'typeorm-typedi-extensions';
import { Experience } from '../entity/experience';
import { Skill } from '../entity/skill';
@Resolver()
export class ExperienceResolver {
  public constructor(
    @InjectRepository(Experience)
    private readonly experienceRepository: Repository<Experience>,
    @InjectRepository(Skill)
    private readonly skillRepository: Repository<Skill>
  ) {}

  /** Creates and persists a new experience entry. */
  @Mutation(() => Experience)
  public async createExperience(
    @Arg('image') image: string,
    @Arg('title') title: string,
    @Arg('company') company: string,
    @Arg('description') description: string,
    @Arg('startDate') startDate: Date,
    @Arg('endDate', { nullable: true }) endDate: Date
  ): Promise<Experience> {
    return this.experienceRepository
      .create({
        image,
        title,
        company,
        description,
        startDate,
        endDate
      })
      .save();
  }

  /** Deletes an experience by ID. Returns true even when nothing matched. */
  @Mutation(() => Boolean)
  public async deleteExperience(
    @Arg('experienceId', () => Int) experienceId: number
  ): Promise<boolean> {
    await this.experienceRepository.delete({ id: experienceId });
    return true;
  }

  /** Returns all stored experiences. */
  @Query(() => [Experience])
  public async experiences(): Promise<Experience[]> {
    return this.experienceRepository.find();
  }

  // Using RelationQueryBuilder — faster than loading the collection and
  // calling save(), and avoids "ER_DUP_ENTRY: Duplicate entry '1-1' for key
  // 'PRIMARY'" (https://github.com/typeorm/typeorm/issues/3459).
  @Mutation(() => Experience)
  public async addExperienceSkillV2(
    @Arg('experienceId', () => Int) experienceId: number,
    @Arg('skillId', () => Int) skillId: number
  ): Promise<Experience> {
    const experience = await this.experienceRepository.findOne({
      id: experienceId
    });
    const skill = await this.skillRepository.findOne({ id: skillId });
    if (!experience) {
      throw new Error('Invalid experience ID');
    }
    if (!skill) {
      throw new Error('Invalid skill ID');
    }
    await getConnection()
      .createQueryBuilder()
      .relation(Experience, 'skills')
      .of(experience)
      .add(skill);
    // Fix: the schema declares Experience as the mutation payload, but this
    // method previously returned void, yielding a null payload. Return the
    // affected experience so the GraphQL contract is honored.
    return experience;
  }

  // Alternative to RelationQueryBuilder with a bulky save() call.
  // Much worse performance, but rejects duplicate skill IDs explicitly.
  @Mutation(() => Experience)
  public async addExperienceSkill(
    @Arg('experienceId', () => Int) experienceId: number,
    @Arg('skillId', () => Int) skillId: number
  ): Promise<Experience> {
    const experience = await this.experienceRepository.findOne({
      id: experienceId
    });
    const skill = await this.skillRepository.findOne({ id: skillId });
    if (!experience) {
      throw new Error('Invalid experience ID');
    }
    if (!skill) {
      throw new Error('Invalid skill ID');
    }
    if ((await experience.skills).some(skills => skills.id === skillId)) {
      throw new Error('Skill Id already added');
    }
    (await experience.skills).push(skill);
    return this.experienceRepository.save(experience);
  }

  /** Returns the skills attached to the given experience. */
  @Query(() => [Skill])
  public async getExperienceSkills(
    @Arg('experienceId', () => Int) experienceId: number
  ): Promise<Skill[]> {
    const experience = await this.experienceRepository.findOne({
      id: experienceId
    });
    if (!experience) {
      throw new Error('Invalid experience ID');
    }
    return experience.skills;
  }
}
|
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.jenkins.plugins.stacksmith;
import javax.annotation.concurrent.Immutable;
import com.google.auto.value.AutoValue;
/**
 * Immutable, AutoValue-generated reference to a Stacksmith Stack: the API
 * stack ID plus the stack URL, from which the Dockerfile URL is derived.
 * Instances are created via {@link #builder()}.
 */
@AutoValue
@Immutable
public abstract class StackReference {
/** Suffix appended to the stack URL to address its Dockerfile. */
private static final String DOCKERFILE_URL_POSTFIX = ".dockerfile";
/** Package-private constructor: instantiate via {@link #builder()}. */
StackReference() {}
/**
 * @return the Stack ID string as used in the Stacksmith API.
 */
public abstract String getId();
/**
 * @return the Stacksmith API URL for this stack.
 */
public abstract String getStackUrl();
/**
 * @return the Stacksmith API URL for the Dockerfile associated with this
 * stack; derived by appending {@code .dockerfile} to the stack URL.
 */
public String getDockerfileUrl() {
return getStackUrl() + DOCKERFILE_URL_POSTFIX;
}
/** @return a fresh builder for assembling a {@link StackReference}. */
public static Builder builder() {
return new AutoValue_StackReference.Builder();
}
/**
 * Builder class for {@link StackReference}.
 */
@AutoValue.Builder
public abstract static class Builder {
public abstract Builder setId(String id);
public abstract Builder setStackUrl(String stackUrl);
public abstract StackReference build();
}
}
|
// Store mutation/action type constants, grouped by feature area.
// authentication
export const LOGIN = "LOGIN";
export const LOGOUT = "LOGOUT";
export const SET_ACCESS_TOKEN = "SET_ACCESS_TOKEN";
// entities (normalized domain objects)
export const SET_ENTITIES = "SET_ENTITIES";
export const SET_ENTITY = "SET_ENTITY";
export const UPDATE_ENTITY = "UPDATE_ENTITY";
// app (current selection / UI state)
export const SET_CURRENT_USER_ID = "SET_CURRENT_USER_ID";
export const SET_CURRENT_CLUB_ID = "SET_CURRENT_CLUB_ID";
export const SET_CURRENT_RACE_ID = "SET_CURRENT_RACE_ID";
export const SHARE_LOCATION = "SHARE_LOCATION";
|
using System.Collections.ObjectModel;
using System.Threading.Tasks;
using Too_Many_Things.Core.DataAccess.Models;
using Too_Many_Things.Core.ViewModels;
namespace Too_Many_Things.Core.Services
{
/// <summary>
/// Local persistence for checklist data: stores and retrieves the list
/// collection and toggles the checked state of individual entries.
/// </summary>
public interface ILocalDataStorageService
{
/// <summary>Loads the previously stored list collection.</summary>
Task<ObservableCollection<List>> RetrieveStoredObjectAsync();
/// <summary>Persists the given list collection.</summary>
Task StoreObject(ObservableCollection<List> objectToStore);
/// <summary>Converts the view-model collection to storage models and persists it.</summary>
Task ConvertAndStoreListCollectionAsync(ObservableCollection<ListViewModel> listToStore);
/// <summary>Toggles the checked state of <paramref name="entry"/> within <paramref name="parentList"/>.</summary>
Task ToggleIsChecked(Entry entry, List parentList);
}
}
|
/*
* Use of this source code is governed by the MIT license that can be
* found in the LICENSE file.
*/
package org.rust.toml.inspections
import com.intellij.openapi.application.runWriteAction
import com.intellij.openapi.vfs.VirtualFile
import org.rust.*
import org.rust.cargo.project.model.cargoProjects
import org.rust.cargo.project.workspace.FeatureState
import org.rust.cargo.project.workspace.PackageFeature
import org.rust.ide.annotator.RsAnnotationTestFixture
import org.rust.ide.inspections.RsWithToolchainInspectionTestBase
import org.rust.toml.inspections.MissingFeaturesInspectionTest.Context
/**
 * Toolchain-backed tests for [MissingFeaturesInspection].
 *
 * Each test materializes a Cargo project on disk, disables the feature named in
 * [Context] for the package [Context.pkgWithFeature], and expects the checked
 * file to be covered by a single "Missing features" warning (see
 * [createAnnotationFixture], which wraps the whole document in the expected
 * warning markup).
 */
class MissingFeaturesInspectionTest : RsWithToolchainInspectionTestBase<Context>(MissingFeaturesInspection::class) {
// A binary crate requests a feature of a sibling crate that is disabled.
fun `test missing dependency feature`() = doTest(
pkgWithFeature = "bar",
featureName = "feature_bar",
fileToCheck = "foo/src/main.rs"
) {
toml("Cargo.toml", """
[workspace]
members = ["foo", "bar"]
""")
dir("foo") {
toml("Cargo.toml", """
[package]
name = "foo"
version = "0.1.0"
authors = []
[dependencies]
bar = { path = "../bar", features = ["feature_bar"] }
""")
dir("src") {
rust("main.rs", """
fn main() {}
""")
}
}
dir("bar") {
toml("Cargo.toml", """
[package]
name = "bar"
version = "0.1.0"
authors = []
[features]
feature_bar = [] # disabled
""")
dir("src") {
rust("lib.rs", "")
}
}
}
// A [[bin]] target lists a required-features entry that is disabled.
fun `test missing required target feature`() = doTest(
pkgWithFeature = "hello",
featureName = "feature_hello",
fileToCheck = "src/main.rs"
) {
toml("Cargo.toml", """
[package]
name = "hello"
version = "0.1.0"
authors = []
[[bin]]
name = "main"
path = "src/main.rs"
required-features = ["feature_hello"]
[features]
feature_hello = []
[dependencies]
""")
dir("src") {
rust("main.rs", """
fn main() {}
""")
rust("lib.rs", "")
}
}
// Same setup as the first test, but the warning is checked in the manifest.
fun `test missing dependency feature in manifest`() = doTest(
pkgWithFeature = "bar",
featureName = "feature_bar",
fileToCheck = "foo/Cargo.toml"
) {
toml("Cargo.toml", """
[workspace]
members = ["foo", "bar"]
""")
dir("foo") {
toml("Cargo.toml", """
[package]
name = "foo"
version = "0.1.0"
authors = []
[dependencies]
bar = { path = "../bar", features = ["feature_bar"] }
""")
dir("src") {
rust("main.rs", """
fn main() {}
""")
}
}
dir("bar") {
toml("Cargo.toml", """
[package]
name = "bar"
version = "0.1.0"
authors = []
[features]
feature_bar = [] # disabled
""")
dir("src") {
rust("lib.rs", "")
}
}
}
// Fixture that rewrites the opened document so the entire original text is
// wrapped in the <warning> markup the annotation checker expects.
override fun createAnnotationFixture(): RsAnnotationTestFixture<Context> {
return object : RsAnnotationTestFixture<Context>(
this@MissingFeaturesInspectionTest,
myFixture,
inspectionClasses = listOf(inspectionClass)
) {
override fun configureByFile(file: VirtualFile, context: Context?) {
require(context != null)
myFixture.configureFromExistingVirtualFile(file)
runWriteAction {
with(myFixture.editor.document) {
setText("<warning descr=\"Missing features: ${context.pkgWithFeature}/${context.featureName}\">$text</warning>")
}
}
}
}
}
// Builds the project on disk, disables the feature under test for the target
// package, and returns the file whose annotations will be verified.
override fun configureProject(fileTree: FileTree, context: Context?): VirtualFile {
require(context != null)
val testProject = fileTree.create()
val cargoProject = project.cargoProjects.singleProject()
val pkg = cargoProject.workspaceOrFail().packages.find { it.name == context.pkgWithFeature }
?: error("Package ${context.pkgWithFeature} not found")
project.cargoProjects.modifyFeatures(cargoProject, setOf(PackageFeature(pkg, context.featureName)), FeatureState.Disabled)
return testProject.file(context.fileToCheck)
}
// Convenience wrapper bundling the test parameters into a [Context].
private fun doTest(
pkgWithFeature: String,
featureName: String,
fileToCheck: String,
builder: FileTreeBuilder.() -> Unit
) = check(context = Context(pkgWithFeature, featureName, fileToCheck), builder = builder)
// Parameters of a single test: which package/feature to disable and which
// file should carry the warning.
data class Context(val pkgWithFeature: String, val featureName: String, val fileToCheck: String)
}
|
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Serialization;
using UnityEngine.UI;
// Drives the three UI screens (home / in-game / game-over) from the global
// GameStatus state, refreshing score labels every frame.
public class UIManager : MonoBehaviour
{
    [SerializeField] private GameObject HomeGo;
    [SerializeField] private GameObject InGameGo;
    [SerializeField] private GameObject GameOverGo;
    [SerializeField] private Text scoreTextInGame;
    [SerializeField] private Text scoreTextGameOver;
    [SerializeField] private Text hSTextGameOver;
    [SerializeField] private Text hSTextHome;

    // Polls the game state each frame and shows the matching screen.
    void Update()
    {
        switch (GameStatus.GameState)
        {
            case GameStatus.GameStateEnum.Home:
                SetActiveScreen(HomeGo);
                hSTextHome.text = "High Score : " + ScoreManager.highScore;
                break;
            case GameStatus.GameStateEnum.InGame:
                SetActiveScreen(InGameGo);
                scoreTextInGame.text = "Score : " + ScoreManager.score;
                break;
            case GameStatus.GameStateEnum.GameOver:
                SetActiveScreen(GameOverGo);
                scoreTextGameOver.text = "Score : " + ScoreManager.score;
                hSTextGameOver.text = ScoreManager.newHS
                    ? "New High Score !"
                    : "High Score : " + ScoreManager.highScore;
                break;
        }
    }

    // Enables exactly one of the three screen roots, disabling the others.
    private void SetActiveScreen(GameObject screen)
    {
        HomeGo.SetActive(screen == HomeGo);
        InGameGo.SetActive(screen == InGameGo);
        GameOverGo.SetActive(screen == GameOverGo);
    }
}
|
// Doxygen-generated search index fragment; do not edit by hand.
var searchData=
[
['position',['position',['../structALF__jit__buf.html#a6a35d3d3e3a0033c118119bdec30df10',1,'ALF_jit_buf']]]
];
|
# Examples
This document will show you several examples of KiKit CLI for panelization. Note
that this is **not an exhaustive description** of everything that KiKit can do,
nor proper documentation. For further details, please refer to:
- [installation guide](installation.md)
- [description of all panelization options](panelizeCli.md)
- [more detail about KiKit's algorithm for tab creation](understandingTabs.md)
- [reference for the Python interface](panelization.md)
We will show everything on a single board located in
`doc/resources/conn.kicad_pcb`. The board looks like this when rendered via
PcbDraw:

# Basic panels & layout
Let's start with our first panel.
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2;' \
--tabs full \
--cuts vcuts \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

We specified that we want 2x2 panel, no space between board and separate them by
V-cuts. We also specified that we want to build full tabs (although no tabs are
visible in this example). This is ,however, essential – if we omitted tabs, no
cuts between the boards would be performed. Note, that due to the rounded
corners, this panel cannot be manufactured. We will fix it later.
Note that the `\` in the input is there to make shell happy, so we can break our
command into multiple lines. Also note that there are single quotes around the
key-value pair – again, to make shell happy and to interpret a string with
spaces as a single option.
Also note that KiKit accepts all options in categories (e.g., `layout`, `tabs`,
`cuts`, ...). You can specify the parameters as a semicolon-separated key-value
list. To learn about the precise syntax of the CLI and about all options, please
refer to – [documentation](panelizeCli.md).
One side note – if you try it with your own board some components might be gone.
KiKit respects the KiCAD component selection criteria. When you specify an input
rectangle, only the components that **fully fit** inside the input rectangle are
selected. This, however, takes into account **both name and value labels** (even when
they are hidden).
When you do not specify the source area explicitly, KiKit takes the board outline
bounding box as the source area. Therefore, by default, components outside the
board substrate are not copied to panel.
Note that this is intended behavior; for once it is consistent with KiCAD
behavior of user selection and also it allows to easily ignore surrounding
comments and drawings in the board sheet (it makes no sense to have 12 same
copies of the notes around the board).
How to include the missing components?
- specify the source area explicitly to include all your components
- specify `--source 'tolerance: 10mm'` to enlarge the board outline bounding box
by e.g. 10 mm. The default value is 5 mm.
I told you that the panel above is not suitable for manufacturing. Let's see why:
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2;' \
--tabs full \
--cuts vcuts \
--post 'millradius: 1mm' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

We specified a milling simulation post-processing. This simulates the milling
operation in the fab house. As you can see, the sharp internal corners cannot be
manufactured. I recommend you to use milling postprocessing always – you can
easily see if your cuts are off or you have too narrow slots in your design.
Usually, one would use full tabs only for rectangular boards. Usually, when you
have rounded corners, you will use short tabs instead and add some space between
the boards. So let's fix it:
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2; space: 2mm' \
--tabs 'fixed; hwidth: 10mm; vwidth: 15mm' \
--cuts vcuts \
--post 'millradius: 1mm' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

In that way, the rounded corners can be machined. Lets' see the same example
with mousebites instead:
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2; space: 2mm' \
--tabs 'fixed; width: 5mm' \
--cuts 'mousebites; drill: 0.5mm; spacing: 1mm; offset: 0.2mm' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

We changed cut type to mousebites and we specified that they should be
performed by 0.5mm holes with a spacing of 1 mm. You could also use inches if
you want – just specify `<number>in`. Since we use mousebites, we used narrower
tabs. We also specified that the cuts should be inset 0.2 mm into the board
outline. This is suitable when your board should fit into a cover – when you
break away the tabs, all burs will be inside the intended board outline.
What happens, when we simulate the milling operation?
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2; space: 2mm' \
--tabs 'fixed; width: 5mm' \
--cuts 'mousebites; drill: 0.5mm; spacing: 1mm; offset: 0.2mm' \
--post 'millradius: 1mm' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

See? The cuts are somewhat short. This is due to the internal corners that
cannot be milled. KiKit can fix that for you – just specify you want to prolong
your cuts tangentially by a small amount:
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2; space: 2mm' \
--tabs 'fixed; width: 3mm' \
--cuts 'mousebites; drill: 0.5mm; spacing: 1mm; offset: 0.2mm; prolong: 0.5mm' \
--post 'millradius: 1mm' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

If you want, you can also specify a number of tabs to generate. KiKit will place
them evenly:
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2; space: 2mm' \
--tabs 'fixed; width: 3mm; vcount: 2' \
--cuts 'mousebites; drill: 0.5mm; spacing: 1mm; offset: 0.2mm; prolong: 0.5mm' \
--post 'millradius: 1mm' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

You can also append frame or rails to the panel. Frames and rail are useful in
the following situations:
- you want to assemble your board, so you need tooling holes, fiducial.
- you want to append a text to board (e.g., to identify a manufacturing batch)
- your boards are not rectangular and you want to use V-Cuts (most manufacturers
  require the outer edge of the panel to be a rectangle in order to manufacture
V-Cuts)
Let's start with rails:
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2; space: 2mm' \
--tabs 'fixed; width: 3mm; vcount: 2' \
--cuts 'mousebites; drill: 0.5mm; spacing: 1mm; offset: 0.2mm; prolong: 0.5mm' \
--framing 'railstb; width: 5mm; space: 3mm;' \
--post 'millradius: 1mm' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

Similarly, you can add left and right rail via the `railslr` type. If you want
a full frame, use the type `frame`. When you place a full frame, it might make
sense to include cuts in the corner of the frame, so you can break it apart
easily. Let's see an example:
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2; space: 2mm' \
--tabs 'fixed; width: 3mm; vcount: 2' \
--cuts 'mousebites; drill: 0.5mm; spacing: 1mm; offset: 0.2mm; prolong: 0.5mm' \
--framing 'frame; width: 5mm; space: 3mm; cuts: true' \
--post 'millradius: 1mm' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

When you use V-cuts it might make sense to not remove all material, but only
mill a slot around the perimeter of the board. This yields a stronger panel – and
some manufacturers require such style for assembly with V-Cuts. This is achieved
via framing type `tightframe`. Note that it does not make much sense with
mousebites.
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2; space: 6mm' \
--tabs 'fixed; width: 3mm; vcount: 2' \
--cuts vcuts \
--framing 'tightframe; width: 5mm; space: 3mm; ' \
--post 'millradius: 1mm' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

Once we have a frame, we can append tooling holes, fiducials and some text to
it:
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2; space: 2mm' \
--tabs 'fixed; width: 3mm; vcount: 2' \
--cuts 'mousebites; drill: 0.5mm; spacing: 1mm; offset: 0.2mm; prolong: 0.5mm' \
--framing 'railstb; width: 5mm; space: 3mm;' \
--tooling '3hole; hoffset: 2.5mm; voffset: 2.5mm; size: 1.5mm' \
--fiducials '3fid; hoffset: 5mm; voffset: 2.5mm; coppersize: 2mm; opening: 1mm;' \
    --text "simple; text: yaqwsx's panel; anchor: mt; voffset: 2.5mm; hjustify: center; vjustify: center;" \
--post 'millradius: 1mm' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

There are many options for text and fiducials. Be sure to read the [full
documentation](panelizeCli.md).
# Advanced features & layouts
It is possible that you have some critical features you want to avoid with tabs.
KiKit has several features that can help you. Let's start with the simple ones.
First, you can rotate the boards in your layout. This might not make much sense
for rectangular boards, but it might save you when you have circular or oddly
shaped boards:
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2; space: 0mm; rotation: 45deg;' \
--tabs 'fixed; width: 3mm;' \
--cuts 'mousebites; drill: 0.5mm; spacing: 1mm; offset: 0.2mm; prolong: 0.75mm' \
--framing 'frame; width: 5mm; space: 3mm; cuts: true' \
--post 'millradius: 1mm' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

When your board has a connector sticking one one side of the board, it makes
sense to rotate the boards every other column, row or combination of both. KiKit
supports this via layout option `alternation`. You should be careful about
component references when rotating boards – KiCAD's references have a property
"Stay upright" which makes them always face up (even when placed on a panel). So
be sure to turn it off before panelizing. Here's an example:
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2; space: 3mm; alternation: cols;' \
--tabs 'fixed; width: 3mm; vcount: 2' \
--cuts 'mousebites; drill: 0.5mm; spacing: 1mm; offset: 0.2mm; prolong: 0.5mm' \
--framing 'frame; width: 5mm; space: 3mm; cuts: true' \
--post 'millradius: 1mm' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

Another solution might be to not put tabs on, e.g., vertical edges of the PCB.
However, in that case your panel might be weak for further assembly. You can
make it more stiff by including backbones – a full piece of substrate between
the panels. Note that adding a backbone does not extend space between boards -
that's up to you. You can add either vertical, horizontal or both backbones.
Also, similarly with frames, you can put cuts on your backbone to make
depanelization of your boards easier. Enough theory, let's see an example
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2; hspace: 2mm; vspace: 9mm; hbackbone: 5mm; hbonecut: true' \
--tabs 'fixed; width: 3mm; vcount: 2; hcount: 0' \
--cuts 'mousebites; drill: 0.5mm; spacing: 1mm; offset: 0.2mm; prolong: 0.5mm' \
--framing 'railstb; width: 5mm; space: 3mm;' \
--post 'millradius: 1mm' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

The most powerful feature of KiKit regarding tab placement are tabs via
annotation. Remember our test board? When you open it in Pcbnew, you can see
that there are some special footprints – KiKit's annotations:

They specify where to place tabs. You can even specify individual tab width via
text property of the symbol. How to use it? Just specify tab type to
`annotation`. We also have to increase the source area tolerance, so it can
capture the annotations.
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2; space: 5mm;' \
--tabs annotation \
--source 'tolerance: 15mm' \
--cuts 'mousebites; drill: 0.5mm; spacing: 1mm; offset: 0.2mm; prolong: 0.5mm' \
--framing 'railstb; width: 5mm; space: 3mm;' \
--post 'millradius: 1mm' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

Well, the panel looks strange – right? That's because KiKit always constructs a
half-bridges. When you specify the tabs location, you have to either ensure they
match or put a piece of substrate they can reach – e.g., a backbone or a
tightframe. If you are interested in the details, read more about tabs in
section [Understanding tabs](understandingTabs.md). Let's fix it:
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2; space: 8mm; hbackbone: 3mm; vbackbone: 3mm' \
--tabs annotation \
--source 'tolerance: 15mm' \
--cuts 'mousebites; drill: 0.5mm; spacing: 1mm; offset: 0.2mm; prolong: 0.5mm' \
--framing 'railstb; width: 5mm; space: 3mm;' \
--post 'millradius: 1mm' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

Note that the annotation can have an arbitrary orientation. The arrow just must
be outside board edge and points towards it. KiKit will also place only those
tabs, that have a neighboring substrate. For precise algorithm, see section
[understanding tabs](understandingTabs.md).
When you make flex PCBs or you want to save etchant, it makes sense to pour
copper on all non-functional parts of the panel. It will make the PCB rigid. You
can do so via `copperfill` post-processing operation:
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2; space: 2mm' \
--tabs 'fixed; width: 3mm;' \
--cuts 'mousebites; drill: 0.5mm; spacing: 1mm; offset: 0.2mm; prolong: 0.5mm' \
--framing 'railstb; width: 5mm; space: 3mm;' \
--post 'millradius: 1mm; copperfill: true' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

When you use V-cuts with `copperfill` you (or your fab house) might want to
include a clearance around the V-cuts:
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2; space: 2mm' \
--tabs 'fixed; hwidth: 10mm; vwidth: 15mm' \
--cuts 'vcuts; clearance: 1.5mm' \
--framing 'railstb; width: 5mm; space: 3mm;' \
--post 'millradius: 1mm; copperfill: true' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

Note one last facts about V-cuts. V-cuts can only be straight and
horizontal/vertical. But you can use them with circular boards if you want by
cutting a little inside them. Use the option `cutcurves`, which will approximate
the cut by its starting and ending points.
# I would like... but KiKit does not support it!
If you need something special; e.g., custom placement of tooling holes, multiple
texts, etc. KiKit has you covered.
The CLI interface allows you to run a custom script over the final panel. The
script can use KiKit Python interface to modify it. For the sake of simplicity,
let's add a hole in the middle of the frame. Therefore, we write the following
script:
```.py
from kikit.units import mm
from pcbnew import wxPoint
def kikitPostprocess(panel, arg):
minx, miny, maxx, maxy = panel.panelBBox()
position = wxPoint((minx + maxx) / 2, miny + 2 * mm)
panel.addNPTHole(position, 3 * mm)
```
Then run KiKit:
```
kikit panelize \
--layout 'grid; rows: 2; cols: 2; space: 2mm' \
--tabs 'fixed; width: 3mm; vcount: 2' \
--cuts 'mousebites; drill: 0.5mm; spacing: 1mm; offset: 0.2mm; prolong: 0.5mm' \
--framing 'railstb; width: 5mm; space: 3mm;' \
--post 'millradius: 1mm; script: doc/resources/examplePost.py' \
doc/resources/conn.kicad_pcb panel.kicad_pcb
```

You can learn more about available functions from the comment in the source code
or in [documentation](panelization.md).
If you implement a feature that your fab house requires (e.g., new tooling hole
type), consider submitting a pull request for KiKit instead. I believe the
others will benefit from it.
# Managing presets
The last section of this document is dedicated to management of presets. You can
read the specification in the [documentation for CLI](panelizeCli.md). Here I
would like to focus on practical examples.
As you should know from the documentation, the panelization preset is divided
into sections; e.g., `layout`, `tabs`, etc. The key-value parameters in these
sections can be specified via JSON files. In KiKit, you can specify these files
via `-p` option:
```
kikit panelize -p myPreset.json -p :<builtInPreset> <other parameters>
```
The parameters in the later specified presets override the parameters in the
previously specified presets. This allows you to define named piece-wise
presets. Therefore, you can prepare various presets for mousebites – e.g.,
`fineMousebites.json` and `coarseMousebites.json`:
```.js
// fineMousebites.json
{
"cuts": {
"type": "mousebites",
"drill": "0.5mm",
"spacing": "0.9mm",
"offset": "0.25mm"
}
}
// coarseMousebites.json
{
"cuts": {
"type": "mousebites",
"drill": "0.3mm",
"spacing": "0.2mm",
"offset": "0.15mm"
}
}
```
Then you can specify your panelization commands easily via:
```
kikit panelize -p fineMousebites.json <otheroptions>
```
Therefore, you can build a custom library of commonly used options; e.g., per
fabrication house. KiKit offers some built-in presets – see
[`panelizePresets`](../kikit/resources/panelizePresets). Note that the built-in
preset `default.json` is always used as a base and it specifies conservative
default values so you can only override the options relevant for you.
To give you an example – with KiKit, you will no longer have to remember what
diameter of tooling holes JLC PCB requires, just use:
```
kikit panelize -p :jlcTooling <otheroptions>
```
|
require "haml"
module HamlI18nLint
  # Runs the linter over every configured file and reports each failure.
  class Runner
    # @param options [Options] options
    # @return [Runner] new runner to run lint with the given options
    def initialize(options)
      @options = options
      @config = ::HamlI18nLint::Config.new(@options)
      @linter = HamlI18nLint.linter.new(@config)
    end

    # Lints all configured files, reporting each failing result as it is
    # found. Every file is linted even when earlier files fail.
    # @return [true, false] whether all of the files passed lint
    def run
      failure_count = 0
      @config.files.each do |file|
        result = lint(file)
        next if result.success?
        @config.report(result)
        failure_count += 1
      end
      failure_count.zero?
    end

    private

    # Reads the template from disk and hands it to the linter.
    def lint(filename)
      @linter.lint(filename: filename, template: File.read(filename))
    end
  end
end
|
#!/bin/bash
# Build and install fish from $FISH_RELEASE into $HOME/fish unless a cached
# install is already present; then put it on PATH.
# All expansions are quoted so paths/URLs containing spaces do not word-split.
if test ! -d "$HOME/fish/bin"; then
    # Abort if we cannot reach $HOME instead of building in the wrong place.
    cd "$HOME" || exit 1
    wget "$FISH_RELEASE"
    archive=$(basename "$FISH_RELEASE")
    tar -xzf "$archive"
    rm "$archive"
    srcdir=$(basename "${FISH_RELEASE%.tar.gz}")
    pushd "$srcdir" || exit 1
    ./configure --prefix="$HOME/fish"
    make
    make install
    popd || exit 1
    rm -rf "$srcdir"
else
    echo "Using cached fish install"
fi
export PATH="$HOME/fish/bin:$PATH"
|
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class Housekeeping extends Model
{
    /** Mass-assignable attributes of a housekeeping order. */
    protected $fillable
        = [
            'guest_id',
            'order_date',
            'bed_sheets',
            'cleaning',
            'minibar',
            'blanket',
            'toiletries',
            'pillow',
        ];

    // Every housekeeping order is created under service id 9 by default.
    protected $attributes
        = [
            'service_id' => 9,
        ];

    public function guest()
    {
        return $this->belongsTo(Guest::class);
    }

    /**
     * All housekeeping orders, each decorated with serviceName and roomNumber.
     *
     * @return \___PHPSTORM_HELPERS\static|array|mixed
     */
    public static function getAllHousekeepingOrders()
    {
        return self::decorateOrders(self::all());
    }

    /**
     * Housekeeping order history for one guest, decorated the same way as
     * getAllHousekeepingOrders().
     */
    public static function getOrderHistoryByGuest($guestId)
    {
        return self::decorateOrders(self::where('guest_id', $guestId)->get());
    }

    /**
     * Attaches serviceName and roomNumber to each order in the collection.
     * Returns an empty array for an empty collection, preserving the original
     * behavior of both public methods (which shared this loop verbatim).
     */
    private static function decorateOrders($housekeepings)
    {
        if (count($housekeepings) > 0) {
            $serviceName = Services::getServiceName($housekeepings[0]->service_id);
            foreach ($housekeepings as $housekeeping) {
                $housekeeping->serviceName = $serviceName;
                // NOTE(review): assumes the guest relation and rooms[0] exist;
                // a falsy room number falls back to an error marker string.
                $housekeeping->roomNumber = ($housekeeping->guest->rooms[0]->number)
                    ? $housekeeping->guest->rooms[0]->number
                    : 'HousekeepingErr id:' . $housekeeping->id;
            }
        } else {
            $housekeepings = [];
        }
        return $housekeepings;
    }
}
|
/* ************************************************************************
*
* Zen [and the art of] CMS
*
* https://zenesis.com
*
* Copyright:
* 2019-2022 Zenesis Ltd, https://www.zenesis.com
*
* License:
* MIT (see LICENSE in project root)
*
* Authors:
* John Spackman ([email protected], @johnspackman)
*
* ************************************************************************ */
/**
 * Modal dialog that wraps a zx.app.utils.LoginForm and performs the login
 * round-trip against the server's LoginApi.
 */
qx.Class.define("zx.app.utils.LoginFormDlg", {
  extend: zx.ui.utils.AbstractDialog,

  construct() {
    this.base(arguments, "Login");
    this.setLayout(new qx.ui.layout.VBox());
    // Login form on top, standard dialog button bar underneath.
    this.add(this.getQxObject("edLogin"));
    this.add(this.getQxObject("buttonBar"));
  },

  members: {
    /**
     * Lazily creates the dialog's child widgets by id.
     *
     * @Override
     */
    _createQxObjectImpl(id) {
      switch (id) {
        case "btnSubmit":
          var btn = new qx.ui.form.Button(
            "Login",
            "@FontAwesomeSolid/sign-in-alt/16"
          );
          btn.addListener("execute", this._submitDialogClicked, this);
          return btn;

        case "edLogin":
          return new zx.app.utils.LoginForm();
      }
      // Fall through to the superclass for ids this dialog does not own.
      return this.base(arguments, id);
    },

    /**
     * Validates the entered credentials, attempts the login against the
     * server and, on success, stores the logged-in user on the application.
     * Shows an error dialog and keeps this dialog open on any failure.
     *
     * @Override
     */
    async submitDialog() {
      let ed = this.getQxObject("edLogin");
      let username = ed.getQxObject("edtUsername").getValue().trim();
      let password = ed.getQxObject("edtPassword").getValue().trim();
      if (!username || !password) {
        zx.ui.utils.MessageDlg.showError(
          "Please provide a username and password to login"
        );
        return;
      }

      // Resolve the server-side login API via the network controller.
      let loginApi = await qx.core.Init.getApplication()
        .getNetController()
        .getUriMapping("zx.server.auth.LoginApi");
      let result = await loginApi.loginUser(username, password);
      if (!result || result.status != "ok") {
        zx.ui.utils.MessageDlg.showError(
          "Incorrect username or password, please try again"
        );
        return;
      }

      let user = await loginApi.getCurrentUser();
      qx.core.Init.getApplication().setUser(user);
      // Superclass handles closing the dialog on successful submit.
      return this.base(arguments);
    }
  }
});
|
/*
This code is part of the WhiteboxTools geospatial analysis library.
Authors: Dr. John Lindsay
Created: 27/09/2018
Last Modified: 27/09/2018
License: MIT
*/
use super::Point2D;
// Relative tolerance used by `Circle::contains`: points within 1 part in
// 1e14 of the radius still count as inside, so boundary points are not
// rejected by floating-point rounding.
const MULTIPLICATIVE_EPSILON: f64 = 1f64 + 1e-14;

/// A circle in the 2D plane, defined by a center point and a radius.
#[derive(Default, Copy, Clone, Debug, PartialEq)]
pub struct Circle {
    pub center: Point2D, // Center
    pub radius: f64,     // Radius
}
impl Circle {
    /// Creates a new `Circle` with the given center and radius.
    pub fn new(center: Point2D, radius: f64) -> Circle {
        // Field-init shorthand replaces the redundant `center: center` form.
        Circle { center, radius }
    }

    /// Returns `true` if `p` lies inside or on the circle.
    ///
    /// A tiny multiplicative tolerance is applied to the radius so that
    /// points computed to lie exactly on the boundary are not excluded by
    /// floating-point rounding error.
    pub fn contains(&self, p: Point2D) -> bool {
        self.center.distance(&p) <= self.radius * MULTIPLICATIVE_EPSILON
    }
}
|
(ns example.core
(:require [cljs.spec.alpha :as s]
[clojure.string :as string]
[expound.alpha :refer [expound]]))
;; Grammar for a tiny parenthesized language: "(", one or more tokens
;; (a|b|c), whitespace runs, or nested sub-expressions, then ")".
(s/def ::left-paren #(= % "("))
(s/def ::right-paren #(= % ")"))
(s/def ::space (fn [x] (and (string? x) (pos? (count x)) (string/blank? x))))
(s/def ::token (fn [x] (contains? #{"a" "b" "c"} x)))
(s/def
  ::example
  (s/cat
   :left-paren ::left-paren
   :expr (s/+ (s/alt :token ::token :space ::space :child ::example))
   :right-paren ::right-paren))

(let [data (seq "(aa b (a (b)))")]
  (if (s/valid? ::example data)
    (println (pr-str (s/conform ::example data)))
    ;; `s/explain` prints its report itself and returns nil, so wrapping it
    ;; in `println` printed a spurious "nil". Use the (previously unused)
    ;; expound require for a human-readable failure report instead.
    (expound ::example data)))
|
// Demonstrates Kotlin's bit-shift operators: shl, shr and ushr.
fun main(args: Array<String>) {
    // Local helper: prints one shift result in the demo's output format.
    fun report(value: Int, operation: String, times: Int, result: Int) =
        println("$value : $operation $times times = $result")

    val five = 5
    report(five, "After Left shift", 1, five shl 1)
    report(five, "After Left shift", 3, five shl 3)
    report(five, "After Right shift", 1, five shr 1)
    report(five, "Right shift", 2, five shr 2)

    // Arithmetic right shift keeps the sign bit; unsigned shift does not.
    val minusOne = -1
    report(minusOne, "After Right shift", 1, minusOne shr 1)
    report(minusOne, "Unsigned right shift", 1, minusOne ushr 1)
}
|
# code-to-image
자바스크립트를 이용해 소스코드를 이미지로 변환합니다.
- [데모 페이지](https://soma0sd.github.io/code-to-image/)
아래 도구를 사용하였습니다.
- [highlight.js](https://highlightjs.org/): 소스코드 하이라이팅
- [html2canvas](https://html2canvas.hertzen.com/): 이미지 변환
|
/**
 * Scrolls an element's direct parent just enough to bring the element
 * fully into view. No-op when the element has no parent or is already
 * visible within the parent's scroll viewport.
 * @param {HTMLElement} el
 */
export function scrollIfNeeded(el: HTMLElement): void {
  const container = el.parentElement;
  if (!container) {
    return;
  }
  const viewTop = container.scrollTop;
  const viewHeight = container.offsetHeight;
  const top = el.offsetTop;
  const bottom = top + el.offsetHeight;
  if (top < viewTop) {
    // Element starts above the visible area: scroll up to just above it.
    container.scrollTop = top - 1;
  } else if (bottom > viewTop + viewHeight) {
    // Element ends below the visible area: scroll down to just past it.
    container.scrollTop = bottom - viewHeight + 1;
  }
}
|
package digitalhouse.com.revisao.helpers;
import android.view.View;
/**
 * Callback contract for click events on RecyclerView items; the adapter
 * forwards the tapped view together with its adapter position.
 */
public interface RecyclerViewClickListener {
    /** Invoked on a single tap of the item at {@code position}. */
    void onClick (View view, int position);

    /** Invoked on a long press of the item at {@code position}. */
    void onLongClick (View view, int position);
}
|
package ledger_model
import "github.com/blockchain-jd-com/framework-go/crypto/framework"
/*
* Author: imuge
* Date: 2020/5/27 下午1:22
*/
// LedgerInfo summarizes a ledger: its own hash plus the hash and height of
// the most recent block.
type LedgerInfo struct {
	Hash framework.HashDigest // hash identifying the ledger
	LatestBlockHash framework.HashDigest // hash of the latest block
	LatestBlockHeight int64 // height of the latest block
}
|
package network.commands;
import game.multiplayer.OnlineLocalGame;
import game.multiplayer.OnlineRemoteGame;
import game.multiplayer.powerUps.PowerUp;
import game.player.MultiModePlayer;
import network.Command;
/**
* Comunica l'attivazione di un powerup
*/
/**
 * Network command announcing that a power-up was activated.
 */
public class PowerUpCommand extends Command {

    private static final long serialVersionUID = -539210512249000007L;

    private PowerUp powerUp;

    /**
     * @param powerUp the activated power-up
     */
    public PowerUpCommand(PowerUp powerUp) {
        this.powerUp = powerUp;
    }

    @Override
    public void execute(OnlineRemoteGame remoteGame, OnlineLocalGame localGame) {
        // When the power-up targets the local game, the remote player
        // (presumably the one who activated it) is charged its price.
        if (powerUp.getAffectedGame() == PowerUp.LOCAL_GAME) {
            MultiModePlayer payer = remoteGame.getPlayer();
            payer.setCoins(payer.getCoins() - powerUp.getPrice());
        }
        powerUp.execute(localGame, remoteGame);
    }
}
|
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class FilmsCategory extends Model
{
public $table = 'films_category';
public $timestamps = false;
public function category(){
return $this->hasOne(Categoris::class,'id','category_id');//->select('name', 'id')->where('name','action');
}
public function films(){
return $this->hasOne(Films::class,'id','film_id');//->select('name', 'id')->where('name','action');
}
}
|
// Code generated by "stringer -type=LF_Type"; DO NOT EDIT
package logflags
import "fmt"
// Generated stringer lookup tables: each _LF_Type_name_N holds the
// concatenated flag names for one case in String, and the matching
// _LF_Type_index_N holds the slice offsets into that string.
const (
	_LF_Type_name_0 = "LF_dateLF_time"
	_LF_Type_name_1 = "LF_microseconds"
	_LF_Type_name_2 = "LF_longfile"
	_LF_Type_name_3 = "LF_shortfile"
	_LF_Type_name_4 = "LF_functionname"
	_LF_Type_name_5 = "LF_prefix"
	_LF_Type_name_6 = "LF_UTC"
	_LF_Type_name_7 = "LF_END"
)

var (
	_LF_Type_index_0 = [...]uint8{0, 7, 14}
	_LF_Type_index_1 = [...]uint8{0, 15}
	_LF_Type_index_2 = [...]uint8{0, 11}
	_LF_Type_index_3 = [...]uint8{0, 12}
	_LF_Type_index_4 = [...]uint8{0, 15}
	_LF_Type_index_5 = [...]uint8{0, 9}
	_LF_Type_index_6 = [...]uint8{0, 6}
	_LF_Type_index_7 = [...]uint8{0, 6}
)
// String maps a single LF_Type flag value (a power of two from 1 to 256,
// with 1 and 2 sharing the first name table) to its name. Any other value
// falls through to a generic "LF_Type(n)" representation.
// NOTE(review): this file is stringer-generated ("DO NOT EDIT"); comments
// here are review annotations only.
func (i LF_Type) String() string {
	switch {
	case 1 <= i && i <= 2:
		// Flags 1 and 2 are adjacent slices of _LF_Type_name_0.
		i -= 1
		return _LF_Type_name_0[_LF_Type_index_0[i]:_LF_Type_index_0[i+1]]
	case i == 4:
		return _LF_Type_name_1
	case i == 8:
		return _LF_Type_name_2
	case i == 16:
		return _LF_Type_name_3
	case i == 32:
		return _LF_Type_name_4
	case i == 64:
		return _LF_Type_name_5
	case i == 128:
		return _LF_Type_name_6
	case i == 256:
		return _LF_Type_name_7
	default:
		return fmt.Sprintf("LF_Type(%d)", i)
	}
}
|
using System.Globalization;
namespace MusicStore.Shared.Extensions
{
public static class StringExtensions
{
public static string ToSnake(this string text)
{
return string.Concat(text.Select((x, i) =>
i > 0 && char.IsUpper(x) ? "_" + x : x.ToString(CultureInfo.InvariantCulture)))
.ToLowerInvariant();
}
public static string ToCamelFirstUpper(this string text)
{
var textInfo = new CultureInfo(CultureInfo.CurrentCulture.ToString(), false).TextInfo;
return textInfo.ToTitleCase(text).Replace("_", string.Empty);
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.