namespace QuickGraph.Graphviz.Dot
{
public enum GraphvizLabelJustification
{
L,
R,
C
}
}
|
dnl PowerPC-64 mpn_lshift -- rp[] = up[] << cnt
dnl Copyright 2003, 2005 Free Software Foundation, Inc.
dnl This file is part of the GNU MP Library.
dnl The GNU MP Library is free software; you can redistribute it and/or modify
dnl it under the terms of the GNU Lesser General Public License as published
dnl by the Free Software Foundation; either version 2.1 of the License, or (at
dnl your option) any later version.
dnl The GNU MP Library is distributed in the hope that it will be useful, but
dnl WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
dnl or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
dnl License for more details.
dnl You should have received a copy of the GNU Lesser General Public License
dnl along with the GNU MP Library; see the file COPYING.LIB. If not, write
dnl to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
dnl Boston, MA 02110-1301, USA.
include(`../config.m4')
C cycles/limb
C POWER3/PPC630: 1.5
C POWER4/PPC970: 3.0
C INPUT PARAMETERS
define(`rp',`r3')
define(`up',`r4')
define(`n',`r5')
define(`cnt',`r6')
define(`tnc',`r5')
define(`v0',`r0')
define(`v1',`r7')
define(`u0',`r8')
define(`u1',`r9')
define(`h0',`r10')
define(`h1',`r11')
ASM_START()
PROLOGUE(mpn_lshift)
ifdef(`HAVE_ABI_mode32',
` rldicl r7, r5, 0, 32 C zero extend n
mtctr r7', C copy n to count register
` mtctr n') C copy n to count register
ifdef(`HAVE_ABI_mode32',
` rldic r0, n, 3, 32', C byte count corresponding to n
` rldicr r0, n, 3, 60') C byte count corresponding to n
add rp, rp, r0 C rp = rp + n
add up, up, r0 C up = up + n
addi rp, rp, 8 C rp now points 16 beyond end
addi up, up, -8 C up now points to last limb
subfic tnc, cnt, 64 C reverse shift count
ld u0, 0(up)
sld h0, u0, cnt
srd r12, u0, tnc C return value
bdz L(1) C jump for n = 1
ld u1, -8(up)
bdz L(2) C jump for n = 2
ldu u0, -16(up)
bdz L(end) C jump for n = 3
L(oop): srd v1, u1, tnc
sld h1, u1, cnt
ld u1, -8(up)
or h0, v1, h0
stdu h0, -16(rp)
bdz L(exit)
srd v0, u0, tnc
sld h0, u0, cnt
ldu u0, -16(up)
or h1, v0, h1
std h1, -8(rp)
bdnz L(oop)
L(end): srd v1, u1, tnc
sld h1, u1, cnt
or h0, v1, h0
stdu h0, -16(rp)
srd v0, u0, tnc
sld h0, u0, cnt
or h1, v0, h1
std h1, -8(rp)
L(1): std h0, -16(rp)
ifdef(`HAVE_ABI_mode32',
` srdi r3, r12, 32
mr r4, r12
',` mr r3, r12
')
blr
L(exit): srd v0, u0, tnc
sld h0, u0, cnt
or h1, v0, h1
std h1, -8(rp)
L(2): srd v1, u1, tnc
sld h1, u1, cnt
or h0, v1, h0
stdu h0, -16(rp)
std h1, -8(rp)
ifdef(`HAVE_ABI_mode32',
` srdi r3, r12, 32
mr r4, r12
',` mr r3, r12
')
blr
EPILOGUE()
|
# Simulate a price path: start at 100 and apply each fractional return in `a`,
# recording the resulting price after every step.
function simulate(a::Array)
    current_price = 100
    prices = Float64[]
    for f in a
        current_price = current_price * f + current_price  # i.e. price *= (1 + f)
        push!(prices, current_price)
    end
    prices
end
|
using System;
using System.Text.RegularExpressions;
namespace Netronics.Http
{
class UriFinder
{
private readonly Regex _rx;
private readonly Func<string[], object> _handler;
public UriFinder(string uri, Func<string[], object> handler)
{
_rx = new Regex(uri);
_handler = handler;
}
public bool IsMatch(string uri)
{
return _rx.IsMatch(uri);
}
public object GetHandler(string uri)
{
return _handler(_rx.Split(uri));
}
}
}
|
#include <stdio.h>
#include <string.h>
#include <limits.h>
/* ########################## MAZE FORMAT ##########################
first pair of numbers are the dimensions of maze (maze[n][m])
second pair of numbers are the coordinates of the starting point
next is the maze, where F is the finish point
########################## MAZE FORMAT ########################## */
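/* A hypothetical example maze.txt (illustration only; '#' as the wall
   character is an assumption, since anything other than '.' and 'F' is
   simply treated as unwalkable):
   3 4
   0 0
   . # . .
   . # . .
   . . . F
*/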
unsigned n,m;
int shortest_counter = INT_MAX;
int counter;
int sw;
void print_maze_c(int maze[][m])
{
for (int i=0; i<n; i++)
{
for (int j=0; j<m; j++)
printf("%c ", maze[i][j]);
printf("\n");
}
printf("\n");
}
void copy_maze(int shortest_sol[][m], int maze[][m])
{
for (int line=0; line<n; line++)
for (int coll = 0; coll < m; coll ++)
shortest_sol[line][coll] = maze[line][coll];
}
int can_step_here(int i, int j, int maze[n][m])
{
if (i >= 0 && j >= 0 && i <= (n-1) && j <= (m-1))
if ((char)maze[i][j] == '.' || (char)maze[i][j] == 'F')
return 1;
return 0;
}
void baccu_traccu (int map[][m], int i, int j, int maze[][m], int shortest_sol[][m])
{
if( (char)maze[i][j] == 'F' )
{
sw=1;
print_maze_c(maze);
if (shortest_counter > counter)
{
shortest_counter = counter;
copy_maze(shortest_sol, maze);
}
//counter --;
return;
}
if (can_step_here(i, j, maze))
{
counter++;
if (maze[i][j]!='F')
maze[i][j]='x';
if (i < n-1)
baccu_traccu(map, i+1, j, maze, shortest_sol);
if (j < m-1)
baccu_traccu(map, i, j+1, maze, shortest_sol);
if (i > 0)
baccu_traccu(map, i-1, j, maze, shortest_sol);
if (j > 0)
baccu_traccu(map, i, j-1, maze, shortest_sol);
if (maze[i][j] != 'F')
{
counter--;
maze[i][j] = '.';
}
}
}
int main ()
{
FILE *fp = fopen ("maze.txt", "r");
if (fp == NULL)
{
    printf("Could not open maze.txt\n");
    return 1;
}
fscanf(fp," %u %u \n", &n,&m);
int maze[n][m];
int map[n][m];
int shortest_sol[n][m];
int start_i;
int start_j;
fscanf(fp, " %d %d \n", &start_i, &start_j);
for (int i=0; i<n; i++)
    for (int j=0; j<m; j++)
    {
        char cell;                      /* %c expects a char*, not an int* */
        fscanf(fp," %c", &cell);
        map[i][j] = cell;
    }
fclose(fp);
copy_maze(maze,map);
baccu_traccu(map, start_i, start_j, maze, shortest_sol);
if (sw)
{
printf("Shortest solution\n");
print_maze_c(shortest_sol);
}
else
    printf("Solution doesn't exist\n");
return 0;
}
|
# TODO
## MassLoader
- Have one global IDeferAgent to
  - Load all glTFs as fast as possible
  - Load all glTFs as fast as possible whilst keeping the target frame rate
|
#include "Message.hpp"
Message::Message()
{
}
Message::~Message()
{
}
|
import 'package:flutter/material.dart';
import 'package:flutter_platform_widgets/flutter_platform_widgets.dart';
import 'package:mymgs/data/clubs.dart';
import 'package:mymgs/data/settings.dart';
import 'package:mymgs/data_classes/club.dart';
import 'package:mymgs/notifications/permissions.dart';
import 'package:mymgs/screens/settings/notifications.dart';
import 'package:mymgs/widgets/button.dart';
class ClubSubscriptionButton extends StatefulWidget {
final Club club;
const ClubSubscriptionButton({
required this.club,
});
@override
_ClubSubscriptionButtonState createState() => _ClubSubscriptionButtonState();
}
class _ClubSubscriptionButtonState extends State<ClubSubscriptionButton> {
bool subscribed = false;
@override
void initState() {
isSubscribedToClub(widget.club)
.then((value) {
setState(() {
subscribed = value;
});
});
super.initState();
}
void _toggle() async {
if (!(await isNotificationAllowed('clubs'))) {
ScaffoldMessenger.of(context).showSnackBar(SnackBar(
content: Text('Please turn on club reminders to subscribe.'),
action: SnackBarAction(
label: "Configure",
onPressed: () {
Navigator.of(context).push(platformPageRoute(
context: context,
builder: (_) => NotificationSettings(),
));
},
),
));
return;
}
if (subscribed) {
await unsubscribeFromClub(widget.club);
} else {
await subscribeToClub(widget.club);
await saveSetting('tutorial_dismissed_club_subscribe', true);
}
setState(() {
subscribed = !subscribed;
});
}
@override
Widget build(BuildContext context) {
return Container(
width: double.infinity,
child: MGSButton(
label: subscribed ? "Unsubscribe" : "Subscribe",
onPressed: _toggle,
),
);
}
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using Orchard.Autoroute.Models;
using Orchard.ContentManagement;
using Orchard.ContentManagement.Aspects;
using Orchard.Core.Title.Models;
namespace Orchard.Taxonomies.Models {
public class TermPart : ContentPart<TermPartRecord> {
public string Name {
get { return this.As<TitlePart>().Title; }
set { this.As<TitlePart>().Title = value; }
}
public string Slug {
get { return this.As<AutoroutePart>().DisplayAlias; }
set { this.As<AutoroutePart>().DisplayAlias = value; }
}
public IContent Container {
get { return this.As<ICommonPart>().Container; }
set { this.As<ICommonPart>().Container = value; }
}
public int TaxonomyId {
get { return Retrieve(x => x.TaxonomyId); }
set { Store(x => x.TaxonomyId, value); }
}
/// <summary>
/// e.g., /; /1/; /1/2/
/// </summary>
public string Path {
get { return Retrieve(x => x.Path); }
set { Store(x => x.Path, value); }
}
public int Count {
get { return Retrieve(x => x.Count); }
set { Store(x => x.Count, value); }
}
public bool Selectable {
get { return Retrieve(x => x.Selectable); }
set { Store(x => x.Selectable, value); }
}
public int Weight {
get { return Retrieve(x => x.Weight); }
set { Store(x => x.Weight, value); }
}
public string FullPath { get { return String.Concat(Path, Id); } }
public static IEnumerable<TermPart> Sort(IEnumerable<TermPart> terms) {
var list = terms.ToList();
var index = list.ToDictionary(x => x.FullPath);
return list.OrderBy(x => x, new TermsComparer(index));
}
private class TermsComparer : IComparer<TermPart> {
private readonly IDictionary<string, TermPart> _index;
public TermsComparer(IDictionary<string, TermPart> index) {
_index = index;
}
public int Compare(TermPart x, TermPart y) {
// if two nodes have the same parent, then compare by weight, then by path
// /1/2/3 vs /1/2/4 => 3 vs 4
if (x.Path == y.Path) {
var weight = y.Weight.CompareTo(x.Weight);
if (weight != 0) {
return weight;
}
// if same parent path and same weight, compare by name
return String.Compare(x.Name, y.Name, StringComparison.OrdinalIgnoreCase);
}
// if two nodes have different parents
// if the two nodes have the same root, the deeper is after (i.e. one starts with the other)
// /1/2 vs /1/2/3 => /1/2 first
if (x.FullPath.StartsWith(y.FullPath, StringComparison.OrdinalIgnoreCase)) {
return 1;
}
if (y.FullPath.StartsWith(x.FullPath, StringComparison.OrdinalIgnoreCase)) {
return -1;
}
// otherwise compare first none matching parent
// /1/2 vs /1/3 => 2 vs 3
// /2/3 vs /4 => 2 vs 4
var xPath = x.FullPath.Split(new[] { '/' }, StringSplitOptions.RemoveEmptyEntries);
var yPath = y.FullPath.Split(new[] { '/' }, StringSplitOptions.RemoveEmptyEntries);
string xFullPath = "", yFullPath = "";
for (var i = 0; i < Math.Min(xPath.Length, yPath.Length); i++) {
xFullPath += "/" + xPath[i];
yFullPath += "/" + yPath[i];
if (!xFullPath.Equals(yFullPath, StringComparison.OrdinalIgnoreCase)) {
var xParent = _index[xFullPath];
var yParent = _index[yFullPath];
return Compare(xParent, yParent);
}
}
return 0;
}
}
}
}
|
using System;
using System.Collections;
using System.Text;
using FluentAssertions.Formatting;
namespace CHC.Consent.EFCore.Tests
{
public class DictionaryIdentifierFormatter : IValueFormatter
{
public bool CanHandle(object value) => value is IDictionary;
/// <inheritdoc />
public string Format(object value, FormattingContext context, FormatChild formatChild)
{
var newline = context.UseLineBreaks ? Environment.NewLine : "";
var padding = new string('\t', context.Depth);
var result = new StringBuilder($"{newline}{padding}{{");
foreach (DictionaryEntry entry in (IDictionary)value)
{
result.AppendFormat(
"[{0}]: {{{1}}},",
formatChild("Key", entry.Key),
formatChild("Value", entry.Value));
}
result.Append($"{newline}{padding}}}");
return result.ToString();
}
public static IValueFormatter Instance { get; } = new DictionaryIdentifierFormatter();
}
}
|
<?php
/**
* request.php
*
* @author panlilu
* @copyright bifubao.com
* @since 2014-08
*/
require_once("bifubao_common.php");
global $bifubao_config;
$req = req_get_params();
$req['_pid_'] = $bifubao_config['pid'];
$req['_time_'] = time();
$to_sign_data = req_make_sign_data($req);
$checksum = md5($to_sign_data.$bifubao_config['key']);
$req['_checksum_'] = $checksum;
echo build_bifubao_request_form($req);
exit;
|
import { MinePlan } from "RoomPlanner";
export function validateMinePlan(plan: Partial<MinePlan>) {
if (!plan.extractor || !plan.container) {
throw new Error(`Incomplete MinePlan`);
} else {
return plan as MinePlan;
}
}
|
%%--------------------------------------------------------------------
%% Copyright (c) 2021-2022 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(hocon_maps).
%% Deep ops of deep maps.
-export([deep_get/2, deep_put/4,
deep_merge/2]).
%% Access maybe-rich map values,
%% Always return plain value.
-export([get/2, get/3]).
-export([flatten/2]).
-export([do_put/4]). %% internal
-export([ensure_plain/1, is_richmap/1]).
-include("hocon_private.hrl").
-define(EMPTY_MAP, #{}).
-type config() :: hocon:config().
%% this can be the opts() from hocon_tconf, but only `atom_key' is relevant
-type opts() :: #{atom_key => boolean(),
_ => _}.
-type flatten_opts() :: #{rich_value => boolean()}.
%% @doc put unboxed value to the richmap box
%% this function is called in places where there is no boxing context
%% so it has to accept unboxed value.
-spec deep_put(string(), term(), config(), opts()) -> config().
deep_put(Path, Value, Conf, Opts) ->
put_rich(Opts, hocon_util:split_path(Path), Value, Conf).
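%% Illustrative example (assumed shapes, not taken from this module's tests):
%% putting the value 1 at path "a.b" into an empty box yields
%%   #{?HOCON_V => #{<<"a">> => #{?HOCON_V => #{<<"b">> => #{?HOCON_V => 1}}}}},
%% i.e. every nesting level of the result is boxed under ?HOCON_V.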
put_rich(_Opts, [], Value, Box) ->
boxit(Value, Box);
put_rich(Opts, [Name | Path], Value, Box) ->
V0 = safe_unbox(Box),
GoDeep = fun(Elem) -> put_rich(Opts, Path, Value, Elem) end,
V = do_put(V0, Name, GoDeep, Opts),
boxit(V, Box).
do_put(V, Name, GoDeep, Opts) ->
case maybe_array(V) andalso hocon_util:is_array_index(Name) of
{true, Index} -> update_array_element(V, Index, GoDeep);
false when is_map(V) -> update_map_field(Opts, V, Name, GoDeep);
false -> update_map_field(Opts, #{}, Name, GoDeep)
end.
maybe_array(V) when is_list(V) -> true;
maybe_array(V) -> V =:= ?EMPTY_MAP.
update_array_element(?EMPTY_MAP, Index, GoDeep) ->
update_array_element([], Index, GoDeep);
update_array_element(List, Index, GoDeep) when is_list(List) ->
do_update_array_element(List, Index, GoDeep).
update_map_field(Opts, Map, FieldName, GoDeep) ->
FieldV0 = maps:get(FieldName, Map, ?EMPTY_MAP),
FieldV = GoDeep(FieldV0),
Map1 = maps:without([FieldName], Map),
Map1#{maybe_atom(Opts, FieldName) => FieldV}.
maybe_atom(#{atom_key := true}, Name) when is_binary(Name) ->
try
binary_to_existing_atom(Name, utf8)
catch
_ : _ ->
error({non_existing_atom, Name})
end;
maybe_atom(_Opts, Name) ->
Name.
safe_unbox(MaybeBox) ->
case maps:get(?HOCON_V, MaybeBox, undefined) of
undefined -> ?EMPTY_MAP;
Value -> Value
end.
boxit(Value, Box) -> Box#{?HOCON_V => Value}.
%% @doc Get value from a plain or rich map.
%% `undefined' is returned if there is no value at the given path.
%% NOTE: always returns a plain value.
-spec get(string(), config(), term()) -> term().
get(Path, Config, Default) ->
case get(Path, Config) of
undefined -> Default;
V -> V
end.
%% @doc Get a child node from a richmap; the returned value is also a richmap.
%% `undefined' is returned if there is no value at the given path.
%% Key (first arg) can be "foo.bar.baz" or ["foo.bar", "baz"] or ["foo", "bar", "baz"].
-spec deep_get(string() | [string()], config()) -> config() | undefined.
deep_get(Path, Conf) ->
do_get(hocon_util:split_path(Path), Conf, richmap).
%% @doc Get value from a maybe-rich map.
%% always returns a plain value.
-spec get(string(), config()) -> term().
get(Path, Map) ->
case is_richmap(Map) of
true ->
C = deep_get(Path, Map),
hocon_util:richmap_to_map(C);
false ->
do_get(hocon_util:split_path(Path), Map, map)
end.
do_get([], Conf, _Format) -> Conf;
do_get([H | T], Conf, Format) ->
FieldV = try_get(H, Conf, Format),
do_get(T, FieldV, Format).
try_get(_Key, undefined, _Format) ->
undefined;
try_get(Key, Conf, richmap) ->
#{?HOCON_V := V} = Conf,
try_get(Key, V, map);
try_get(Key, Conf, map) when is_map(Conf) ->
case maps:get(Key, Conf, undefined) of
undefined ->
try binary_to_existing_atom(Key, utf8) of
AtomKey -> maps:get(AtomKey, Conf, undefined)
catch
error : badarg ->
undefined
end;
Value ->
Value
end;
try_get(Key, Conf, map) when is_list(Conf) ->
try binary_to_integer(Key) of
N ->
lists:nth(N, Conf)
catch
error : badarg ->
undefined
end.
%% @doc Recursively merge two maps.
%% @see hocon:deep_merge/2 for more.
deep_merge(#{?HOCON_T := array, ?HOCON_V := V1} = Base,
#{?HOCON_T := object, ?HOCON_V := V2} = Top) ->
NewV = deep_merge2(V1, V2),
case is_list(NewV) of
true ->
%% after merge, it's still an array, only update the value
%% keep the metadata
Base#{?HOCON_V => NewV};
false ->
%% after merge, it's no longer an array, return all old
Top
end;
deep_merge(V1, V2) ->
deep_merge2(V1, V2).
deep_merge2(M1, M2) when is_map(M1) andalso is_map(M2) ->
do_deep_merge(M1, M2, fun deep_merge/2);
deep_merge2(V1, V2) ->
case is_list(V1) andalso is_indexed_array(V2) of
true -> merge_array(V1, V2);
false -> V2
end.
do_deep_merge(M1, M2, GoDeep) when is_map(M1), is_map(M2) ->
maps:fold(
fun(K, V2, Acc) ->
V1 = maps:get(K, Acc, undefined),
NewV = do_deep_merge(V1, V2, GoDeep),
Acc#{K => NewV}
end, M1, M2);
do_deep_merge(V1, V2, GoDeep) ->
GoDeep(V1, V2).
is_indexed_array(M) when is_map(M) ->
lists:all(fun(K) -> case is_array_index(K) of
{true, _} -> true;
_ -> false
end
end, maps:keys(M));
is_indexed_array(_) ->
false.
%% convert indexed array to key-sorted tuple {index, value} list
indexed_array_as_list(M) when is_map(M) ->
lists:keysort(
1, lists:map(fun({K, V}) ->
{true, I} = is_array_index(K),
{I, V}
end, maps:to_list(M))).
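%% e.g. (assuming 1-based binary index keys, as produced by environment overrides):
%%   indexed_array_as_list(#{<<"2">> => b, <<"1">> => a}) -> [{1, a}, {2, b}]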
merge_array(Array, Top) when is_list(Array) ->
ToMerge = indexed_array_as_list(Top),
do_merge_array(Array, ToMerge).
do_merge_array(Array, []) -> Array;
do_merge_array(Array, [{I, Value} | Rest]) ->
GoDeep = fun(Elem) -> deep_merge(Elem, Value) end,
NewArray = do_update_array_element(Array, I, GoDeep),
do_merge_array(NewArray, Rest).
do_update_array_element(List, Index, GoDeep) when is_list(List) ->
MinIndex = 1,
MaxIndex = length(List) + 1,
Index < MinIndex andalso throw({bad_array_index, "index starts from 1"}),
Index > MaxIndex andalso
begin
Msg0 = io_lib:format("should not be greater than ~p.", [MaxIndex]),
Msg1 = case Index > 9 of
true ->
"~nEnvironment variable overrides applied in alphabetical "
"make sure to use zero paddings such as '02' to ensure "
"10 is ordered after it";
false ->
[]
end,
throw({bad_array_index, [Msg0, Msg1]})
end,
{Head, Tail0} = lists:split(Index - 1, List),
{Nth, Tail} = case Tail0 of
[] -> {#{}, []};
[H | T] -> {H, T}
end,
Head ++ [GoDeep(Nth) | Tail].
is_array_index(Maybe) ->
hocon_util:is_array_index(Maybe).
%% @doc Flatten out a deep-nested map to {<<"path.to.value">>, Value} pairs
%% If `rich_value' is provided `true' in `Opts', the value is a map with
%% metadata.
-spec flatten(config(), flatten_opts()) -> [{binary(), term()}].
flatten(Conf, Opts) ->
lists:reverse(flatten(Conf, Opts, undefined, [], [])).
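%% Illustrative example with a plain (non-rich) map, assumed rather than taken
%% from the test suite: flatten(#{<<"a">> => #{<<"b">> => 1}}, #{}) -> [{<<"a.b">>, 1}]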
flatten(Conf, Opts, Meta, Stack, Acc) when is_list(Conf) andalso Conf =/= [] ->
flatten_l(Conf, Opts, Meta, Stack, Acc, lists:seq(1, length(Conf)));
flatten(#{?HOCON_V := Value} = Conf, Opts, _Meta, Stack, Acc) ->
Meta = maps:get(?METADATA, Conf, undefined),
flatten(Value, Opts, Meta, Stack, Acc);
flatten(Conf, Opts, Meta, Stack, Acc) when is_map(Conf) andalso Conf =/= ?EMPTY_MAP ->
{Keys, Values} = lists:unzip(maps:to_list(Conf)),
flatten_l(Values, Opts, Meta, Stack, Acc, Keys);
flatten(Value, Opts, Meta, Stack, Acc) ->
V = case maps:get(rich_value, Opts, false) of
true -> #{?HOCON_V => Value, ?METADATA => Meta};
false -> Value
end,
[{iolist_to_binary(infix(lists:reverse(Stack), ".")), V} | Acc].
flatten_l([], _Opts, _Meta, _Stack, Acc, []) -> Acc;
flatten_l([H | T], Opts, Meta, Stack, Acc, [Tag | Tags]) ->
NewAcc = flatten(H, Opts, Meta, [bin(Tag) | Stack], Acc),
flatten_l(T, Opts, Meta, Stack, NewAcc, Tags).
bin(B) when is_binary(B) -> B;
bin(I) when is_integer(I) -> integer_to_binary(I).
infix([], _) -> [];
infix([X], _) -> [X];
infix([H | T], I) -> [H, I | infix(T, I)].
ensure_plain(M) ->
case is_richmap(M) of
true -> hocon_util:richmap_to_map(M);
false -> M
end.
%% @doc Check if it's a richmap.
%% A richmap always has a `?HOCON_V' field.
is_richmap(M) -> hocon_util:is_richmap(M).
|
package org.celtric.kotlin.html
fun param(
// Mandatory
name: String,
value: String,
// Custom
other: Attributes = emptyMap()
) = EmptyBlockElement("param", AllAttributes(mapOf(
"name" to name,
"value" to value
), other, emptyMap()))
|
package dev.aerin.managerspecials.models
data class SpecialsPage(val canvasUnit: Int, val managerSpecials: List<Special>) {
fun getUnitSize(overallSize: Int): Int {
return overallSize / canvasUnit
}
}
|
package Module04._03UserInputAndConditional;
/**
* The purpose of this program is to get the target heart rate zone of a person,
* and then see if they are in that zone... Using if/if else/else
*
* @author (your name)
* @version (a version number or a date)
*/
import java.util.Scanner;
public class TargetZone
{
public static void main(String[] args)
{
Scanner in = new Scanner(System.in);
System.out.println("Determine Your Target Heart Rate Zone For Cardiovascular Exercise");
System.out.println("(50% to 85%)");
System.out.println();
System.out.print("Enter your age: ");
int userAge = in.nextInt();
System.out.print("Enter your resting heart rate (RHR): ");
int RHR = in.nextInt();
int maxHeartRate = 220 - userAge;
int heartRateReserve = maxHeartRate - RHR;
double highEnd = (heartRateReserve * .85) + RHR;
double lowEnd = (heartRateReserve * .50) + RHR;
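// Worked example (hypothetical input): age 20 and RHR 60 give a max heart rate
// of 200 and a reserve of 140, so the zone runs from 130.0 (50%) to 179.0 (85%).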
System.out.println();
System.out.println("Your target heart rate zone is between " + lowEnd + " and " + highEnd);
System.out.println();
System.out.println("Now do a little bit of an excersise and come back and enter your heart rate :)");
System.out.println();
System.out.print("Enter your heart rate after exercising: ");
int heartRateAfter = in.nextInt();
System.out.println();
System.out.print("After just exercising, ");
if(heartRateAfter < lowEnd)
{
System.out.println("your heart rate is lower than your target zone.");
}
else if(heartRateAfter > highEnd)
{
System.out.println("your heart rate is above your target zone.");
}
else
{
System.out.println("your heart rate is within your target zone.");
}
}
}
|
module Gitlab
module GithubImport
class Client
GITHUB_SAFE_REMAINING_REQUESTS = 100
GITHUB_SAFE_SLEEP_TIME = 500
attr_reader :access_token
def initialize(access_token)
@access_token = access_token
if access_token
::Octokit.auto_paginate = false
end
end
def api
@api ||= ::Octokit::Client.new(
access_token: access_token,
api_endpoint: github_options[:site],
# If there is no config, we're connecting to github.com and we
# should verify ssl.
connection_options: {
ssl: { verify: config ? config['verify_ssl'] : true }
}
)
end
def client
unless config
raise Projects::ImportService::Error,
'OAuth configuration for GitHub missing.'
end
@client ||= ::OAuth2::Client.new(
config.app_id,
config.app_secret,
github_options.merge(ssl: { verify: config['verify_ssl'] })
)
end
def authorize_url(redirect_uri)
client.auth_code.authorize_url({
redirect_uri: redirect_uri,
scope: "repo, user, user:email"
})
end
def get_token(code)
client.auth_code.get_token(code).token
end
def method_missing(method, *args, &block)
if api.respond_to?(method)
request(method, *args, &block)
else
super(method, *args, &block)
end
end
def respond_to?(method)
api.respond_to?(method) || super
end
private
def config
Gitlab.config.omniauth.providers.find { |provider| provider.name == "github" }
end
def github_options
if config
config["args"]["client_options"].deep_symbolize_keys
else
OmniAuth::Strategies::GitHub.default_options[:client_options].symbolize_keys
end
end
def rate_limit
api.rate_limit!
# GitHub Rate Limit API returns 404 when the rate limit is
# disabled. In this case we just want to return gracefully
# instead of spitting out an error.
rescue Octokit::NotFound
nil
end
def has_rate_limit?
return @has_rate_limit if defined?(@has_rate_limit)
@has_rate_limit = rate_limit.present?
end
def rate_limit_exceed?
has_rate_limit? && rate_limit.remaining <= GITHUB_SAFE_REMAINING_REQUESTS
end
def rate_limit_sleep_time
rate_limit.resets_in + GITHUB_SAFE_SLEEP_TIME
end
def request(method, *args, &block)
sleep rate_limit_sleep_time if rate_limit_exceed?
data = api.send(method, *args, &block)
yield data
last_response = api.last_response
while last_response.rels[:next]
sleep rate_limit_sleep_time if rate_limit_exceed?
last_response = last_response.rels[:next].get
yield last_response.data if last_response.data.is_a?(Array)
end
end
end
end
end
|
CREATE TABLE [dbo].[FileStatsLatestBaseline]
(
WaitSnapshotId bigint NOT NULL,
io_stall bigint,
io_stall_read_ms bigint,
io_stall_write_ms bigint,
num_of_reads bigint,
num_of_writes bigint,
size_on_disk_mb numeric,
database_id int,
dbname [sys].[sysname],
fileName sysname,
file_id smallint,
CONSTRAINT PK_FileStatsLatestBaseline PRIMARY KEY (WaitSnapshotId,database_id, file_id )
)
|
---
date: 2012-11-01
round: Round 2
title: 'Round 2.1 --- Shell filename wildcarding'
author: Adam Obeng
permalink: /2012/11/round-2-1-shell-filename-wildcarding/
tags:
- Concept Map
---
Because it's immensely time-saving, and it eventually segues into regexes. Perhaps more importantly, it prepares the ground for string literals, metacharacters, escaping and delimiter collision.
|
#ifndef GENERAL_UTIL_H
#define GENERAL_UTIL_H
#include <stdio.h>
#include <nav_msgs/Odometry.h>
#include <geometry_msgs/PoseStamped.h>
#include <eigen_conversions/eigen_msg.h>
#include <std_msgs/Float64MultiArray.h>
#include <sensor_msgs/PointCloud.h>
#include <cmath>
// Functions are defined inline so this header can be included from multiple
// translation units without multiple-definition linker errors.
inline Eigen::Vector3d getYPR(const geometry_msgs::Quaternion &q) {
double q0 = q.w;
double q1 = q.x;
double q2 = q.y;
double q3 = q.z;
Eigen::Vector3d ypr;
ypr(0) = atan2(2 * (q0 * q3 + q1 * q2), 1 - 2 * (q2 * q2 + q3 * q3));
ypr(1) = asin(2 * (q0 * q2 - q3 * q1));
ypr(2) = atan2(2 * (q0 * q1 + q2 * q3), 1 - 2 * (q1 * q1 + q2 * q2));
return ypr;
}
inline Eigen::Quaterniond getQuaternionFromYPR(const Eigen::Vector3d &ypr) {
double r2 = ypr(2) / 2;
double p2 = ypr(1) / 2;
double y2 = ypr(0) / 2;
Eigen::Quaterniond q(
cos(r2) * cos(p2) * cos(y2) + sin(r2) * sin(p2) * sin(y2),
sin(r2) * cos(p2) * cos(y2) - cos(r2) * sin(p2) * sin(y2),
cos(r2) * sin(p2) * cos(y2) + sin(r2) * cos(p2) * sin(y2),
cos(r2) * cos(p2) * sin(y2) - sin(r2) * sin(p2) * cos(y2));
return q;
}
inline double getYawFromEigen(const Eigen::Affine3d &dTF) {
Eigen::Matrix3d m = dTF.rotation();
Eigen::Quaterniond q(m);
double yaw = atan2(2 * (q.w() * q.z() + q.x() * q.y()),
1 - 2 * (q.y() * q.y() + q.z() * q.z()));
return yaw;
}
inline void printTF(const Eigen::Affine3d &TF) {
geometry_msgs::Transform transform;
tf::transformEigenToMsg(TF, transform);
double q0 = transform.rotation.w;
double q1 = transform.rotation.x;
double q2 = transform.rotation.y;
double q3 = transform.rotation.z;
Eigen::Vector3d ypr;
ypr(0) = atan2(2 * (q0 * q3 + q1 * q2), 1 - 2 * (q2 * q2 + q3 * q3));
ypr(1) = asin(2 * (q0 * q2 - q3 * q1));
ypr(2) = atan2(2 * (q0 * q1 + q2 * q3), 1 - 2 * (q1 * q1 + q2 * q2));
printf("x: %f, y: %f, z: %f\n", transform.translation.x,
transform.translation.y, transform.translation.z);
printf("yaw: %f, pitch: %f, roll: %f\n", ypr(0), ypr(1), ypr(2));
}
inline geometry_msgs::Pose transferEigenToPose(const Eigen::Affine3d &TF) {
geometry_msgs::Transform transform;
tf::transformEigenToMsg(TF, transform);
geometry_msgs::Pose pose;
pose.position.x = transform.translation.x;
pose.position.y = transform.translation.y;
pose.position.z = transform.translation.z;
pose.orientation.w = transform.rotation.w;
pose.orientation.x = transform.rotation.x;
pose.orientation.y = transform.rotation.y;
pose.orientation.z = transform.rotation.z;
return pose;
}
#endif
|
<?php
return [
'action' => 'Acción',
'add' => 'Agregar',
'add_new' => 'Añadir nuevo',
'ascending' => 'Ascendente',
'back' => 'Volver',
'back_to_home' => 'Volver al panel',
'background' => 'Imagen principal de fondo',
'basic' => 'Básico',
'calendar' => 'Calendario',
'cancel' => 'Cancelar',
'cancel_upload' => 'Cancelar subida',
'choose_image' => 'Escoger',
'close' => 'Cerrar',
'contact_info' => 'Información de contacto',
'date_between' => 'Fecha entre',
'date_range' => 'Rango de fecha',
'delete' => 'Eliminar',
'descending' => 'Descendente',
'edit' => 'Editar',
'end_date' => 'Fecha final',
'feature_not_available' => 'Característica no disponible.',
'file' => 'Archivo',
'file_not_supported' => 'Archivo no soportado.',
'filter' => 'Filtro',
'hide' => 'Ocultar',
'home' => 'Panel',
'invalid_action' => 'Esta no es una acción válida',
'invalid_link' => 'Este no es un enlace válido.',
'image' => 'Imagen',
'logo' => 'Logo',
'main' => 'Principal',
'menu' => 'Menú',
'name' => 'Nombre',
'no' => 'No',
'no_file_uploaded' => 'Archivo no cargado',
'no_result_found' => 'Sin resultados',
'order' => 'Orden',
'page_not_found_error' => '404',
'page_not_found_heading' => 'Oops! Página no encontrada.',
'page_not_found_message' => 'La página que estás solicitando no existe o ha sido movida',
'per_page' => 'por página',
'permission_denied' => 'No tienes permiso para realizar esta acción',
'post' => 'Publicación',
'proceed' => 'Proceder',
'proceed_with_request' => '¿Quiere eliminar esto?',
'processing' => 'Procesando....',
'remove' => 'Remover',
'save' => 'Guardar',
'search_for' => 'Buscar...',
'select_one' => 'Seleccionar una',
'sidebar' => 'Barra lateral',
'share' => 'Compartir',
'something_wrong' => 'Algo va mal. Por favor, intente luego.',
'sort_by' => 'Ordenar por',
'start_date' => 'Fecha inicio',
'system' => 'Sistema',
'to' => 'a',
'token_expired' => 'Su sesión ha expirado. Ingrese de nuevo',
'total_result_found' => 'Total :count resultados encontrados.',
'update' => 'Actualizar',
'upload' => 'Cargar',
'yes' => 'Sí',
'email_sent' => 'Email enviado.',
'email_not_sent' => 'El email no ha sido enviado.',
'blog' => 'Blog',
'title' => 'Título',
'subtitle' => 'Subtítulo',
'list' => 'Lista',
'portfolio' => 'Portafolio'
];
|
function greet()
println("hello world")
end
# greet (generic function with 1 method)
greet()
# hello world
function calculator(x, y, operation)
if operation == "+"
x+y
elseif operation == "-"
x-y
elseif operation == "*"
x*y
elseif operation == "/"
x/y
else
println("Incorrect operation")
return 0
end
end
# calculator (generic function with 1 method)
println(calculator(10,20, "+"))
# 30
println(calculator(10,20, "-"))
# -10
println(calculator(10,20, "*"))
# 200
println(calculator(10,20, "/"))
# 0.5
# passing arguments
function say_hello(name)
println("hello $name")
end
# say_hello (generic function with 1 method)
say_hello("rahul")
# hello rahul
# explicitly defining the type
function say_hello(name::String)
println("hello $name")
end
# say_hello (generic function with 1 method)
say_hello("rahul")
# hello rahul
# Variable arguments
function letsplay(x,y...)
println(x)
println(y)
end
# letsplay (generic function with 1 method)
letsplay("cricket","hockey","tennis")
# cricket
# ("hockey","tennis")
|
package com.checkout.hybris.addon.validators.paymentform;
import com.checkout.hybris.addon.forms.PaymentDataForm;
import org.springframework.validation.Errors;
/**
* Validates Ideal payment form
*/
public class CheckoutComIdealPaymentDataFormValidator extends CheckoutComAbstractPaymentDataFormValidValidator {
protected static final String BIC_KEY = "bic";
/**
* {@inheritDoc}
*/
public boolean supports(final Class<?> clazz) {
return PaymentDataForm.class.isAssignableFrom(clazz);
}
/**
* {@inheritDoc}
*/
@Override
public void validate(final Object form, final Errors errors) {
final PaymentDataForm paymentDataForm = (PaymentDataForm) form;
final String bic = (String) paymentDataForm.getFormAttributes().get(BIC_KEY);
if (isFieldBlank(paymentDataForm, BIC_KEY) || !isValidBic(bic)) {
errors.rejectValue("formAttributes['" + BIC_KEY + "']", "checkoutcom.payment.ideal.bic.error");
}
}
/**
* Checks if the bic value has a valid format (8 or 11 alphanumeric characters)
*
* @param bic the BIC value
* @return true if valid, false otherwise
*/
protected boolean isValidBic(final String bic) {
return bic.matches("^[a-zA-Z0-9]{8}$|^[a-zA-Z0-9]{11}$");
}
}
|
/*jslint browser: true*/
/*globals define*/
/**
* <%= name %>
*
* <%= desc %>
*/
define([
'knockout',
'text!./template.html'
], function (ko, html) {
'use strict';
/**
* <%= name %>ViewModel
*/
function <%= name %>ViewModel(params) {
var self = this;
self.message = ko.observable('Aloha');
} /* End of View Model */
return {
viewModel: <%= name %>ViewModel,
template: html
};
});
|
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'site.dart';
// **************************************************************************
// JsonSerializableGenerator
// **************************************************************************
_$_Search _$_$_SearchFromJson(Map<String, dynamic> json) {
return _$_Search(
q: json['q'] as String,
type: json['type_'] == null
? null
: SearchType.fromJson(json['type_'] as String),
listingType: json['listing_type'] == null
? null
: PostListingType.fromJson(json['listing_type'] as String),
communityId: json['community_id'] as int?,
communityName: json['community_name'] as String?,
sort:
json['sort'] == null ? null : SortType.fromJson(json['sort'] as String),
page: json['page'] as int?,
limit: json['limit'] as int?,
creatorId: json['creator_id'] as int?,
auth: json['auth'] as String?,
);
}
Map<String, dynamic> _$_$_SearchToJson(_$_Search instance) {
final val = <String, dynamic>{
'q': instance.q,
};
void writeNotNull(String key, dynamic value) {
if (value != null) {
val[key] = value;
}
}
writeNotNull('type_', instance.type?.toJson());
writeNotNull('listing_type', instance.listingType?.toJson());
writeNotNull('community_id', instance.communityId);
writeNotNull('community_name', instance.communityName);
writeNotNull('sort', instance.sort?.toJson());
writeNotNull('page', instance.page);
writeNotNull('limit', instance.limit);
writeNotNull('creator_id', instance.creatorId);
writeNotNull('auth', instance.auth);
return val;
}
_$_GetModlog _$_$_GetModlogFromJson(Map<String, dynamic> json) {
return _$_GetModlog(
modPersonId: json['mod_person_id'] as int?,
communityId: json['community_id'] as int?,
page: json['page'] as int?,
limit: json['limit'] as int?,
);
}
Map<String, dynamic> _$_$_GetModlogToJson(_$_GetModlog instance) {
final val = <String, dynamic>{};
void writeNotNull(String key, dynamic value) {
if (value != null) {
val[key] = value;
}
}
writeNotNull('mod_person_id', instance.modPersonId);
writeNotNull('community_id', instance.communityId);
writeNotNull('page', instance.page);
writeNotNull('limit', instance.limit);
return val;
}
_$_CreateSite _$_$_CreateSiteFromJson(Map<String, dynamic> json) {
return _$_CreateSite(
name: json['name'] as String,
sidebar: json['sidebar'] as String?,
description: json['description'] as String?,
icon: json['icon'] as String?,
banner: json['banner'] as String?,
enableDownvotes: json['enable_downvotes'] as bool?,
openRegistration: json['open_registration'] as bool?,
enableNsfw: json['enable_nsfw'] as bool?,
communityCreationAdminOnly: json['community_creation_admin_only'] as bool?,
auth: json['auth'] as String,
);
}
Map<String, dynamic> _$_$_CreateSiteToJson(_$_CreateSite instance) {
final val = <String, dynamic>{
'name': instance.name,
};
void writeNotNull(String key, dynamic value) {
if (value != null) {
val[key] = value;
}
}
writeNotNull('sidebar', instance.sidebar);
writeNotNull('description', instance.description);
writeNotNull('icon', instance.icon);
writeNotNull('banner', instance.banner);
writeNotNull('enable_downvotes', instance.enableDownvotes);
writeNotNull('open_registration', instance.openRegistration);
writeNotNull('enable_nsfw', instance.enableNsfw);
writeNotNull(
'community_creation_admin_only', instance.communityCreationAdminOnly);
val['auth'] = instance.auth;
return val;
}
_$_EditSite _$_$_EditSiteFromJson(Map<String, dynamic> json) {
return _$_EditSite(
name: json['name'] as String?,
sidebar: json['sidebar'] as String?,
description: json['description'] as String?,
icon: json['icon'] as String?,
banner: json['banner'] as String?,
enableDownvotes: json['enable_downvotes'] as bool?,
openRegistration: json['open_registration'] as bool?,
enableNsfw: json['enable_nsfw'] as bool?,
communityCreationAdminOnly: json['community_creation_admin_only'] as bool?,
auth: json['auth'] as String,
);
}
Map<String, dynamic> _$_$_EditSiteToJson(_$_EditSite instance) {
final val = <String, dynamic>{};
void writeNotNull(String key, dynamic value) {
if (value != null) {
val[key] = value;
}
}
writeNotNull('name', instance.name);
writeNotNull('sidebar', instance.sidebar);
writeNotNull('description', instance.description);
writeNotNull('icon', instance.icon);
writeNotNull('banner', instance.banner);
writeNotNull('enable_downvotes', instance.enableDownvotes);
writeNotNull('open_registration', instance.openRegistration);
writeNotNull('enable_nsfw', instance.enableNsfw);
writeNotNull(
'community_creation_admin_only', instance.communityCreationAdminOnly);
val['auth'] = instance.auth;
return val;
}
_$_GetSite _$_$_GetSiteFromJson(Map<String, dynamic> json) {
return _$_GetSite(
auth: json['auth'] as String?,
);
}
Map<String, dynamic> _$_$_GetSiteToJson(_$_GetSite instance) {
final val = <String, dynamic>{};
void writeNotNull(String key, dynamic value) {
if (value != null) {
val[key] = value;
}
}
writeNotNull('auth', instance.auth);
return val;
}
_$_TransferSite _$_$_TransferSiteFromJson(Map<String, dynamic> json) {
return _$_TransferSite(
personId: json['person_id'] as int,
auth: json['auth'] as String,
);
}
Map<String, dynamic> _$_$_TransferSiteToJson(_$_TransferSite instance) =>
<String, dynamic>{
'person_id': instance.personId,
'auth': instance.auth,
};
_$_GetSiteConfig _$_$_GetSiteConfigFromJson(Map<String, dynamic> json) {
return _$_GetSiteConfig(
auth: json['auth'] as String,
);
}
Map<String, dynamic> _$_$_GetSiteConfigToJson(_$_GetSiteConfig instance) =>
<String, dynamic>{
'auth': instance.auth,
};
_$_SaveSiteConfig _$_$_SaveSiteConfigFromJson(Map<String, dynamic> json) {
return _$_SaveSiteConfig(
configHjson: json['config_hjson'] as String,
auth: json['auth'] as String,
);
}
Map<String, dynamic> _$_$_SaveSiteConfigToJson(_$_SaveSiteConfig instance) =>
<String, dynamic>{
'config_hjson': instance.configHjson,
'auth': instance.auth,
};
_$_ResolveObject _$_$_ResolveObjectFromJson(Map<String, dynamic> json) {
return _$_ResolveObject(
q: json['q'] as String,
auth: json['auth'] as String?,
);
}
Map<String, dynamic> _$_$_ResolveObjectToJson(_$_ResolveObject instance) {
final val = <String, dynamic>{
'q': instance.q,
};
void writeNotNull(String key, dynamic value) {
if (value != null) {
val[key] = value;
}
}
writeNotNull('auth', instance.auth);
return val;
}
|
package services
import javax.inject.{Inject, Singleton}
import scala.concurrent._
import scala.util.Properties
import play.api.libs.json.Json
import play.api.libs.ws._
import play.api.libs.json.JsValue
import models.{WeatherForecast, WeatherForecastResponse}
import utils.WeatherUtils
@Singleton
class WeatherForecastService (ws: WSClient, baseUrl: String)(implicit ec: ExecutionContext) {
@Inject() def this (ws: WSClient, ec: ExecutionContext) = this(ws, "http://api.openweathermap.org")(ec)
val API_KEY: String = Properties.envOrElse("WEATHER_API_KEY", "WEATHER_API_KEY")
def getForecastByCityID(cityID: String): Future[WSResponse] = {
val url = s"$baseUrl/data/2.5/forecast?id=$cityID&appid=$API_KEY&units=metric"
ws.url(url).get()
}
def getForecastByCityName(cityName: String): Future[WSResponse] = {
val url = s"$baseUrl/data/2.5/forecast?q=$cityName&appid=$API_KEY&units=metric"
ws.url(url).get()
}
def getForecastForCityByID(id: String): Future[JsValue] = {
val forecastFuture = getForecastByCityID(id)
forecastFuture.map(response => {
val resp = Json.parse(response.body)
val jsresp = resp.validate[WeatherForecastResponse]
jsresp.fold(
err => Json.obj("error" -> err.toString()),
forecast => {
val daily = WeatherUtils.getDailyWeather(forecast)
val result = WeatherForecast(
forecast.cod,
forecast.message,
forecast.cnt,
forecast.list,
forecast.city,
daily
)
Json.toJson(result)
}
)
})
}
}
|
/*
* Copyright 2021 by LunaSec (owned by Refinery Labs, Inc)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import path from 'path';
import * as apigateway from '@aws-cdk/aws-apigateway';
import * as dynamodb from '@aws-cdk/aws-dynamodb';
import * as lambda from '@aws-cdk/aws-lambda';
import * as secretsmanager from '@aws-cdk/aws-secretsmanager';
import * as cdk from '@aws-cdk/core';
import { DeploymentConfigOptions } from '../config/types';
import { AnalyticsCollectorLambda } from './analytics-collector-lambda';
import { CiphertextBucket } from './ciphertext-bucket';
import { getSecureFrameAssets } from './s3-assets';
import { TokenizerBackendBucket, TokenizerBackendCloudfront } from './tokenizer-backend-cloudfront';
import { TokenizerBackendLambda } from './tokenizer-backend-lambda';
import { CDNConfig, LunaSecStackResource, SecureFrameAssetFiles } from './types';
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { version } = require('../../package.json');
const secretDescription = 'Secret used by the tokenizer-backend in generating encryption keys for ciphertexts.';
export function getOutputName(name: LunaSecStackResource) {
return `${name.replace(/-/g, '')}Output`;
}
function secureFrameIFramePublicAssetFolder() {
const secureFrameMainScript = require.resolve('@lunasec/secure-frame-front-end');
const secureFrameBuildPath = path.dirname(secureFrameMainScript);
return path.join(secureFrameBuildPath, '../../public');
}
function cfnOutput(scope: cdk.Construct, name: LunaSecStackResource, value: string) {
new cdk.CfnOutput(scope, getOutputName(name), {
value: value,
exportName: getOutputName(name),
});
}
function createDynamoDBTable(
scope: cdk.Construct,
name: LunaSecStackResource,
options?: { ttl?: string; retain?: boolean }
) {
const ttl = options && options.ttl ? { timeToLiveAttribute: options.ttl } : {};
const removalPolicy = options && options.retain ? { removalPolicy: cdk.RemovalPolicy.RETAIN } : {};
const table = new dynamodb.Table(scope, name, {
...ttl,
...removalPolicy,
partitionKey: {
name: 'Key',
type: dynamodb.AttributeType.STRING,
},
});
cfnOutput(scope, name, table.tableName);
return table;
}
function createSecret(scope: cdk.Construct, name: LunaSecStackResource, description: string) {
const secret = new secretsmanager.Secret(scope, name, {
description: description,
removalPolicy: cdk.RemovalPolicy.RETAIN,
});
cfnOutput(scope, name, secret.secretArn);
return secret;
}
function getGateway(scope: cdk.Construct, name: LunaSecStackResource, lambdaProxy: lambda.IFunction) {
const gateway = new apigateway.LambdaRestApi(scope, name, {
handler: lambdaProxy,
});
cfnOutput(scope, name, gateway.url);
}
function getTokenizerBackendCloudfront(scope: cdk.Construct) {
const bucketName = 'tokenizer-backend-bucket';
const bucket = new TokenizerBackendBucket(scope, bucketName);
cfnOutput(scope, bucketName, bucket.bucketName);
return new TokenizerBackendCloudfront(scope, bucket);
}
function getCiphertextBucket(scope: cdk.Construct) {
const bucketName = 'ciphertext-bucket';
const ciphertextBucket = new CiphertextBucket(scope, bucketName);
cfnOutput(scope, bucketName, ciphertextBucket.bucketName);
return ciphertextBucket;
}
export class LunaSecDeploymentStack extends cdk.Stack {
public secureFrameAssets?: SecureFrameAssetFiles;
constructor(app: cdk.App, id: string, local: boolean, deploymentConfig: DeploymentConfigOptions) {
super(app, id);
const ciphertextBucket = getCiphertextBucket(this);
const metadataTable = createDynamoDBTable(this, 'metadata-table');
const keysTable = createDynamoDBTable(this, 'keys-table', {
retain: true,
});
const sessionsTable = createDynamoDBTable(this, 'sessions-table', {
ttl: '24h',
});
const grantsTable = createDynamoDBTable(this, 'grants-table', {
ttl: '24h',
});
const tokenizerSecret = createSecret(this, 'tokenizer-secret', secretDescription);
// all other resources are not included in the local deployment
if (local) {
return;
}
const secureFrameAssetFolder = secureFrameIFramePublicAssetFolder();
this.secureFrameAssets = getSecureFrameAssets(secureFrameAssetFolder);
const tokenizerBackendCloudfront = getTokenizerBackendCloudfront(this);
const cdnConfig: CDNConfig = {
protocol: 'https',
host: tokenizerBackendCloudfront.domainName,
main_script: this.secureFrameAssets.files.mainScript.filename,
main_style: this.secureFrameAssets.files.mainStyle.filename,
};
const tokenizerBackendLambda = new TokenizerBackendLambda(this, version, {
deploymentConfig: deploymentConfig,
cdnConfig: JSON.stringify(cdnConfig),
tokenizerBackendCloudfront,
ciphertextBucket,
tokenizerSecret,
metadataTable,
keysTable,
sessionsTable,
grantsTable,
});
getGateway(this, 'gateway', tokenizerBackendLambda);
if (!deploymentConfig.metrics.disabled) {
new AnalyticsCollectorLambda(this, version);
}
}
}
|
(ns ayatori.lra.core
(:require
[clojure.string :as string]
[java-time :as jt]
[ayatori.lra.db :as db]
[malli.core :as m]
[exoscale.ex :as ex]
[ayatori.lra-domain.interface :as domain]
[clojure.core.async :as async])
(:import (clojure.core.async.impl.channels ManyToManyChannel)))
(def AsyncChannel
;; just check that the value is an instance of ManyToManyChannel for now
[:fn (fn [v] (instance? ManyToManyChannel v))])
(m/=> closable-lra? [:=>
[:cat domain/LRA]
boolean?])
(defn closable-lra?
[lra]
(= :active (:lra/status lra)))
(m/=> cancellable-lra? [:=>
[:cat domain/LRA]
boolean?])
(defn cancellable-lra?
[lra]
(= :active (:lra/status lra)))
(m/=> joinable-lra? [:=>
[:cat domain/LRA]
boolean?])
(defn joinable-lra?
[lra]
(= :active (:lra/status lra)))
(m/=> data->lra [:=>
[:cat domain/StartLRAData]
domain/LRA])
(defn data->lra
[data]
(let [now (jt/instant)
time-limit (:lra/time-limit data)]
(-> data
(assoc :lra/code (db/uuid)
:lra/start-time now
:lra/status :active
:lra/participants [{:participant/client-id (:lra/client-id data)
:participant/top-level? false
:participant/status :active
:participant/acts (:lra/acts data)}])
(#(if (> time-limit 0) (assoc % :lra/finish-time (jt/plus now (jt/millis time-limit))) %))
(dissoc :lra/acts)
(dissoc :lra/parent-code))))
(m/=> ->toplevel-participant [:=>
[:cat domain/LRA]
domain/Participant])
(defn ->toplevel-participant
[lra]
{:participant/client-id (:lra/client-id lra)
:participant/top-level? true
:participant/status :active
:participant/lra-code (:lra/code lra)})
(m/=> data->participant [:=>
[:cat domain/JoinParticipantData]
domain/Participant])
(defn data->participant
[data]
(assoc data
:participant/top-level? false
:participant/status :active))
(m/=> all-lra [:=>
[:cat db/DatabaseComponent domain/LRAStatus]
[:maybe [:vector domain/LRA]]])
(defn all-lra
[database status]
(db/all-by-status (database) status))
(m/=> lra-by-code [:=>
[:cat db/DatabaseComponent domain/LRACode]
[:maybe domain/LRA]])
(defn lra-by-code
[database code]
(ex/try+
(->
(db/find-by-code (database) code)
(#(or %
(throw (ex-info (format "LRA not found with code %s" code)
{::ex/type ::lra-not-found :lra-code code})))))
(catch :ayatori.lra.db/generic-db-error _
(throw (ex-info (format "LRA not found with code %s" code)
{::ex/type ::lra-not-found :lra-code code})))))
(m/=> update-lra! [:=>
[:cat db/DatabaseComponent domain/LRA]
[:maybe domain/LRA]])
(defn update-lra!
[database lra]
(ex/try+
(->> (lra-by-code database (:lra/code lra))
(db/save! (database)))
(catch Exception e
(throw (ex-info "Update LRA failed"
{::ex/type ::update-lra-failed} e)))))
(m/=> new-lra! [:=>
[:cat db/DatabaseComponent domain/StartLRAData]
[:maybe domain/LRA]])
(defn new-lra!
[database data]
(ex/try+
(->> (data->lra data)
(db/save! (database)))
(catch Exception e
(throw (ex-info "Createing new LRA failed"
{::ex/type ::start-lra-failed} e)))))
(m/=> new-nested-lra! [:=>
[:cat db/DatabaseComponent domain/LRA domain/LRA]
[:maybe [:map [:parent-code domain/LRACode]
[:lra-code domain/LRACode]]]])
(defn new-nested-lra!
[database parent lra]
(ex/try+
(->> (->toplevel-participant lra)
(update parent :lra/participants conj)
(db/save! (database))
:lra/code
(assoc {} :lra-code (:lra/code lra) :parent-code))
(catch Exception e
(throw (ex-info "Creating nested LRA failed"
{::ex/type ::start-nested-lra-failed} e)))))
(m/=> start-lra! [:=>
[:cat db/DatabaseComponent domain/StartLRAData]
[:maybe domain/LRACode]])
(defn start-lra!
[database data]
(ex/try+
(if (string/blank? (:lra/parent-code data))
(-> (new-lra! database data)
:lra/code)
;;else
(let [parent (lra-by-code database (:lra/parent-code data))
lra (new-lra! database data)]
(-> (new-nested-lra! database parent lra)
:lra-code)))
(catch Exception e
(throw (ex-info "Start LRA failed"
{::ex/type ::start-lra-failed} e)))))
(m/=> join! [:=>
[:cat db/DatabaseComponent domain/LRACode domain/JoinParticipantData]
[:maybe domain/LRACode]])
(defn join!
[database code participant]
(ex/try+
(->> (lra-by-code database code)
(#(if (joinable-lra? %)
(->> (data->participant participant)
(update % :lra/participants conj)
(db/save! (database)))
;; else
(throw (ex-info (format "Joinable LRA not found with code %s" code)
{::ex/type ::lra-not-found}))))
:lra/code)
(catch Exception e
(throw (ex-info (format "Join failed with code %s" code)
{::ex/type ::join-lra-failed} e)))))
(m/=> close! [:=>
[:cat db/DatabaseComponent AsyncChannel domain/LRA]
[:maybe domain/LRACode]])
(defn close!
[database lra-engine-input-chan {:lra/keys [code]
:as lra}]
(if (closable-lra? lra)
(do
(db/set-status! (database) code :closing)
(async/go (async/put! lra-engine-input-chan {:type :close
:lra lra}))
code)
(throw (ex-info (format "Closable LRA not found with code %s" code)
{::ex/type ::lra-not-found}))))
(m/=> close-lra! [:=>
[:cat db/DatabaseComponent AsyncChannel domain/LRACode]
[:maybe domain/LRACode]])
(defn close-lra!
[database lra-engine-input-chan code]
(ex/try+
(->> (lra-by-code database code)
(close! database lra-engine-input-chan))
(catch Exception e
(throw (ex-info "Close LRA failed"
{::ex/type ::close-lra-failed} e)))))
(m/=> cancel! [:=>
[:cat db/DatabaseComponent AsyncChannel domain/LRA]
[:maybe domain/LRACode]])
(defn cancel!
[database lra-engine-input-chan {:lra/keys [code]
:as lra}]
(if (cancellable-lra? lra)
(do
(db/set-status! (database) code :cancelling)
(async/go (async/put! lra-engine-input-chan {:type :cancel
:lra lra}))
code)
(throw (ex-info (format "Cancellable LRA not found with code %s" code)
{::ex/type ::lra-not-found}))))
(m/=> cancel-lra! [:=>
[:cat db/DatabaseComponent AsyncChannel domain/LRACode]
[:maybe domain/LRACode]])
(defn cancel-lra!
[database lra-engine-input-chan code]
(ex/try+
(->> (lra-by-code database code)
(cancel! database lra-engine-input-chan))
(catch Exception e
(throw (ex-info "Cancel LRA failed"
{::ex/type ::cancel-lra-failed} e)))))
|
#!/usr/bin/env ruby
require 'rubygems'
require 'nokogiri'
# print error message and exit with status 1
def error(message="Unknown error")
puts "ERROR: #{message}"
exit 1
end
# parse the trema file at spath and merge its strings into the provided result hash
def parse(spath, result)
# if the file is not present, return the result as is
unless File.file? spath
return result
end
# if the file is empty, return the result as is
contents = File.open(spath).read
unless contents.length > 0
return result
end
# parse the doc
begin
doc = Nokogiri::XML(File.open(spath)) do |config|
config.strict.noent
end
rescue Exception => e
error e
end
# we support trema-1.0
trema = doc/'trema'
ver = trema.attr('noNamespaceSchemaLocation').value
exp = 'http://software.group.nca/trema/schema/trema-1.0.xsd'
error "Unsupported TREMA version '#{ver}'" unless exp == ver
# iterate through all texts
(trema/'text').each do |text|
key = text.attr('key')
# and all values
(text/'value').each do |value|
lang = value.attr('lang')
# create the hash for that language if it doesn't exist yet
unless result[lang]
result[lang] = Hash.new
end
language_hash = result[lang]
# escape the quotes
string = value.content
string = string.gsub("\"", "\\\"")
language_hash[key] = string
end
end
result
end
# save each language from the result hash into its <lang>.lproj/messages.strings file
def save(result, dpath)
result.each do |key, language_hash|
path = "#{dpath}/#{key}.lproj"
Dir.mkdir(path) unless File.directory? path
fout = File.open("#{path}/messages.strings", 'w')
language_hash.each do |key, string|
text = "\"#{key}\" = \"#{string}\";\n"
fout.puts text
end
fout.close()
end
end
error "Invalid arguments provided. \n"\
"Usage: \n"\
" - ruby trema.rb source.trm output_folder or\n"\
" - ruby trema.rb source.trm second_source.trm output_folder\n"\
"Note that only the first source file must be present.\n"\
unless ARGV.length == 2 or ARGV.length == 3
if ARGV.length == 2
spath = ARGV[0]
dpath = ARGV[1]
error 'The source path should point to an existing file' unless File.file? spath
error 'The output path should point to an existing directory' unless File.directory? dpath
result = parse(spath, Hash.new)
save(result, dpath)
end
if ARGV.length == 3
s1path = ARGV[0]
s2path = ARGV[1]
dpath = ARGV[2]
error 'The first source path should point to an existing file' unless File.file? s1path
error 'The output path should point to an existing directory' unless File.directory? dpath
result = parse(s1path, Hash.new)
result = parse(s2path, result)
save(result, dpath)
end
|
/*
* Copyright 2020 The Matrix.org Foundation C.I.C.
*
* licensed under the apache license, version 2.0 (the "license");
* you may not use this file except in compliance with the license.
* you may obtain a copy of the license at
*
* http://www.apache.org/licenses/license-2.0
*
* unless required by applicable law or agreed to in writing, software
* distributed under the license is distributed on an "as is" basis,
* without warranties or conditions of any kind, either express or implied.
* see the license for the specific language governing permissions and
* limitations under the license.
*/
package org.wormhole.android.sdk.internal.database.model
import io.realm.RealmObject
internal open class PushConditionEntity(
var kind: String = "",
var key: String? = null,
var pattern: String? = null,
var iz: String? = null
) : RealmObject() {
companion object
}
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package connectors.subscriptiondata
import connectors.httpparser.GetSubscriptionDetailsHttpParser._
import play.api.libs.json.{Json, OFormat}
import play.api.test.Helpers.{INTERNAL_SERVER_ERROR, NO_CONTENT, OK}
import uk.gov.hmrc.http.HttpResponse
import utilities.UnitTestTrait
class GetSubscriptionDetailsHttpParserSpec extends UnitTestTrait {
val testHttpVerb = "GET"
val testUri = "/"
case class DummyModel(body: String)
object DummyModel{
implicit val format: OFormat[DummyModel] = Json.format[DummyModel]
}
"GetSelfEmploymentHttpReads" when {
"read" should {
"parse a correctly formatted OK response and return the data in a model" in {
val httpResponse = HttpResponse(OK, Some(Json.obj("body" -> "Test Body")))
lazy val res = getSubscriptionDetailsHttpReads[DummyModel].read(testHttpVerb, testUri, httpResponse)
res mustBe Some(DummyModel(body = "Test Body"))
}
"parse an incorrectly formatted Ok response as an invalid Json" in {
val httpResponse = HttpResponse(OK, Some(Json.obj()))
lazy val res = getSubscriptionDetailsHttpReads[DummyModel].read(testHttpVerb, testUri, httpResponse)
the[Exception] thrownBy res must have message "Invalid Json for getSubscriptionDetailsHttpReads"
}
"parse an no content response as None" in {
val httpResponse = HttpResponse(NO_CONTENT)
lazy val res = getSubscriptionDetailsHttpReads[DummyModel].read(testHttpVerb, testUri, httpResponse)
res mustBe None
}
"parse any other http status as a UnexpectedStatusFailure" in {
val httpResponse = HttpResponse(INTERNAL_SERVER_ERROR)
lazy val res = getSubscriptionDetailsHttpReads[DummyModel].read(testHttpVerb, testUri, httpResponse)
the[Exception] thrownBy res must have message s"Unexpected status: $INTERNAL_SERVER_ERROR"
}
}
}
}
|
var AuthBasic = function (authConfig) {
var passport = require('passport');
var BasicStrategy = require('passport-http').BasicStrategy;
var HydraUser = require('./hydra-user');
passport.use(new BasicStrategy({realm: authConfig.realm},
function (username, password, done) {
if (authConfig.authenticate(username, password)) {
return done(null, new HydraUser(username, username, null, []));
} else {
return done(null, false);
}
}
));
// Function to perform authentication
var authenticate = passport.authenticate('basic', {session: false});
// With HTTP Basic Auth, auth headers are passed with every request, so checking for
// auth is the same thing as performing it.
var ensureAuthenticated = authenticate;
return {
buildUser: function (object) {
return new HydraUser(object);
},
passport: passport,
authenticate: authenticate,
ensureAuthenticated: ensureAuthenticated,
passportUseSession: false,
allowLogout: function () {
return false;
}
};
};
module.exports = AuthBasic;
|
{-# LANGUAGE NamedFieldPuns, TemplateHaskell #-}
--------------------------------------------------------------------------------
-- |
-- Module : Language.C4.Fir.Stmt.Stmt
-- Description : Tests for Language.C4.Fir.Stmt.Stmt
-- Copyright : (c) Matt Windsor, 2018, 2019, 2020
-- License : MIT
-- Maintainer : [email protected]
-- Stability : experimental
--------------------------------------------------------------------------------
module Test.Fir.Stmt.Stmt (tests) where
import qualified Control.Lens as L
import Hedgehog
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import qualified Language.C4.Fir.Stmt.Stmt as Src
tests :: IO Bool
tests = checkSequential $$(discover)
|
Dune::Balanced::Creditcard::Engine.routes.draw do
resources :payments, only: %i(new create update)
resources :notifications, only: :create
end
|
#!/bin/bash
set -o errexit
set -o nounset
set -o pipefail
# This script relies on Cloudogu apt repository being present. See ces_apt.sh
echo "installing etcd - start"
# install etcd
apt-get install -y --allow-downgrades --allow-remove-essential --allow-change-held-packages ces-etcd
# Starting etcd is not necessary, because it is done in the postinst script of the etcd package
# See https://github.com/cloudogu/etcd/blob/develop/deb/DEBIAN/postinst#L16
echo "installing etcd - end"
|
import pandas as pd
import subprocess
from scipy.io import mmread, mmwrite
from scipy.sparse import csr_matrix
import numpy as np
import os
from sklearn.preprocessing import binarize
def summit_extend(summit_file, out_file, extend_size):
    # Extend each summit to a fixed-size window centered on the original interval.
    with open(summit_file) as infile, open(out_file, 'w') as output:
        for line in infile:
            items = line.rstrip('\n').split('\t')
            [chrom, start, end, name, val] = items
            name = name.split('/')[-1]
            start = int(int(start) - extend_size/2)
            if start < 0:
                start = 0
            end = int(int(end) + extend_size/2 - 1)
            output.write('\t'.join(map(str, [chrom, start, end, name, val, '.'])) + '\n')
def peak_count_matrix(atac_inter_bed, out_prefix, binary):
# first three columns is 'chr','start','end'
# last column is barcode
inter_peak=pd.read_csv(atac_inter_bed,header=None,sep='\t')
inter_peak.columns=['chr','start','end']+['']*(inter_peak.shape[1]-4)+['barcode']
#inter_peak['peak']=inter_peak['name'].apply(lambda x: x.split('/')[-1])
inter_peak['peak']=inter_peak.apply(lambda x: x['chr']+':'+str(x['start'])+'-'+str(x['end']), axis=1)
bc_peak_counts=inter_peak.groupby(['barcode','peak']).size()
df=pd.DataFrame(bc_peak_counts)
df.reset_index(inplace=True)
#new_df=df.pivot(index='peak', columns='barcode', values=0) #cannot process big dataset
#df=df.astype({0:"int64"})
new_df=df.groupby(['peak','barcode'])[0].max().unstack()
new_df.columns.name = None
new_df.index.name = None
new_df=new_df.fillna(0)
new_df=new_df.astype('int')
if binary:
Xb=binarize(new_df)
new_df=pd.DataFrame(Xb, index=new_df.index, columns=new_df.columns)
mmwrite(out_prefix+'count.mtx', csr_matrix(new_df))
np.savetxt(out_prefix+'peaks.txt',new_df.index.values,fmt="%s")
np.savetxt(out_prefix+'barcodes.txt',new_df.columns.values,fmt="%s")
def intersect(refpeak, bed, outdir):
refpeak_name=refpeak.split('/')[-1].split('.')[0]
bed_name=bed.split('/')[-1].split('.')[0]
out_intersect=outdir+'/'+bed_name+'_'+refpeak_name+'_intersect.bed'
#if (not os.path.exists(out_intersect)) or (os.path.exists(out_intersect) and os.path.getsize(out_intersect)==0):
intersect=subprocess.check_output(['bedtools', 'intersect', '-a', refpeak, '-b', bed, '-wa','-wb'])
with open(out_intersect,'wb') as output: # py3
output.write(intersect)
return out_intersect
def get_count_matrix(refpeak, bed, outdir, sample, binary=False):
if not os.path.exists(outdir):
os.makedirs(outdir)
if not os.path.exists(outdir+'/'+sample):
os.makedirs(outdir+'/'+sample)
out_intersect=intersect(refpeak, bed, outdir)
peak_count_matrix(out_intersect, outdir+'/'+sample+'/', binary)
if __name__ == '__main__':
import sys
refpeak=sys.argv[1]
bed=sys.argv[2]
outdir=sys.argv[3]
sample=sys.argv[4]
get_count_matrix(refpeak, bed, outdir, sample)
'''
indir='/Share2/home/zhangqf5/yanqiu/scAR/output/ZJSMix0928/peak_cor'
peak_count_matrix(indir+'/K562_ATAC_summits_K562_ATAC_shift_intersect2.bed',
indir+'/coA_K562/')
peak_count_matrix(indir+'/K562_ATAC_summits_K562_ATAC_shift_intersect.bed',
indir+'/onlyA_K562/')
peak_count_matrix(indir+'/3T3_ATAC_summits_3T3_ATAC_shift_intersect2.bed',
indir+'/coA_3T3/')
peak_count_matrix(indir+'/3T3_ATAC_summits_3T3_ATAC_shift_intersect.bed',
indir+'/onlyA_3T3/')
peak_count_matrix(indir+'/Mix_ATAC_summits_Mix_ATAC_shift_intersect2.bed',
indir+'/coA_Mix/')
peak_count_matrix(indir+'/Mix_ATAC_summits_Mix_ATAC_shift_intersect.bed',
indir+'/onlyA_Mix/')
'''
|
import Controller from '@ember/controller';
import { action } from '@ember/object';
import { tracked } from '@glimmer/tracking';
import { inject as service } from '@ember/service';
export default class CarController extends Controller {
@service store;
@tracked brand = '';
@tracked carModel = '';
@action
createCar(event) {
event.preventDefault();
const car = this.store.createRecord('car', {
brand: this.brand,
model: this.carModel,
});
car.save();
this.brand = '';
this.carModel = '';
}
@action
removeCar(car, event) {
event.preventDefault();
car.destroyRecord();
}
}
|
// ppg-compiler.js
// Tools to compile markdown into blog posts, generate pages of post links/summaries to show
// David Lenkner, 2017
var gulp = require('gulp');
var markdown = require('gulp-markdown');
var concat = require('gulp-concat-util');
var flatten = require('gulp-flatten');
var pageFrameInj = require('./pageframe-inject.js');
// Public items to be used in gulpfile.js
module.exports = {
// Takes markdown and turns to templates to be loaded via Underscore
markdownToTemplates: function(publishRootDir) {
return gulp.src('posts/*.md')
.pipe(markdown()) // Turns markdown into html
.pipe(concat.header('<div id="postdiv">')) // Views like to be wrapped in div
.pipe(concat.footer('</div>'))
.pipe(gulp.dest(publishRootDir + 'posts'));
},
// Takes pages, injects headers and footers
insertHFAndSendPageHTMLs: function(publishRootDir) {
return pageFrameInj.injectHeaderFooter(gulp.src('posts/*.html'))
.pipe(gulp.dest(publishRootDir + 'posts'));
},
// Takes supporting images and puts them in img
publishSupportImg: function(publishRootDir) {
return gulp.src(['posts/**/*.jpg','posts/**/*.png','posts/**/*.svg','posts/**/*.gif'])
.pipe(flatten())
.pipe(gulp.dest(publishRootDir + 'img'));
}
};
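// Illustrative usage from a gulpfile.js (not part of this file; the publish
// directory and task names below are assumptions):
//   const ppg = require('./ppg-compiler.js');
//   const PUBLISH_DIR = './publish/';
//   exports.posts  = () => ppg.markdownToTemplates(PUBLISH_DIR);
//   exports.pages  = () => ppg.insertHFAndSendPageHTMLs(PUBLISH_DIR);
//   exports.images = () => ppg.publishSupportImg(PUBLISH_DIR);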
// Internal Functions
|
/*
* CDDL HEADER START
*
* The contents of this file are subject to the terms of the
* Common Development and Distribution License (the "License").
* You may not use this file except in compliance with the License.
*
* You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
* or http://www.opensolaris.org/os/licensing.
* See the License for the specific language governing permissions
* and limitations under the License.
*
* When distributing Covered Code, include this CDDL HEADER in each
* file and include the License file at usr/src/OPENSOLARIS.LICENSE.
* If applicable, add the following below this CDDL HEADER, with the
* fields enclosed by brackets "[]" replaced with your own identifying
* information: Portions Copyright [yyyy] [name of copyright owner]
*
* CDDL HEADER END
*/
/*
* Copyright 2008 Sun Microsystems, Inc. All rights reserved.
* Use is subject to license terms.
*/
#pragma ident "%Z%%M% %I% %E% SMI"
/* Copyright (c) 1984, 1986, 1987, 1988, 1989 AT&T */
/* All Rights Reserved */
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/procset.h>
#include <sys/priocntl.h>
#include <sys/rtpriocntl.h>
#include <sys/param.h>
#include <signal.h>
#include <libgen.h>
#include <limits.h>
#include <errno.h>
#include "priocntl.h"
/*
* This file contains the class specific code implementing
* the real-time priocntl sub-command.
*/
#define ADDKEYVAL(p, k, v) { (p[0]) = (k); (p[1]) = (v); p += 2; }
#define RT_KEYCNT 4 /* maximal number of (key, value) pairs */
/*
* control flags
*/
#define RT_DOPRI 0x01 /* change priority */
#define RT_DOTQ 0x02 /* change RT time quantum */
#define RT_DOSIG 0x10 /* change RT time quantum signal */
static void print_rtinfo(void);
static int print_rtprocs(void);
static int rt_priocntl(idtype_t, id_t, int, char *, uintptr_t *);
static int set_rtprocs(idtype_t, int, char **, uint_t, pri_t, long,
long, int);
static void exec_rtcmd(char **, uint_t, pri_t, long, long, int);
static char usage[] =
"usage: priocntl -l\n"
" priocntl -d [-i idtype] [idlist]\n"
" priocntl -s [-c RT] [-p rtpri] [-t tqntm [-r res]] [-q tqsig]\n"
" [-i idtype] [idlist]\n"
" priocntl -e [-c RT] [-p rtpri] [-t tqntm [-r res]] [-q tqsig]\n"
" command [argument(s)]\n";
static char cmdpath[MAXPATHLEN];
static char basenm[BASENMSZ];
int
main(int argc, char *argv[])
{
int c;
int lflag, dflag, sflag, pflag;
int tflag, rflag, eflag, iflag, qflag;
pri_t rtpri;
long tqntm;
long res;
int tqsig;
char *idtypnm;
idtype_t idtype;
int idargc;
uint_t cflags;
(void) strlcpy(cmdpath, argv[0], MAXPATHLEN);
(void) strlcpy(basenm, basename(argv[0]), BASENMSZ);
lflag = dflag = sflag = pflag = 0;
tflag = rflag = eflag = iflag = qflag = 0;
while ((c = getopt(argc, argv, "ldsp:t:r:q:ec:i:")) != -1) {
switch (c) {
case 'l':
lflag++;
break;
case 'd':
dflag++;
break;
case 's':
sflag++;
break;
case 'p':
pflag++;
rtpri = (pri_t)str2num(optarg, SHRT_MIN, SHRT_MAX);
if (errno)
fatalerr("%s: Specified real time priority %s"
" out of configured range\n",
basenm, optarg);
break;
case 't':
tflag++;
tqntm = str2num(optarg, 1, INT_MAX);
if (errno)
fatalerr("%s: Invalid time quantum specified;"
" time quantum must be positive\n", basenm);
break;
case 'r':
rflag++;
res = str2num(optarg, 1, 1000000000);
if (errno)
fatalerr("%s: Invalid resolution specified;"
" resolution must be between"
" 1 and 1,000,000,000\n", basenm);
break;
case 'q':
qflag++;
if (str2sig(optarg, &tqsig) != 0)
fatalerr("%s: Invalid real time quantum signal"
" specified\n", basenm);
break;
case 'e':
eflag++;
break;
case 'c':
if (strcmp(optarg, "RT") != 0)
fatalerr("error: %s executed for %s class, %s"
" is actually sub-command for RT class\n",
cmdpath, optarg, cmdpath);
break;
case 'i':
iflag++;
idtypnm = optarg;
break;
case '?':
fatalerr(usage);
default:
break;
}
}
if (lflag) {
if (dflag || sflag || pflag || tflag || rflag || eflag ||
iflag || qflag)
fatalerr(usage);
print_rtinfo();
} else if (dflag) {
if (lflag || sflag || pflag || tflag || rflag || eflag || qflag)
fatalerr(usage);
return (print_rtprocs());
} else if (sflag) {
if (lflag || dflag || eflag)
fatalerr(usage);
if (iflag) {
if (str2idtyp(idtypnm, &idtype) == -1)
fatalerr("%s: Bad idtype %s\n", basenm,
idtypnm);
} else {
idtype = P_PID;
}
cflags = (pflag ? RT_DOPRI : 0);
if (tflag)
cflags |= RT_DOTQ;
if (rflag == 0)
res = 1000;
if (optind < argc)
idargc = argc - optind;
else
idargc = 0;
if (qflag)
cflags |= RT_DOSIG;
return (set_rtprocs(idtype, idargc, &argv[optind], cflags,
rtpri, tqntm, res, tqsig));
} else if (eflag) {
if (lflag || dflag || sflag || iflag)
fatalerr(usage);
cflags = (pflag ? RT_DOPRI : 0);
if (tflag)
cflags |= RT_DOTQ;
if (rflag == 0)
res = 1000;
if (qflag)
cflags |= RT_DOSIG;
exec_rtcmd(&argv[optind], cflags, rtpri, tqntm, res, tqsig);
} else {
fatalerr(usage);
}
return (0);
}
/*
* Print our class name and the configured user priority range.
*/
static void
print_rtinfo(void)
{
pcinfo_t pcinfo;
(void) strcpy(pcinfo.pc_clname, "RT");
(void) printf("RT (Real Time)\n");
if (priocntl(0, 0, PC_GETCID, (caddr_t)&pcinfo) == -1)
fatalerr("\tCan't get maximum configured RT priority\n");
(void) printf("\tConfigured RT User Priority Range: 0 through %d\n",
((rtinfo_t *)pcinfo.pc_clinfo)->rt_maxpri);
}
/*
* Read a list of pids from stdin and print the real-time priority and time
* quantum (in millisecond resolution) for each of the corresponding processes.
*/
static int
print_rtprocs(void)
{
pid_t *pidlist;
size_t numread;
int i;
char clname[PC_CLNMSZ];
pri_t rt_pri;
uint_t rt_tqsecs;
int rt_tqnsecs;
int rt_tqsig;
int error = 0;
/*
* Read a list of pids from stdin.
*/
if ((pidlist = read_pidlist(&numread, stdin)) == NULL)
fatalerr("%s: Can't read pidlist.\n", basenm);
(void) printf("REAL TIME PROCESSES:\n"
" PID RTPRI TQNTM TQSIG\n");
if (numread == 0)
fatalerr("%s: No pids on input\n", basenm);
for (i = 0; i < numread; i++) {
(void) printf("%7ld", pidlist[i]);
if (priocntl(P_PID, pidlist[i], PC_GETXPARMS, "RT",
RT_KY_TQSECS, &rt_tqsecs, RT_KY_TQNSECS, &rt_tqnsecs,
RT_KY_PRI, &rt_pri, RT_KY_TQSIG, &rt_tqsig, 0) != -1) {
(void) printf(" %5d", rt_pri);
if (rt_tqnsecs == RT_TQINF)
(void) printf(" RT_TQINF");
else
(void) printf(" %11lld",
(longlong_t)rt_tqsecs * 1000 +
rt_tqnsecs / 1000000);
(void) printf(" %3d\n", rt_tqsig);
} else {
error = 1;
if (priocntl(P_PID, pidlist[i], PC_GETXPARMS, NULL,
PC_KY_CLNAME, clname, 0) != -1 &&
strcmp(clname, "RT"))
/*
* Process from some class other than real time.
* It has probably changed class while priocntl
* command was executing (otherwise we wouldn't
* have been passed its pid). Print the little
* we know about it.
*/
(void) printf("\tChanged to class %s while"
" priocntl command executing\n", clname);
else
(void) printf("\tCan't get real time"
" parameters\n");
}
}
free_pidlist(pidlist);
return (error);
}
/*
* Call priocntl() with command codes PC_SETXPARMS or PC_GETXPARMS.
* The first parameter behind the command code is always the class name.
* Each parameter is headed by a key, which determines the meaning of the
* following value. There are maximal RT_KEYCNT = 4 (key, value) pairs.
*/
static int
rt_priocntl(idtype_t idtype, id_t id, int cmd, char *clname, uintptr_t *argsp)
{
return (priocntl(idtype, id, cmd, clname, argsp[0], argsp[1],
argsp[2], argsp[3], argsp[4], argsp[5], argsp[6], argsp[7], 0));
}
/*
* Set all processes in the set specified by idtype/idargv to real time
* (if they aren't already real time) and set their real-time priority,
* real-time quantum and real-time quantum signal to those specified by
* rtpri, tqntm/res and rtqsig.
*/
static int
set_rtprocs(idtype_t idtype, int idargc, char **idargv, uint_t cflags,
pri_t rtpri, long tqntm, long res, int rtqsig)
{
pcinfo_t pcinfo;
uintptr_t args[2*RT_KEYCNT+1];
uintptr_t *argsp = &args[0];
pri_t maxrtpri;
hrtimer_t hrtime;
char idtypnm[PC_IDTYPNMSZ];
int i;
id_t id;
int error = 0;
/*
* Get the real time class ID and max configured RT priority.
*/
(void) strcpy(pcinfo.pc_clname, "RT");
if (priocntl(0, 0, PC_GETCID, (caddr_t)&pcinfo) == -1)
fatalerr("%s: Can't get RT class ID, priocntl system call"
" failed with errno %d\n", basenm, errno);
maxrtpri = ((rtinfo_t *)pcinfo.pc_clinfo)->rt_maxpri;
/*
* Validate the rtpri and res arguments.
*/
if ((cflags & RT_DOPRI) != 0) {
if (rtpri > maxrtpri || rtpri < 0)
fatalerr("%s: Specified real time priority %d out of"
" configured range\n", basenm, rtpri);
ADDKEYVAL(argsp, RT_KY_PRI, rtpri);
}
if ((cflags & RT_DOTQ) != 0) {
hrtime.hrt_secs = 0;
hrtime.hrt_rem = tqntm;
hrtime.hrt_res = res;
if (_hrtnewres(&hrtime, NANOSEC, HRT_RNDUP) == -1)
fatalerr("%s: Can't convert resolution.\n", basenm);
ADDKEYVAL(argsp, RT_KY_TQSECS, hrtime.hrt_secs);
ADDKEYVAL(argsp, RT_KY_TQNSECS, hrtime.hrt_rem);
}
if ((cflags & RT_DOSIG) != 0)
ADDKEYVAL(argsp, RT_KY_TQSIG, rtqsig);
*argsp = 0;
if (idtype == P_ALL) {
if (rt_priocntl(P_ALL, 0, PC_SETXPARMS, "RT", args) == -1) {
if (errno == EPERM) {
(void) fprintf(stderr,
"Permissions error encountered"
" on one or more processes.\n");
error = 1;
} else {
fatalerr("%s: Can't reset real time parameters"
"\npriocntl system call failed with"
" errno %d\n", basenm, errno);
}
}
} else if (idargc == 0) {
if (rt_priocntl(idtype, P_MYID, PC_SETXPARMS, "RT",
args) == -1) {
if (errno == EPERM) {
(void) idtyp2str(idtype, idtypnm);
(void) fprintf(stderr, "Permissions error"
" encountered on current %s.\n", idtypnm);
error = 1;
} else {
fatalerr("%s: Can't reset real time parameters"
"\npriocntl system call failed with"
" errno %d\n", basenm, errno);
}
}
} else {
(void) idtyp2str(idtype, idtypnm);
for (i = 0; i < idargc; i++) {
if (idtype == P_CID) {
(void) strcpy(pcinfo.pc_clname, idargv[i]);
if (priocntl(0, 0, PC_GETCID,
(caddr_t)&pcinfo) == -1)
fatalerr("%s: Invalid or unconfigured"
" class %s, priocntl system call"
" failed with errno %d\n",
basenm, pcinfo.pc_clname, errno);
id = pcinfo.pc_cid;
} else {
id = (id_t)str2num(idargv[i], INT_MIN, INT_MAX);
if (errno)
fatalerr("%s: Invalid id \"%s\"\n",
basenm, idargv[i]);
}
if (rt_priocntl(idtype, id, PC_SETXPARMS, "RT",
args) == -1) {
if (errno == EPERM) {
(void) fprintf(stderr,
"Permissions error encountered on"
" %s %s.\n", idtypnm, idargv[i]);
error = 1;
} else {
fatalerr("%s: Can't reset real time"
" parameters\npriocntl system call"
" failed with errno %d\n",
basenm, errno);
}
}
}
}
return (error);
}
/*
* Execute the command pointed to by cmdargv as a real-time process
* with real time priority rtpri, quantum tqntm/res and quantum signal rtqsig.
*/
static void
exec_rtcmd(char **cmdargv, uint_t cflags, pri_t rtpri, long tqntm, long res,
int rtqsig)
{
pcinfo_t pcinfo;
uintptr_t args[2*RT_KEYCNT+1];
uintptr_t *argsp = &args[0];
pri_t maxrtpri;
hrtimer_t hrtime;
/*
* Get the real time class ID and max configured RT priority.
*/
(void) strcpy(pcinfo.pc_clname, "RT");
if (priocntl(0, 0, PC_GETCID, (caddr_t)&pcinfo) == -1)
fatalerr("%s: Can't get RT class ID, priocntl system call"
" failed with errno %d\n", basenm, errno);
maxrtpri = ((rtinfo_t *)pcinfo.pc_clinfo)->rt_maxpri;
if ((cflags & RT_DOPRI) != 0) {
if (rtpri > maxrtpri || rtpri < 0)
fatalerr("%s: Specified real time priority %d out of"
" configured range\n", basenm, rtpri);
ADDKEYVAL(argsp, RT_KY_PRI, rtpri);
}
if ((cflags & RT_DOTQ) != 0) {
hrtime.hrt_secs = 0;
hrtime.hrt_rem = tqntm;
hrtime.hrt_res = res;
if (_hrtnewres(&hrtime, NANOSEC, HRT_RNDUP) == -1)
fatalerr("%s: Can't convert resolution.\n", basenm);
ADDKEYVAL(argsp, RT_KY_TQSECS, hrtime.hrt_secs);
ADDKEYVAL(argsp, RT_KY_TQNSECS, hrtime.hrt_rem);
}
if ((cflags & RT_DOSIG) != 0)
ADDKEYVAL(argsp, RT_KY_TQSIG, rtqsig);
*argsp = 0;
if (rt_priocntl(P_PID, P_MYID, PC_SETXPARMS, "RT", args) == -1)
fatalerr("%s: Can't reset real time parameters\n"
"priocntl system call failed with errno %d\n",
basenm, errno);
(void) execvp(cmdargv[0], cmdargv);
fatalerr("%s: Can't execute %s, exec failed with errno %d\n",
basenm, cmdargv[0], errno);
}
|
package com.example.biznoti0
import android.app.ProgressDialog
import android.content.Intent
import android.os.Bundle
import android.view.View
import android.widget.Button
import android.widget.RadioButton
import android.widget.RadioGroup
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.database.DatabaseReference
import com.google.firebase.database.FirebaseDatabase
import kotlinx.android.synthetic.main.activity_sign_up.*
class SignUp : AppCompatActivity() {
private lateinit var mFireAuth: FirebaseAuth
private lateinit var userreference: DatabaseReference
lateinit var acType: String
lateinit var radioGroup: RadioGroup
lateinit var rb_investee: RadioButton
lateinit var rb_investor: RadioButton
lateinit var registerButton: Button
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_sign_up)
radioGroup = findViewById(R.id.Radiogroup) as RadioGroup
rb_investee = findViewById(R.id.AT_investee) as RadioButton
rb_investor = findViewById(R.id.AT_investor) as RadioButton
registerButton = findViewById(R.id.register) as Button
signUpIn.setOnClickListener {
startActivity(Intent(this, SignInActivity::class.java))
finish()
}
mFireAuth = FirebaseAuth.getInstance()
registerButton.setOnClickListener(View.OnClickListener {
if (radioGroup.checkedRadioButtonId != -1){
if (rb_investee.isChecked)
acType = "Investee"
if (rb_investor.isChecked)
acType = "Investor"
Registration()
} else {
Toast.makeText(this, "Account Type selection is required", Toast.LENGTH_LONG).show()
}
})
}
private fun Registration() {
val Fnames = SignUpFName.text.toString()
val Lnames = SignUpLName.text.toString()
val Mnames = SignUpMName.text.toString()
val emails = SignUpEmail.text.toString()
val passwords = SignUpPassword.text.toString()
if (Fnames.isEmpty()) {
Toast.makeText(this, "First Name is required", Toast.LENGTH_LONG).show()
} else if (Lnames.isEmpty()) {
Toast.makeText(this, "Last Name is required", Toast.LENGTH_LONG).show()
} else if (emails.isEmpty()) {
Toast.makeText(this, "Email is required", Toast.LENGTH_LONG).show()
} else if (passwords.isEmpty()) {
Toast.makeText(this, "Password is required", Toast.LENGTH_LONG).show()
} else {
val progressDialog = ProgressDialog(this@SignUp)
progressDialog.setTitle("SignUp")
progressDialog.setMessage("Sign up Process in Progress.....")
progressDialog.setCanceledOnTouchOutside(false)
progressDialog.show()
mFireAuth.createUserWithEmailAndPassword(emails, passwords).addOnCompleteListener { task ->
if (task.isSuccessful) {
val currentUser = mFireAuth.currentUser
currentUser!!.sendEmailVerification()
.addOnCompleteListener { task ->
if (task.isSuccessful) {
store(Fnames, Mnames, Lnames, emails, progressDialog)
}
}
} else {
Toast.makeText(this, task.exception?.message, Toast.LENGTH_LONG).show();
progressDialog.dismiss()
}
}
}
}
private fun store(Fnames: String, Mnames: String, Lnames: String, emails: String, progressDialog: ProgressDialog) {
var curruserId = mFireAuth.currentUser!!.uid
userreference = FirebaseDatabase.getInstance().reference.child("usersID").child(curruserId)
val currUserHashMap = HashMap<String, Any>()
currUserHashMap["usersID"] = curruserId
currUserHashMap["ACType"] = acType.toLowerCase()
currUserHashMap["FName"] = Fnames.toLowerCase()
currUserHashMap["MName"] = Mnames.toLowerCase()
currUserHashMap["LName"] = Lnames.toLowerCase()
currUserHashMap["Email"] = emails
currUserHashMap["Profession"] = "Full-time BizNotio User"
currUserHashMap["Education"] = "World University"
currUserHashMap["BizNotioGoals"] = "Serve the world"
currUserHashMap["Interests"] = "Learning"
currUserHashMap["Image"] = "gs://bitnoti0.appspot.com/user Info/profile.png"
userreference.updateChildren(currUserHashMap)
.addOnCompleteListener { task ->
if (task.isSuccessful) {
Toast.makeText(this, "Sign up Success. Check your email and verify it.", Toast.LENGTH_LONG).show();
startActivity(Intent(this@SignUp, SignInActivity::class.java))
finish()
}
else
{
Toast.makeText(this, task.exception?.message, Toast.LENGTH_LONG).show();
progressDialog.dismiss()
}
}
}
}
|
package com.github.chosamuel.kishibe.webgl
import com.github.chosamuel.kishibe.application.Application
import org.khronos.webgl.Uint16Array
import org.khronos.webgl.WebGLRenderingContext as GL
class IndexBuffer(val glContext: GL): Buffer {
val indices = mutableListOf<Short>()
private var indexBuffer = glContext.createBuffer()
fun addIndex(i: Short) = indices.add(i)
fun addIndices(l: List<Short>){
l.forEach {
indices.add(it)
}
}
fun numIndices(): Int = indices.size
override fun bind(app: Application){
val indexData = Uint16Array(indices.toTypedArray())
indexBuffer = glContext.createBuffer()
glContext.bindBuffer(GL.ELEMENT_ARRAY_BUFFER, indexBuffer)
glContext.bufferData(GL.ELEMENT_ARRAY_BUFFER,indexData,GL.STATIC_DRAW)
}
fun draw(glPrimitiveMode: Int) = glContext.drawElements(
glPrimitiveMode,
numIndices(),
GL.UNSIGNED_SHORT,
0
)
override fun unbind() = glContext.deleteBuffer(indexBuffer)
}
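// Illustrative usage sketch (not part of this file; `gl` and `app` are assumed
// to come from the surrounding rendering setup):
//   val quad = IndexBuffer(gl)
//   quad.addIndices(listOf<Short>(0, 1, 2, 2, 3, 0)) // two triangles forming a quad
//   quad.bind(app)
//   quad.draw(GL.TRIANGLES)
//   quad.unbind()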
|
import { transform } from '../memory';
import { locate } from './locate';
import { reconcile } from './reconcile';
export const queue = [];
export function trigger (memory, elm) {
const { '': [, { '': { '': callback }, ...refs }] } = memory;
const depth = callback();
let map = queue[depth];
if (!queue.length) {
setTimeout(() => {
while (queue.length) {
if (!(map = queue.shift())) continue;
for (const callback of map.values()) callback();
map.clear();
}
}, 0);
}
if (!map) map = queue[depth] = new Map();
else if (map.has(memory)) return;
map.set(memory, () => {
const content = transform(memory);
const { '': [children] } = elm;
const index = children.indexOf(memory);
const sibling = ~index ? locate(children.slice(index + 1)) : undefined;
reconcile(memory, content, refs, elm, memory, sibling);
});
}
|
import { injectable, inject } from 'tsyringe';
import { AccountNotFound } from '@/domain/errors';
import { AccountRepository } from '@/infra/db/account/repositories';
import { DeleteVoluntary } from '@/domain/usecases/voluntary';
import { DeleteAccount } from '@/domain/usecases/account';
import { VoluntaryRepository } from '@/infra/db/voluntaries/repositories';
@injectable()
export class DbDeleteVoluntary implements DeleteVoluntary {
constructor(
@inject('AccountRepository')
private accountRepository: AccountRepository,
@inject('VoluntaryRepository')
private voluntaryRepository: VoluntaryRepository,
@inject('DeleteAccount')
private deleteAccount: DeleteAccount
) {}
async delete(data: DeleteVoluntary.Params): Promise<DeleteVoluntary.Result> {
const { id } = data;
const account = await this.accountRepository.findById(id);
if (!account) {
throw new AccountNotFound();
}
await this.voluntaryRepository.delete(account.voluntary);
await this.deleteAccount.delete({ id: account._id });
return true;
}
}
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeOperators #-}
module Control.Cham.Agent where
import Control.Cham.Context
import Control.Cham.Label
import Control.Cham.Name
import Data.Kind
data Agent :: Context -> Type where
Inaction :: Agent 'Empty
-- ^ fixpoint for all reactions
Prefix :: Ion ty ion valence
-> Agent cxt
-> Agent (cxt :. Ion ty ion valence)
  -- ^ binds a type variable to this reagent. Can be
  -- thought of as currying.
Parallel :: Agent cxt0
-> Agent cxt1
-> Agent (cxt0 >< cxt1)
-- ^ product of reagents that can run in parallel safely.
Restrict :: Particle ty valence
-> Agent cxt
-> Agent (cxt :. Ion ty valence 'Neg)
  {- omitted for the moment because it's hard and probably
-- undecidable
Relabel :: (a -> Ion b lbl')
-> Agent (cxt :. Ion a lbl)
-> Agent cxt'
-}
External :: Agent cxt0
-> Agent cxt1
-> Agent (cxt0 >< cxt1)
|
const { logError } = require('../errorLogging.js');
const run = ({messageText, sendMessage, db}) => {
const tokens = messageText.split(' ');
const token0 = tokens.length > 0 && tokens[0].toLowerCase();
const token1 = tokens.length > 1 && tokens[1].toLowerCase();
if (token0 === 'name') {
getName(sendMessage, db);
return true;
} else if (token0 === 'name-add' || token0 === 'add-name') {
putName(tokens.slice(1).join(' '), sendMessage, db);
return true;
} else if (
token0 === 'add' && token1 === 'name' ||
token0 === 'name' && token1 === 'add'
) {
putName(tokens.slice(2).join(' '), sendMessage, db);
return true;
} else {
return false;
}
}
const getName = (sendMessage, db) => {
try {
const names = db.getData('/names');
if (names && names.length > 0) {
const i = getRandInt(names.length);
sendMessage(names[i]);
} else {
sendMessage('I don\'t know any names.');
}
} catch (e) {
logError(e);
sendMessage('I don\'t know any names.');
}
}
const getRandInt = (n) => Math.floor(Math.random() * Math.floor(n));
const putName = (name, sendMessage, db) => {
db.push('/names', [name], false);
sendMessage('Added name: ' + name);
}
module.exports=run;
|
import { RefObject } from 'react';
// https://css-tricks.com/the-trick-to-viewport-units-on-mobile/
export const stretchViewPortHeight = (elem = document.documentElement) => {
const viewportHeight = window.innerHeight * 0.01;
elem.style.setProperty('--vh', viewportHeight + 'px');
};
export const elemInView = (
elem: RefObject<HTMLElement>,
fullyVisible = false
): boolean => {
if (!elem.current) return false;
const position = elem.current.getBoundingClientRect();
const elemTop = position.top;
const elemBottom = position.bottom;
const isVisible = fullyVisible
? elemTop >= 0 && elemBottom <= window.innerHeight
: elemTop < window.innerHeight && elemBottom >= 0;
return isVisible;
};
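// Illustrative wiring (not part of the original file): keeps the --vh custom
// property in sync with the viewport, per the css-tricks trick referenced above.
// The function name is an assumption; call it once during app setup.
export const installViewportHeightListener = (): void => {
  stretchViewPortHeight();
  window.addEventListener('resize', () => stretchViewPortHeight());
};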
|
import 'package:flutter/material.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:get/get.dart';
import 'package:mobile_app/locator.dart';
import 'package:mobile_app/models/user.dart';
import 'package:mobile_app/ui/views/profile/profile_view.dart';
import 'package:mobile_app/utils/image_test_utils.dart';
import 'package:mobile_app/utils/router.dart';
import 'package:mobile_app/viewmodels/profile/profile_viewmodel.dart';
import 'package:mobile_app/viewmodels/profile/user_projects_viewmodel.dart';
import 'package:mockito/mockito.dart';
import 'package:shared_preferences/shared_preferences.dart';
import '../../setup/test_data/mock_user.dart';
import '../../setup/test_helpers.dart';
void main() {
group('ProfileViewTest -', () {
late NavigatorObserver mockObserver;
setUpAll(() async {
SharedPreferences.setMockInitialValues({});
await setupLocator();
locator.allowReassignment = true;
});
setUp(() => mockObserver = NavigatorObserverMock());
Future<void> _pumpProfileView(WidgetTester tester) async {
// Mock Local Storage
var _localStorageService = getAndRegisterLocalStorageServiceMock();
var user = User.fromJson(mockUser);
when(_localStorageService.currentUser).thenReturn(user);
when(_localStorageService.isLoggedIn).thenReturn(true);
// Mock Profile ViewModel
var _profileViewModel = MockProfileViewModel();
locator.registerSingleton<ProfileViewModel>(_profileViewModel);
when(_profileViewModel.fetchUserProfile(any)).thenReturn(null);
when(_profileViewModel.isSuccess(_profileViewModel.FETCH_USER_PROFILE))
.thenReturn(true);
when(_profileViewModel.user).thenReturn(user);
// Mock User Projects ViewModel
var _userProjectsViewModel = MockUserProjectsViewModel();
locator.registerSingleton<UserProjectsViewModel>(_userProjectsViewModel);
when(_userProjectsViewModel.fetchUserProjects()).thenReturn(null);
when(_userProjectsViewModel
.isSuccess(_userProjectsViewModel.FETCH_USER_PROJECTS))
.thenReturn(false);
await tester.pumpWidget(
GetMaterialApp(
onGenerateRoute: CVRouter.generateRoute,
navigatorObservers: [mockObserver],
home: ProfileView(),
),
);
/// The tester.pumpWidget() call above just built our app widget
/// and triggered the pushObserver method on the mockObserver once.
verify(mockObserver.didPush(any!, any));
}
testWidgets('finds Generic ProfileView widgets',
(WidgetTester tester) async {
await provideMockedNetworkImages(() async {
await _pumpProfileView(tester);
await tester.pumpAndSettle();
// Finds Profile Image
expect(find.byType(Image), findsOneWidget);
// Finds Username
expect(find.text('Test User'), findsOneWidget);
// Finds Joined, Country, Institute, Subscription
expect(find.byWidgetPredicate((widget) {
return widget is RichText &&
(widget.text.toPlainText().contains('Joined : ') ||
widget.text.toPlainText() == 'Country : India' ||
widget.text.toPlainText() ==
'Educational Institute : Gurukul' ||
widget.text.toPlainText() == 'Subscribed to mails : true');
}), findsNWidgets(4));
// Finds Tabs of Circuits, Favorites
expect(find.widgetWithText(Tab, 'Circuits'), findsOneWidget);
expect(find.widgetWithText(Tab, 'Favourites'), findsOneWidget);
});
});
});
}
|
import { IdMap, MockManager, MockRepository } from "medusa-test-utils"
import CustomerService from "../customer"
const eventBusService = {
emit: jest.fn(),
withTransaction: function() {
return this
},
}
describe("CustomerService", () => {
describe("retrieve", () => {
const customerRepository = MockRepository({
findOne: () => Promise.resolve({ id: IdMap.getId("ironman") }),
})
const customerService = new CustomerService({
manager: MockManager,
customerRepository,
})
beforeEach(async () => {
jest.clearAllMocks()
})
it("successfully retrieves a customer", async () => {
const result = await customerService.retrieve(IdMap.getId("ironman"))
expect(customerRepository.findOne).toHaveBeenCalledTimes(1)
expect(customerRepository.findOne).toHaveBeenCalledWith({
where: { id: IdMap.getId("ironman") },
})
expect(result.id).toEqual(IdMap.getId("ironman"))
})
})
describe("retrieveByEmail", () => {
const customerRepository = MockRepository({
findOne: () => Promise.resolve({ id: IdMap.getId("ironman") }),
})
const customerService = new CustomerService({
manager: MockManager,
customerRepository,
})
beforeEach(async () => {
jest.clearAllMocks()
})
it("successfully retrieves a customer by email", async () => {
const result = await customerService.retrieveByEmail("[email protected]")
expect(customerRepository.findOne).toHaveBeenCalledTimes(1)
expect(customerRepository.findOne).toHaveBeenCalledWith({
where: { email: "[email protected]" },
})
expect(result.id).toEqual(IdMap.getId("ironman"))
})
})
describe("retrieveByPhone", () => {
const customerRepository = MockRepository({
findOne: () => Promise.resolve({ id: IdMap.getId("ironman") }),
})
const customerService = new CustomerService({
manager: MockManager,
customerRepository,
})
beforeEach(async () => {
jest.clearAllMocks()
})
it("successfully retrieves a customer by email", async () => {
const result = await customerService.retrieveByPhone("12341234")
expect(customerRepository.findOne).toHaveBeenCalledTimes(1)
expect(customerRepository.findOne).toHaveBeenCalledWith({
where: { phone: "12341234" },
})
expect(result.id).toEqual(IdMap.getId("ironman"))
})
})
describe("create", () => {
const customerRepository = MockRepository({
findOne: query => {
if (query.where.email === "[email protected]") {
return Promise.resolve({
id: IdMap.getId("exists"),
password_hash: "test",
})
}
return Promise.resolve({ id: IdMap.getId("ironman") })
},
})
const customerService = new CustomerService({
manager: MockManager,
customerRepository,
eventBusService,
})
beforeEach(async () => {
jest.clearAllMocks()
})
it("successfully create a customer", async () => {
await customerService.create({
email: "[email protected]",
first_name: "Oliver",
last_name: "Juhl",
})
expect(customerRepository.create).toBeCalledTimes(1)
expect(customerRepository.create).toBeCalledWith({
email: "[email protected]",
first_name: "Oliver",
last_name: "Juhl",
})
})
it("successfully updates an existing customer on create", async () => {
await customerService.create({
email: "[email protected]",
password: "stark123",
has_account: false,
})
expect(customerRepository.save).toBeCalledTimes(1)
expect(customerRepository.save).toBeCalledWith({
id: IdMap.getId("exists"),
email: "[email protected]",
password_hash: expect.anything(),
has_account: true,
})
})
it("fails if email is in incorrect format", async () => {
await expect(
customerService.create({
email: "olivermedusa.com",
})
).rejects.toThrow("The email is not valid")
})
it("fails if billing address is in incorrect format", async () => {
await expect(
customerService.create({
email: "[email protected]",
first_name: "Oliver",
last_name: "Juhl",
billing_address: {
first_name: 1234,
},
})
).rejects.toThrow("The address is not valid")
})
})
describe("update", () => {
const customerRepository = MockRepository({
findOne: query => {
return Promise.resolve({ id: IdMap.getId("ironman") })
},
})
const addressRepository = MockRepository({
create: data => data,
save: data => Promise.resolve(data),
})
const customerService = new CustomerService({
manager: MockManager,
addressRepository,
customerRepository,
eventBusService,
})
beforeEach(async () => {
jest.clearAllMocks()
})
it("successfully updates a customer", async () => {
await customerService.update(IdMap.getId("ironman"), {
first_name: "Olli",
last_name: "Test",
})
expect(customerRepository.save).toBeCalledTimes(1)
expect(customerRepository.save).toBeCalledWith({
id: IdMap.getId("ironman"),
first_name: "Olli",
last_name: "Test",
})
})
it("successfully updates customer metadata", async () => {
await customerService.update(IdMap.getId("ironman"), {
metadata: {
some: "test",
},
})
expect(customerRepository.save).toBeCalledTimes(1)
expect(customerRepository.save).toBeCalledWith({
id: IdMap.getId("ironman"),
metadata: {
some: "test",
},
})
})
it("successfully updates with billing address", async () => {
await customerService.update(IdMap.getId("ironman"), {
first_name: "Olli",
last_name: "Test",
billing_address: {
first_name: "Olli",
last_name: "Juhl",
address_1: "Laksegade",
city: "Copenhagen",
country_code: "DK",
postal_code: "2100",
phone: "+1 (222) 333 4444",
},
})
expect(customerRepository.save).toBeCalledTimes(1)
expect(customerRepository.save).toBeCalledWith({
id: IdMap.getId("ironman"),
first_name: "Olli",
last_name: "Test",
billing_address: {
first_name: "Olli",
last_name: "Juhl",
address_1: "Laksegade",
city: "Copenhagen",
country_code: "dk",
postal_code: "2100",
phone: "+1 (222) 333 4444",
},
})
})
})
describe("updateAddress", () => {
const addressRepository = MockRepository({
findOne: query => {
return Promise.resolve({
id: IdMap.getId("hollywood-boulevard"),
address_1: "Hollywood Boulevard 2",
})
},
})
const customerService = new CustomerService({
manager: MockManager,
addressRepository,
})
beforeEach(async () => {
jest.clearAllMocks()
})
it("successfully updates address", async () => {
await customerService.updateAddress(
IdMap.getId("ironman"),
IdMap.getId("hollywood-boulevard"),
{
first_name: "Tony",
last_name: "Stark",
address_1: "Hollywood Boulevard 1",
city: "Los Angeles",
country_code: "us",
postal_code: "90046",
phone: "+1 (222) 333 4444",
}
)
expect(addressRepository.save).toBeCalledTimes(1)
expect(addressRepository.save).toBeCalledWith({
id: IdMap.getId("hollywood-boulevard"),
first_name: "Tony",
last_name: "Stark",
address_1: "Hollywood Boulevard 1",
city: "Los Angeles",
country_code: "us",
postal_code: "90046",
phone: "+1 (222) 333 4444",
})
})
it("throws on invalid address", async () => {
await expect(
customerService.updateAddress(
IdMap.getId("ironman"),
IdMap.getId("hollywood-boulevard"),
{
first_name: "Tony",
last_name: "Stark",
country_code: "us",
unknown: "key",
address_1: "Hollywood",
}
)
).rejects.toThrow("The address is not valid")
})
})
describe("removeAddress", () => {
const addressRepository = MockRepository({
findOne: query => {
return Promise.resolve({
id: IdMap.getId("hollywood-boulevard"),
address_1: "Hollywood Boulevard 2",
})
},
})
const customerService = new CustomerService({
manager: MockManager,
addressRepository,
})
beforeEach(async () => {
jest.clearAllMocks()
})
it("successfully deletes address", async () => {
await customerService.removeAddress(
IdMap.getId("ironman"),
IdMap.getId("hollywood-boulevard")
)
expect(addressRepository.softRemove).toBeCalledTimes(1)
expect(addressRepository.softRemove).toBeCalledWith({
id: IdMap.getId("hollywood-boulevard"),
address_1: "Hollywood Boulevard 2",
})
})
})
describe("delete", () => {
const customerRepository = MockRepository({
findOne: query => {
return Promise.resolve({ id: IdMap.getId("ironman") })
},
})
const customerService = new CustomerService({
manager: MockManager,
customerRepository,
})
beforeEach(async () => {
jest.clearAllMocks()
})
it("successfully deletes customer", async () => {
await customerService.delete(IdMap.getId("ironman"))
expect(customerRepository.softRemove).toBeCalledTimes(1)
expect(customerRepository.softRemove).toBeCalledWith({
id: IdMap.getId("ironman"),
})
})
})
})
|
<?php
namespace CH\modules;
use CH\modificators\Log\CH_Log;
use Exception;
/**
* BC_Namespacer
 * This class creates a sitemap from composer and works with the file structure
 * as an assoc array.
*/
class BC_Namespacer
{
public static string $composerPath;
/**
* createConfigFile
     * Creates the config file from composer. Gets only the parent namespaces.
     * Requires the static $composerPath to be initialized.
     * @param string $result path of the resulting JSON file
     * @return array assoc array of all parent namespace configs
*/
public static function createConfigFile(string $result = './namespaces.json')
{
$configs = self::getParentNameSpaces();
file_put_contents($result,json_encode($configs));
return $configs;
}
/**
* getParentNameSpaces
     * Returns the parent namespaces using composer.
     * IMPORTANT: the static value $composerPath has to be initialized!
* @return array
*/
public static function getParentNameSpaces(): array{
$configs = json_decode(file_get_contents(self::$composerPath),true);
// get only neccesary data
$configs = $configs['autoload']['psr-4'];
foreach($configs as $key => $value){
unset($configs[$key]);
$configs[stripslashes($key)] = $value;
}
foreach($configs as $key => $value){
$configs[$key] = explode(DIRECTORY_SEPARATOR,$value)[0];
}
return $configs;
}
/**
* generateNameSpaceByPath
     * Generates and returns the namespace corresponding to the given path.
     * @param string $path
     * @return string
*/
public static function generateNameSpaceByPath(string $path):string{
        $parentNamespaces = self::getParentNameSpaces();
$path = explode(DIRECTORY_SEPARATOR,$path);
$result = '';
$number = null;
foreach($path as $num => $subPath){
foreach($parentNamespaces as $key =>$parentNamespace){
if($subPath == $parentNamespace){
$number = $num;
$result = $key;
}
}
}
if($result == ''){
            throw new Exception('Wrong path');
} else {
for($i = $number + 1; $i < count($path); ++$i){
$result .= "\\".$path[$i];
}
}
return $result;
}
}
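// Illustrative usage (not part of the original file; the composer.json path and
// the module path below are assumptions):
//   BC_Namespacer::$composerPath = __DIR__ . '/composer.json';
//   $parents = BC_Namespacer::getParentNameSpaces();
//   $fqn = BC_Namespacer::generateNameSpaceByPath(__DIR__ . '/src/modules/Example');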
|
package com.ainsigne.masterdetailitunes.data
import androidx.room.ColumnInfo
import androidx.room.Entity
import androidx.room.PrimaryKey
/**
 * ItunesSearch class represents the query made with the term, media and country parameters
*/
@Entity(tableName = "itunes_searches")
data class ItunesSearch(
/**
* timestamp [String] when it is searched
*/
@PrimaryKey() @ColumnInfo(name = "timestamp")
var timestamp : String = "21345",
/**
* term [String] the term to be searched
*/
var term : String = "avengers",
/**
* media [String] the media type to be searched
*/
var media : String = "movie",
/**
* TODO : unused
*/
var entity : String? = null,
/**
     * country [String] the country criteria to be searched
*/
var country : String = "us") {
}
|
#include "SwapNodesInPairs.h"
using namespace lcpp;
ListNode *Solution24_1::swapPairs(ListNode *head) {
ListNode Dummy(0, head), *Tail = &Dummy, *P1 = head, *P2;
while (P1 != nullptr && (P2 = P1->next) != nullptr) {
P1->next = P2->next;
P2->next = P1;
Tail->next = P2;
Tail = P1;
P1 = P1->next;
}
return Dummy.next;
}
|
/**
* @file dayOfTheWeek.dart
 * @brief Provides the name of the day of the week for a given date
 */
/**
 * Returns the day as an integer value in the Gregorian calendar
 * @param day The day of the chosen date
 * @param month The month of the chosen date
 * @param year The year of the chosen date
 * @return The value of the day
*/
int dayGregorianCalendar(int day, int month, int year) {
return (day +
((13 * (month + 1)) / 5).floor() +
year +
(year / 4).floor() -
(year / 100).floor() +
(year / 400).floor()) %
7;
}
/**
 * Returns the day as an integer value in the Julian calendar
 * @param day The day of the chosen date
 * @param month The month of the chosen date
 * @param year The year of the chosen date
 * @return The value of the day
*/
int dayJulianCalendar(int day, int month, int year) {
return (day +
((13 * (month + 1)) / 5).floor() +
year +
(year / 4).floor() +
5) %
7;
}
/**
 * Returns the value of the day as a string
 * @param day The integer returned by applying Zeller's formula
 * @return String The name of the day in French (lundi to dimanche)
*/
String nameOfTheDay(int day) {
String nameOfTheDay = "";
switch (day) {
case 0:
{
nameOfTheDay = "samedi";
}
break;
case 1:
{
nameOfTheDay = "dimanche";
}
break;
case 2:
{
nameOfTheDay = "lundi";
}
break;
case 3:
{
nameOfTheDay = "mardi";
}
break;
case 4:
{
nameOfTheDay = "mercredi";
}
break;
case 5:
{
nameOfTheDay = "jeudi";
}
break;
case 6:
{
nameOfTheDay = "vendredi";
}
break;
}
return nameOfTheDay;
}
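/**
 * Illustrative example (not part of the original file). Zeller's congruence,
 * which both functions above implement, treats January and February as months
 * 13 and 14 of the previous year, so calls for those months are assumed to
 * pass the adjusted values; June needs no adjustment.
 */
void exampleDayOfTheWeek() {
  // 14 June 2023 (Gregorian): value 4, i.e. "mercredi" (Wednesday).
  final value = dayGregorianCalendar(14, 6, 2023);
  print(nameOfTheDay(value));
}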
|
package gg.rsmod.plugins.content.mechanics.prayer
import gg.rsmod.game.model.attr.AttributeKey
import gg.rsmod.game.model.attr.PROTECT_ITEM_ATTR
import gg.rsmod.game.model.bits.INFINITE_VARS_STORAGE
import gg.rsmod.game.model.bits.InfiniteVarsType
import gg.rsmod.game.model.entity.Player
import gg.rsmod.game.model.queue.QueueTask
import gg.rsmod.game.model.timer.TimerKey
import gg.rsmod.game.plugin.Plugin
import gg.rsmod.game.sync.block.UpdateBlockType
import gg.rsmod.plugins.api.GameframeTab
import gg.rsmod.plugins.api.InterfaceDestination
import gg.rsmod.plugins.api.PrayerIcon
import gg.rsmod.plugins.api.Skills
import gg.rsmod.plugins.api.ext.*
object Prayers {
private val PRAYER_DRAIN_COUNTER = AttributeKey<Int>()
val PRAYER_DRAIN = TimerKey()
private val DISABLE_OVERHEADS = TimerKey()
private const val DEACTIVATE_PRAYER_SOUND = 2663
private const val ACTIVE_PRAYERS_VARP = 83
private const val SELECTED_QUICK_PRAYERS_VARP = 84
//const val INF_PRAY_VARBIT = 5314
private const val QUICK_PRAYERS_ACTIVE_VARBIT = 4103
private const val KING_RANSOMS_QUEST_VARBIT = 3909 // Used for chivalry/piety prayer.
const val RIGOUR_UNLOCK_VARBIT = 5451
const val AUGURY_UNLOCK_VARBIT = 5452
const val PRESERVE_UNLOCK_VARBIT = 5453
fun disableOverheads(p: Player, cycles: Int) {
p.timers[DISABLE_OVERHEADS] = cycles
}
fun deactivateAll(p: Player) {
p.setVarp(ACTIVE_PRAYERS_VARP, 0)
p.setVarbit(QUICK_PRAYERS_ACTIVE_VARBIT, 0)
p.attr.remove(PROTECT_ITEM_ATTR)
if (p.prayerIcon != -1) {
p.prayerIcon = -1
p.addBlock(UpdateBlockType.APPEARANCE)
}
}
suspend fun toggle(it: QueueTask, prayer: Prayer) {
val p = it.player
if (p.isDead() || !p.lock.canUsePrayer()) {
p.syncVarp(ACTIVE_PRAYERS_VARP)
return
} else if (!checkRequirements(it, prayer)) {
return
} else if (prayer.group == PrayerGroup.OVERHEAD && p.timers.has(DISABLE_OVERHEADS)) {
p.syncVarp(ACTIVE_PRAYERS_VARP)
p.message("You cannot use overhead prayers right now.")
return
} else if (p.getSkills().getCurrentLevel(Skills.PRAYER) == 0) {
return
}
it.terminateAction = { p.syncVarp(ACTIVE_PRAYERS_VARP) }
while (p.lock.delaysPrayer()) {
it.wait(1)
}
val active = p.getVarbit(prayer.varbit) != 0
if (active) {
deactivate(p, prayer)
} else {
activate(p, prayer)
}
p.setVarbit(QUICK_PRAYERS_ACTIVE_VARBIT, 0)
}
fun activate(p: Player, prayer: Prayer) {
if (!isActive(p, prayer)) {
val others = Prayer.values.filter { other -> prayer != other && other.group != null &&
(prayer.group == other.group || prayer.overlap.contains(other.group)) }
others.forEach { other ->
if (p.getVarbit(other.varbit) != 0) {
p.setVarbit(other.varbit, 0)
}
}
p.setVarbit(prayer.varbit, 1)
if (prayer.sound != -1) {
p.playSound(prayer.sound)
}
setOverhead(p)
if (prayer == Prayer.PROTECT_ITEM) {
p.attr[PROTECT_ITEM_ATTR] = true
}
}
}
fun deactivate(p: Player, prayer: Prayer) {
if (isActive(p, prayer)) {
p.setVarbit(prayer.varbit, 0)
p.playSound(DEACTIVATE_PRAYER_SOUND)
setOverhead(p)
if (prayer == Prayer.PROTECT_ITEM) {
p.attr[PROTECT_ITEM_ATTR] = false
}
}
}
fun drainPrayer(p: Player) {
if (p.isDead() || p.getVarp(ACTIVE_PRAYERS_VARP) == 0 || p.hasStorageBit(INFINITE_VARS_STORAGE, InfiniteVarsType.PRAY)) {
p.attr.remove(PRAYER_DRAIN_COUNTER)
return
}
val drain = calculateDrainRate(p)
if (drain > 0) {
val counter = p.attr.getOrDefault(PRAYER_DRAIN_COUNTER, 0) + drain
val resistance = 60 + (p.getPrayerBonus() * 2)
if (counter >= resistance) {
val points = Math.floor((counter / resistance).toDouble()).toInt()
p.getSkills().alterCurrentLevel(Skills.PRAYER, -points)
p.attr.put(PRAYER_DRAIN_COUNTER, counter - (resistance * points))
} else {
p.attr.put(PRAYER_DRAIN_COUNTER, counter)
}
}
if (p.getSkills().getCurrentLevel(Skills.PRAYER) == 0) {
deactivateAll(p)
p.message("You have run out of prayer points, you can recharge at an altar.")
}
}
fun selectQuickPrayer(it: Plugin, prayer: Prayer) {
val player = it.player
if (player.isDead() || !player.lock.canUsePrayer()) {
player.setVarbit(QUICK_PRAYERS_ACTIVE_VARBIT, 0)
return
}
val slot = prayer.quickPrayerSlot
val enabled = (player.getVarp(SELECTED_QUICK_PRAYERS_VARP) and (1 shl slot)) != 0
it.player.queue {
if (!enabled) {
if (checkRequirements(this, prayer)) {
val others = Prayer.values.filter { other -> prayer != other && other.group != null &&
(prayer.group == other.group || prayer.overlap.contains(other.group)) }
others.forEach { other ->
val otherEnabled = (player.getVarp(SELECTED_QUICK_PRAYERS_VARP) and (1 shl other.quickPrayerSlot)) != 0
if (otherEnabled) {
player.setVarp(SELECTED_QUICK_PRAYERS_VARP, player.getVarp(SELECTED_QUICK_PRAYERS_VARP) and (1 shl other.quickPrayerSlot).inv())
}
}
player.setVarp(SELECTED_QUICK_PRAYERS_VARP, player.getVarp(SELECTED_QUICK_PRAYERS_VARP) or (1 shl slot))
}
} else {
player.setVarp(SELECTED_QUICK_PRAYERS_VARP, player.getVarp(SELECTED_QUICK_PRAYERS_VARP) and (1 shl slot).inv())
}
}
}
fun toggleQuickPrayers(p: Player, opt: Int) {
if (p.isDead() || !p.lock.canUsePrayer()) {
p.setVarbit(QUICK_PRAYERS_ACTIVE_VARBIT, 0)
return
}
if (opt == 1) {
val quickPrayers = p.getVarp(SELECTED_QUICK_PRAYERS_VARP)
when {
quickPrayers == 0 -> {
p.setVarbit(QUICK_PRAYERS_ACTIVE_VARBIT, 0)
p.message("You haven't selected any quick-prayers.")
}
p.getSkills().getCurrentLevel(Skills.PRAYER) <= 0 -> {
p.setVarbit(QUICK_PRAYERS_ACTIVE_VARBIT, 0)
p.message("You have run out of prayer points, you can recharge at an altar.")
}
p.getVarp(ACTIVE_PRAYERS_VARP) == quickPrayers -> {
/*
* All active prayers are quick-prayers - so we turn them off.
*/
p.setVarp(ACTIVE_PRAYERS_VARP, 0)
p.setVarbit(QUICK_PRAYERS_ACTIVE_VARBIT, 0)
setOverhead(p)
}
else -> {
p.setVarp(ACTIVE_PRAYERS_VARP, quickPrayers)
p.setVarbit(QUICK_PRAYERS_ACTIVE_VARBIT, 1)
setOverhead(p)
}
}
} else if (opt == 2) {
p.setInterfaceEvents(interfaceId = 77, component = 4, from = 0, to = 29, setting = 2)
p.openInterface(interfaceId = 77, dest = InterfaceDestination.PRAYER)
p.focusTab(GameframeTab.PRAYER)
}
}
fun isActive(p: Player, prayer: Prayer): Boolean = p.getVarbit(prayer.varbit) != 0
private suspend fun checkRequirements(it: QueueTask, prayer: Prayer): Boolean {
val p = it.player
if (p.getSkills().getMaxLevel(Skills.PRAYER) < prayer.level) {
p.syncVarp(ACTIVE_PRAYERS_VARP)
it.messageBox("You need a <col=000080>Prayer</col> level of ${prayer.level} to use <col=000080>${prayer.named}.")
return false
}
        // TODO(Tom): get correct messages for these unlockables
if (prayer == Prayer.PRESERVE && p.getVarbit(PRESERVE_UNLOCK_VARBIT) == 0) {
p.syncVarp(ACTIVE_PRAYERS_VARP)
it.messageBox("You have not unlocked this prayer.")
return false
}
if (prayer == Prayer.CHIVALRY && p.getVarbit(KING_RANSOMS_QUEST_VARBIT) < 8) {
p.syncVarp(ACTIVE_PRAYERS_VARP)
it.messageBox("You have not unlocked this prayer.")
return false
}
if (prayer == Prayer.PIETY && p.getVarbit(KING_RANSOMS_QUEST_VARBIT) < 8) {
p.syncVarp(ACTIVE_PRAYERS_VARP)
it.messageBox("You have not unlocked this prayer.")
return false
}
if (prayer == Prayer.RIGOUR && p.getVarbit(RIGOUR_UNLOCK_VARBIT) == 0) {
p.syncVarp(ACTIVE_PRAYERS_VARP)
it.messageBox("You have not unlocked this prayer.")
return false
}
if (prayer == Prayer.AUGURY && p.getVarbit(AUGURY_UNLOCK_VARBIT) == 0) {
p.syncVarp(ACTIVE_PRAYERS_VARP)
it.messageBox("You have not unlocked this prayer.")
return false
}
return true
}
private fun setOverhead(p: Player) {
val icon = when {
isActive(p, Prayer.PROTECT_FROM_MELEE) -> PrayerIcon.PROTECT_FROM_MELEE
isActive(p, Prayer.PROTECT_FROM_MISSILES) -> PrayerIcon.PROTECT_FROM_MISSILES
isActive(p, Prayer.PROTECT_FROM_MAGIC) -> PrayerIcon.PROTECT_FROM_MAGIC
isActive(p, Prayer.RETRIBUTION) -> PrayerIcon.RETRIBUTION
isActive(p, Prayer.SMITE) -> PrayerIcon.SMITE
isActive(p, Prayer.REDEMPTION) -> PrayerIcon.REDEMPTION
else -> PrayerIcon.NONE
}
if (p.prayerIcon != icon.id) {
p.prayerIcon = icon.id
p.addBlock(UpdateBlockType.APPEARANCE)
}
}
private fun calculateDrainRate(p: Player): Int = Prayer.values.filter { isActive(p, it) }.sumBy { it.drainEffect }
}
|
// <copyright file="IByteArrayMarshaller.cs" company="Microsoft Corporation">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
namespace Microsoft.Azure.Networking.Infrastructure.RingMaster.Backend
{
using RequestResponse = Microsoft.Azure.Networking.Infrastructure.RingMaster.Requests.RequestResponse;
/// <summary>
    /// Interface IByteArrayMarshaller abstracts the ability to read and write requests and responses to and from byte arrays
/// </summary>
public interface IByteArrayMarshaller
{
/// <summary>
/// Serializes the request as bytes.
/// </summary>
/// <param name="request">The request.</param>
/// <returns>serialized bytes</returns>
byte[] SerializeRequestAsBytes(RequestCall request);
/// <summary>
/// Deserializes the request from bytes.
/// </summary>
/// <param name="requestBytes">The request bytes.</param>
/// <returns>deserialized object</returns>
RequestCall DeserializeRequestFromBytes(byte[] requestBytes);
/// <summary>
/// Serializes the response as bytes.
/// </summary>
/// <param name="response">The response.</param>
/// <returns>serialized bytes.</returns>
byte[] SerializeResponseAsBytes(RequestResponse response);
/// <summary>
/// Deserializes the response from bytes.
/// </summary>
/// <param name="responseBytes">The response bytes.</param>
/// <returns>deserialized object</returns>
RequestResponse DeserializeResponseFromBytes(byte[] responseBytes);
}
}
|
module Api
module V1
class SpaNSalonsController < ApplicationController
before_action :auth
before_action :set_spa_n_salon, only: %i[show update destroy]
# GET /spa_n_salons
def index
@spa_n_salons = SpaNSalon.all
render json: @spa_n_salons
end
# GET /spa_n_salons/1
def show
@services = @spa_n_salon.services.order('cost asc')
render json: @services
end
# POST /spa_n_salons
def create
@spa_n_salon = SpaNSalon.new(spa_n_salon_params)
if @spa_n_salon.save
render json: @spa_n_salon, status: :ok
else
render json: @spa_n_salon.errors, status: :unprocessable_entity
end
end
# PATCH/PUT /spa_n_salons/1
def update
if @spa_n_salon.update(spa_n_salon_params)
render json: @spa_n_salon
else
render json: @spa_n_salon.errors, status: :unprocessable_entity
end
end
# DELETE /spa_n_salons/1
def destroy
if @spa_n_salon.destroy
render json: { message: 'deleted successfully' }, status: :ok
else
render json: @spa_n_salon.errors, status: :unprocessable_entity
end
end
def sorted_service
@salon = SpaNSalon.joins('INNER JOIN Services on spa_n_salons.id = services.spa_n_salon_id')
.select('*').order('cost asc')
render json: @salon
end
private
# Use callbacks to share common setup or constraints between actions.
def set_spa_n_salon
@spa_n_salon = SpaNSalon.find(params[:id])
end
# Only allow a list of trusted parameters through.
def spa_n_salon_params
params.require(:spa_n_salon).permit(:companyName, :address, :available_chairs, :owner_id)
end
end
end
end
|
#!/bin/bash
cd $(dirname $0)
[ -d target/release ] || \
cargo build --release
exec ./target/release/helloworld
|
<?php
namespace SlimSkeletonWebAPI\DAO;
use Doctrine\ORM\EntityManager;
use Doctrine\ORM\EntityRepository;
use SlimSkeletonWebAPI\Entities\BaseEntity;
abstract class BaseDAO
{
/**
* @var \Doctrine\ORM\EntityManager
*/
private $_entityManager = null;
/**
* @var \Doctrine\ORM\EntityRepository
*/
private $_entityRepository = null;
/**
* @var string
*/
private $_entityClass = null;
public function __construct(EntityManager $entityManager, $entityClass)
{
if (!isset($entityClass)) {
throw new \Exception('The parameter $entityClass not set!');
}
$this->_entityManager = $entityManager;
$this->_entityClass = $entityClass;
}
/**
* Get entity class name
*
* @return string
*/
public function getEntityClass()
{
if (!isset($this->_entityClass)) {
throw new \Exception('The EntityClass not set!');
}
return $this->_entityClass;
}
/**
* Get instance of entity manager
*
* @return Doctrine\ORM\EntityManager
*/
public function getEntityManagerInstance()
{
if (!isset($this->_entityManager)) {
throw new \Exception('The EntityManager not set!');
}
return $this->_entityManager;
}
/**
     * Get singleton instance of the entity repository
*
* @return Doctrine\ORM\EntityRepository
*/
public function getEntityRepositoryInstance()
{
if ($this->_entityRepository === null) {
$this->_entityRepository = $this->getEntityManagerInstance()->getRepository($this->getEntityClass());
}
return $this->_entityRepository;
}
/**
* Return all records.
*
* @return array of entities
*/
public function getAll()
{
$entities = $this->getEntityRepositoryInstance()->findAll();
return $entities;
}
/**
* Return one record.
*
* @param integer $id
* @return object entity
*/
public function getById($id)
{
$entity = $this->getEntityRepositoryInstance()->find($id);
return $entity;
}
/**
* Delete record in DB.
*
* @param integer $id
*
* @return boolean
*/
public function delete($id)
{
$entity = $this->getById($id);
if ($entity !== null) {
$this->getEntityManagerInstance()->remove($entity);
$this->getEntityManagerInstance()->flush();
}
$entity = $this->getById($id);
return ($entity === null);
}
/**
* Insert or update record in DB.
*
* @param BaseEntity $entity
*
* @return object entity
*/
public function insertOrUpdate(BaseEntity $entity)
{
if ($entity->getId() != 0) {
$modEntity = $entity;
$entity = $this->getById($entity->getId());
$this->setProperties($entity, $modEntity);
} elseif (method_exists($entity, "setCreatedAt")) {
$entity->setCreatedAt(new \DateTime('now'));
}
$this->getEntityManagerInstance()->persist($entity);
$this->getEntityManagerInstance()->flush();
return $this->getById($entity->getId());
}
/**
* Set new values to update entity by reflection
*
* @param BaseEntity $entity reference for record in DB
* @param BaseEntity $modEntity entity with new values
*
* @return void
*/
private function setProperties(&$entity, &$modEntity)
{
$class = get_class($entity);
$methods = get_class_methods($class);
foreach ($methods as $method) {
            preg_match('/^(set)(.*?)$/i', $method, $results);
$pre = $results[1] ?? '';
if ($pre == 'set') {
$getMethod = str_replace('set', 'get', $method);
$newValue = $modEntity->$getMethod();
if (!empty($newValue)) {
$entity->$method($newValue);
}
}
}
}
}
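/*
 * Hypothetical usage sketch, not part of the original file: a concrete DAO only
 * needs to hand its entity class to the BaseDAO constructor. The UserDAO and
 * User names below are assumptions for illustration.
 *
 *     class UserDAO extends BaseDAO
 *     {
 *         public function __construct(EntityManager $entityManager)
 *         {
 *             parent::__construct($entityManager, \SlimSkeletonWebAPI\Entities\User::class);
 *         }
 *     }
 *
 *     $dao = new UserDAO($entityManager);
 *     $users = $dao->getAll();          // all records
 *     $user  = $dao->getById(1);        // single record or null
 *     $dao->insertOrUpdate($user);      // updates existing rows, sets createdAt on new ones
 *     $dao->delete(1);                  // returns true once the row is gone
 */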
|
#!/usr/bin/perl -w
if($#ARGV != 0) {
print "USAGE: perl gen-generic-asyncAwait.pl <Total Args>\n";
exit();
}
####################################################
##include "hclib-utils.h"
#namespace hclib
#{
# using namespace std;
#
# void asyncAwait(DDF_t* ddf0, DDF_t* ddf1, std::function<void()> &&lambda)
# {
# int ddfs = 2+1;
# DDF_t** ddfList = (DDF_t**) malloc(sizeof(DDF_t *) * ddfs);
# ddfList[0] = ddf0;
# ddfList[1] = ddf1;
# ddfList[2] = NULL;
#
# std::function<void()> * copy_of_lambda = new std::function<void()> (lambda);
# ::async(&async_cpp_wrapper, (void *)copy_of_lambda, ddfList, NO_PHASER, NO_PROP);
# }
#
#}
####################################################
print "#include \"hclib-utils.h\"\n";
print "namespace hclib { \n";
print " using namespace std; \n";
for (my $j=0; $j<$ARGV[0]; $j++) {
print " void asyncAwait(DDF_t* ddf0";
#Printing the DDF_t parameters
for (my $i=1; $i<=$j; $i++) {
print ", DDF_t* ddf$i";
}
print ", std::function<void()> &&lambda) {\n";
my $ddfs = $j + 2;
print " int ddfs = $ddfs;\n";
print " DDF_t** ddfList = (DDF_t**) malloc(sizeof(DDF_t *) * ddfs);\n";
for (my $i=0; $i<=$j; $i++) {
print " ddfList[$i] = ddf$i; \n";
}
$ddfs = $ddfs - 1;
print " ddfList[$ddfs] = NULL; \n";
print " std::function<void()> * copy_of_lambda = new std::function<void()> (lambda);\n";
print " ::async(&async_cpp_wrapper, (void *)copy_of_lambda, ddfList, NO_PHASER, NO_PROP);\n";
print " }\n";
}
print "}\n";
|
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Models\Exams;
use Auth;
class ExamController extends Controller
{
//
public function jobtest(Request $request)
{
$exam = new Exams();
$exam->q1 = $request->input('q1');
$exam->q2 = $request->input('q2');
$exam->q3 = $request->input('q3');
$exam->q4 = $request->input('q4');
$exam->q5 = $request->input('q5');
$exam->user_id = Auth::id();
$exam->save();
        return back()->with('success', 'Check your email for the Next Step...');
}
}
|
package scredis
import akka.actor.ActorSystem
import com.typesafe.config.Config
import scredis.commands._
import scredis.io.SubscriberAkkaConnection
import scala.concurrent.Future
import scala.concurrent.duration._
/**
* Defines a Pub/Sub Redis client capable of subscribing to channels/patterns.
*
* @param host server address
* @param port server port
* @param passwordOpt optional server password
* @param nameOpt optional client name (available since 2.6.9)
* @param connectTimeout connection timeout
* @param receiveTimeoutOpt optional batch receive timeout
* @param maxWriteBatchSize max number of bytes to send as part of a batch
* @param tcpSendBufferSizeHint size hint of the tcp send buffer, in bytes
* @param tcpReceiveBufferSizeHint size hint of the tcp receive buffer, in bytes
* @param akkaListenerDispatcherPath path to listener dispatcher definition
* @param akkaIODispatcherPath path to io dispatcher definition
* @param akkaDecoderDispatcherPath path to decoder dispatcher definition
*
* @define e [[scredis.exceptions.RedisErrorResponseException]]
* @define client [[scredis.SubscriberClient]]
* @define tc com.typesafe.Config
*/
class SubscriberClient(
host: String = RedisConfigDefaults.Redis.Host,
port: Int = RedisConfigDefaults.Redis.Port,
passwordOpt: Option[String] = RedisConfigDefaults.Redis.PasswordOpt,
nameOpt: Option[String] = RedisConfigDefaults.Redis.NameOpt,
connectTimeout: FiniteDuration = RedisConfigDefaults.IO.ConnectTimeout,
receiveTimeoutOpt: Option[FiniteDuration] = RedisConfigDefaults.IO.ReceiveTimeoutOpt,
maxWriteBatchSize: Int = RedisConfigDefaults.IO.MaxWriteBatchSize,
tcpSendBufferSizeHint: Int = RedisConfigDefaults.IO.TCPSendBufferSizeHint,
tcpReceiveBufferSizeHint: Int = RedisConfigDefaults.IO.TCPReceiveBufferSizeHint,
akkaListenerDispatcherPath: String = RedisConfigDefaults.IO.Akka.ListenerDispatcherPath,
akkaIODispatcherPath: String = RedisConfigDefaults.IO.Akka.IODispatcherPath,
akkaDecoderDispatcherPath: String = RedisConfigDefaults.IO.Akka.DecoderDispatcherPath
)(implicit system: ActorSystem) extends SubscriberAkkaConnection(
system = system,
host = host,
port = port,
passwordOpt = passwordOpt,
nameOpt = nameOpt,
connectTimeout = connectTimeout,
receiveTimeoutOpt = receiveTimeoutOpt,
maxWriteBatchSize = maxWriteBatchSize,
tcpSendBufferSizeHint = tcpSendBufferSizeHint,
tcpReceiveBufferSizeHint = tcpReceiveBufferSizeHint,
decodersCount = 2,
akkaListenerDispatcherPath = akkaListenerDispatcherPath,
akkaIODispatcherPath = akkaIODispatcherPath,
akkaDecoderDispatcherPath = akkaDecoderDispatcherPath
) with SubscriberCommands {
/**
* Constructs a $client instance from a [[scredis.RedisConfig]]
*
* @param config [[scredis.RedisConfig]]
* @return the constructed $client
*/
def this(config: RedisConfig)(implicit system: ActorSystem) = this(
host = config.Redis.Host,
port = config.Redis.Port,
passwordOpt = config.Redis.PasswordOpt,
nameOpt = config.Redis.NameOpt,
connectTimeout = config.IO.ConnectTimeout,
receiveTimeoutOpt = config.IO.ReceiveTimeoutOpt,
maxWriteBatchSize = config.IO.MaxWriteBatchSize,
tcpSendBufferSizeHint = config.IO.TCPSendBufferSizeHint,
tcpReceiveBufferSizeHint = config.IO.TCPReceiveBufferSizeHint,
akkaListenerDispatcherPath = config.IO.Akka.ListenerDispatcherPath,
akkaIODispatcherPath = config.IO.Akka.IODispatcherPath,
akkaDecoderDispatcherPath = config.IO.Akka.DecoderDispatcherPath
)
/**
* Constructs a $client instance from a $tc
*
* @note The config must contain the scredis object at its root.
* This constructor is equivalent to {{{
* new Client(config, "scredis")
* }}}
*
* @param config $tc
* @return the constructed $client
*/
def this(config: Config)(implicit system: ActorSystem) = this(RedisConfig(config))
/**
* Constructs a $client instance from a config file.
*
* @note The config file must contain the scredis object at its root.
* This constructor is equivalent to {{{
* new Client(configName, "scredis")
* }}}
*
* @param configName config filename
* @return the constructed $client
*/
def this(configName: String)(implicit system: ActorSystem) = this(RedisConfig(configName))
/**
* Constructs a $client instance from a config file and using the provided path.
*
   * @note The path must point to the scredis object, e.g. x.y.scredis
*
* @param configName config filename
* @param path path pointing to the scredis config object
* @return the constructed $client
*/
def this(configName: String, path: String)(implicit system: ActorSystem) = this(
RedisConfig(configName, path)
)
/**
* Authenticates to the server.
*
* @note Use the empty string to re-authenticate with no password.
*
* @param password the server password
* @throws $e if authentication failed
*
* @since 1.0.0
*/
def auth(password: String): Future[Unit] = authenticate(password)
/**
* Sets the current client name. If the empty string is provided, the name will be unset.
*
* @param name name to associate the client to, if empty, unsets the client name
*
* @since 2.6.9
*/
def clientSetName(name: String): Future[Unit] = setName(name)
/**
* Unsubscribes from all subscribed channels/patterns and then closes the connection.
*/
def quit(): Future[Unit] = shutdown()
watchTermination()
}
/**
* The companion object provides additional friendly constructors.
*
* @define client [[scredis.SubscriberClient]]
* @define tc com.typesafe.Config
*/
object SubscriberClient {
/**
* Creates a $client
*
* @param host server address
* @param port server port
* @param passwordOpt optional server password
* @param nameOpt optional client name (available since 2.6.9)
* @param connectTimeout connection timeout
* @param receiveTimeoutOpt optional batch receive timeout
* @param maxWriteBatchSize max number of bytes to send as part of a batch
* @param tcpSendBufferSizeHint size hint of the tcp send buffer, in bytes
* @param tcpReceiveBufferSizeHint size hint of the tcp receive buffer, in bytes
* @param akkaListenerDispatcherPath path to listener dispatcher definition
* @param akkaIODispatcherPath path to io dispatcher definition
* @param akkaDecoderDispatcherPath path to decoder dispatcher definition
*/
def apply(
host: String = RedisConfigDefaults.Redis.Host,
port: Int = RedisConfigDefaults.Redis.Port,
passwordOpt: Option[String] = RedisConfigDefaults.Redis.PasswordOpt,
nameOpt: Option[String] = RedisConfigDefaults.Redis.NameOpt,
connectTimeout: FiniteDuration = RedisConfigDefaults.IO.ConnectTimeout,
receiveTimeoutOpt: Option[FiniteDuration] = RedisConfigDefaults.IO.ReceiveTimeoutOpt,
maxWriteBatchSize: Int = RedisConfigDefaults.IO.MaxWriteBatchSize,
tcpSendBufferSizeHint: Int = RedisConfigDefaults.IO.TCPSendBufferSizeHint,
tcpReceiveBufferSizeHint: Int = RedisConfigDefaults.IO.TCPReceiveBufferSizeHint,
akkaListenerDispatcherPath: String = RedisConfigDefaults.IO.Akka.ListenerDispatcherPath,
akkaIODispatcherPath: String = RedisConfigDefaults.IO.Akka.IODispatcherPath,
akkaDecoderDispatcherPath: String = RedisConfigDefaults.IO.Akka.DecoderDispatcherPath
)(implicit system: ActorSystem): SubscriberClient = new SubscriberClient(
host = host,
port = port,
passwordOpt = passwordOpt,
nameOpt = nameOpt,
connectTimeout = connectTimeout,
receiveTimeoutOpt = receiveTimeoutOpt,
maxWriteBatchSize = maxWriteBatchSize,
tcpSendBufferSizeHint = tcpSendBufferSizeHint,
tcpReceiveBufferSizeHint = tcpReceiveBufferSizeHint,
akkaListenerDispatcherPath = akkaListenerDispatcherPath,
akkaIODispatcherPath = akkaIODispatcherPath,
akkaDecoderDispatcherPath = akkaDecoderDispatcherPath
)
/**
* Constructs a $client instance from a [[scredis.RedisConfig]]
*
* @param config [[scredis.RedisConfig]]
* @return the constructed $client
*/
def apply(config: RedisConfig)(
implicit system: ActorSystem
): SubscriberClient = new SubscriberClient(config)
/**
* Constructs a $client instance from a $tc
*
* @note The config must contain the scredis object at its root.
* This constructor is equivalent to {{{
* Client(config, "scredis")
* }}}
*
* @param config $tc
* @return the constructed $client
*/
def apply(config: Config)(
implicit system: ActorSystem
): SubscriberClient = new SubscriberClient(config)
/**
* Constructs a $client instance from a config file.
*
* @note The config file must contain the scredis object at its root.
* This constructor is equivalent to {{{
* Client(configName, "scredis")
* }}}
*
* @param configName config filename
* @return the constructed $client
*/
def apply(configName: String)(
implicit system: ActorSystem
): SubscriberClient = new SubscriberClient(configName)
/**
* Constructs a $client instance from a config file and using the provided path.
*
   * @note The path must point to the scredis object, e.g. x.y.scredis
*
* @param configName config filename
* @param path path pointing to the scredis config object
* @return the constructed $client
*/
def apply(configName: String, path: String)(
implicit system: ActorSystem
): SubscriberClient = new SubscriberClient(configName, path)
}
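// Hypothetical usage sketch, not part of the original source, using only the
// constructors and methods defined above; the actor system name, config file
// name and password are assumptions.
//
//   implicit val system: ActorSystem = ActorSystem("scredis-example")
//   val client = SubscriberClient("application.conf")   // or SubscriberClient(host = "localhost", port = 6379)
//   client.auth("secret")                // Future[Unit], authenticates the connection
//   client.clientSetName("subscriber-1") // Future[Unit], names the client (Redis >= 2.6.9)
//   client.quit()                        // unsubscribes from everything and closes the connection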
|
require 'singleton'
module Healthy
class ServerIdentity
class << self
attr_accessor :identity
def establish(string_or_method_name = nil, &block)
self.identity = if block
yield
elsif string_or_method_name.is_a? Symbol
self.send(string_or_method_name)
else
string_or_method_name
end
if identity.nil? || identity.empty?
raise ArgumentError, "server identity cannot be nil or empty"
end
end
def matches?(test)
test.nil? || test.empty? || test == identity || test.is_a?(Array) && test.include?(identity)
end
def fqdn
fqdn = `hostname --fqdn`.strip
fqdn = `hostname`.strip if fqdn == ''
fqdn
end
# TODO
def ip_and_port
end
end
end
end
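# Hypothetical usage sketch, not part of the original file, based only on the
# class methods defined above; the host names are assumptions.
#
#   Healthy::ServerIdentity.establish(:fqdn)                 # derive identity from the hostname helper
#   Healthy::ServerIdentity.establish("app-01.example.com")  # or pass a literal string
#   Healthy::ServerIdentity.establish { ENV["HOSTNAME"] }    # or compute it in a block
#
#   Healthy::ServerIdentity.matches?(nil)                    # => true (no restriction)
#   Healthy::ServerIdentity.matches?("app-01.example.com")   # => true when it equals the identity
#   Healthy::ServerIdentity.matches?(%w[app-01 app-02])      # => true when the identity is in the list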
|
import datetime
from pathlib import Path
from scripts import config_provider, csv_aggregator, chart_generator
def main():
results_dir = __get_results_dir()
csv_aggregator_config = config_provider.get_csv_aggregator_config()
agg_csv = csv_aggregator.aggregate(csv_aggregator_config, results_dir)
chart_generator_config = config_provider.get_chart_generator_config(csv_aggregator_config, agg_csv)
chart_generator.perform_chart_creation(chart_generator_config, results_dir)
def __get_results_dir() -> Path:
path = (Path(__file__).absolute().parents[1] / "results" / "reports" /
datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S"))
path.mkdir(parents=True, exist_ok=True)
return path
if __name__ == "__main__":
main()
|
# frozen_string_literal: true
module CollectionspaceMigrationTools
# mixin module for mappable entities - sets :@mapper and :@status instance variables
#
# Classes mixing this in need to have the following methods:
# - name
module Mappable
include Dry::Monads[:result]
def get_mapper
CMT::Parse::RecordMapper.call(name).either(
->(mapper){ @mapper = mapper; @status = Success(mapper) },
->(failure){ @status = Failure(failure) }
)
end
def to_monad
status
end
def to_s
name
end
end
end
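# Hypothetical usage sketch, not part of the original file: as the module
# comment above says, an including class only has to expose #name. The
# FakeEntity class and "collectionobject" name are assumptions.
#
#   class FakeEntity
#     include CMT::Mappable
#
#     def name
#       "collectionobject"
#     end
#   end
#
#   entity = FakeEntity.new
#   entity.get_mapper   # sets @mapper/@status via CMT::Parse::RecordMapper
#   entity.to_monad     # Success(mapper) or Failure(reason), usable in Dry::Monads do notation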
|
class Leakybucket::Bucket
attr_accessor :limit, :value, :leaking_callback, :key
def initialize(options = {}, key = '')
self.limit = default_options.merge(options)[:limit].to_i
self.value = self.limit
self.key = key
end
def default_options
{limit: 3}
end
def decrement
self.value -= 1
leaking! if leaking?
end
def increment
return if value >= limit
self.value += 1
end
def leaking?
value < 0
end
def leaking!
leaking_callback.(value) if leaking_callback.respond_to?(:call)
end
def reset
self.value = limit
end
end
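# Hypothetical usage sketch, not part of the original file, exercising only the
# methods defined above; the key name is an assumption.
#
#   bucket = Leakybucket::Bucket.new({ limit: 2 }, "api-calls")
#   bucket.leaking_callback = ->(value) { puts "bucket leaking, value=#{value}" }
#
#   3.times { bucket.decrement }  # value goes 1, 0, -1; the callback fires on the third call
#   bucket.leaking?               # => true
#   bucket.reset                  # value back to the limit (2)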
|
var acceptParams = require('../helper.js').acceptParams;
var testArray = require('../helper.js').testArray;
if ($.writeln !== void 0) {
var console = {
log: function(obj) {
$.writeln(obj);
}
};
} else {
var console = window.console;
}
var callback = function(currentValue, index, array) {
return (typeof currentValue === 'number');
};
console.log("================== Array.prototype.every ==================");
console.log("");
console.log("+++++++ The following should produce Error. +++++++");
console.log("");
acceptParams(Array.prototype.every, ["string"], "Array.prototype.every(string)", testArray);
acceptParams(Array.prototype.every, [void 0], "Array.prototype.every(undefined)", testArray);
acceptParams(Array.prototype.every, [1], "Array.prototype.every(number)", testArray);
acceptParams(Array.prototype.every, callback, "Array.prototype.every(callback)", null);
acceptParams(Array.prototype.every, callback, "Array.prototype.every(callback)", void 0);
console.log("");
console.log("");
console.log("+++++++ The following should be OK. +++++++");
console.log("");
var r = acceptParams(Array.prototype.every, [callback], "Array.prototype.every(callback)", testArray);
console.log(r);
console.log(testArray);
console.log("");
|
<?php
defined('BASEPATH') OR exit('No direct script access allowed');
require APPPATH . '/libraries/REST_Controller.php';
class Obat extends REST_Controller {
public function __construct()
{
parent::__construct();
$this->load->model(['m_obat']);
}
public function index_get()
{
$id = $this->get('obat_id');
if ($id == '') {
$obat = $this->m_obat->read_data();
} else {
$this->db->where('obat_id', $id);
$obat = $this->db->get('m_obat')->result_array();
}
if ($obat) {
$this->response($obat, 200);
} else {
$this->response(['Tidak Ada Data Obat Tersedia'], 404);
}
}
public function index_post()
{
$data = [
'obat_nama' => $this->input->post('obat_nama'),
'pengertian' => $this->input->post('pengertian'),
'indikasi' => $this->input->post('indikasi'),
'kontradiksi' => $this->input->post('kontradiksi'),
'efek_samping' => $this->input->post('efek_samping'),
'penggunaan' => $this->input->post('penggunaan'),
'perhatian' => $this->input->post('perhatian'),
'dosis' => $this->input->post('dosis'),
'kemasan' => $this->input->post('kemasan'),
];
$status = $this->m_obat->create_data($data);
if ($status == TRUE) {
$this->response([
'status' => TRUE,
'data' => $data,
'message' => 'Data Berhasil ditambah!',
],
REST_Controller::HTTP_CREATED);
} else {
            $this->response(['status' => FALSE, 'message' => 'Data Gagal ditambah!'], 502);
}
}
public function index_put()
{
$id = $this->put('obat_id');
$data = [
'obat_id' => $this->put('obat_id'),
'obat_nama' => $this->put('obat_nama'),
'pengertian' => $this->put('pengertian'),
'indikasi' => $this->put('indikasi'),
'kontradiksi' => $this->put('kontradiksi'),
'efek_samping' => $this->put('efek_samping'),
'penggunaan' => $this->put('penggunaan'),
'perhatian' => $this->put('perhatian'),
'dosis' => $this->put('dosis'),
'kemasan' => $this->put('kemasan'),
];
$this->db->where('obat_id', $id);
$status = $this->db->update('m_obat', $data);
if ($status) {
$this->response([
'status' => TRUE,
'data' => $data,
'message' => 'Data Berhasil diubah!',
],
REST_Controller::HTTP_CREATED);
} else {
            $this->response(['status' => FALSE, 'message' => 'Data Gagal diubah!'], 502);
}
}
public function index_delete()
{
$id = $this->delete('obat_id');
$this->db->where('obat_id', $id);
$status = $this->db->delete('m_obat');
if ($status) {
$this->response(array('status' => TRUE, 'message' => 'Data Berhasil dihapus'), 201);
} else {
            $this->response(array('status' => FALSE, 'message' => 'Data Gagal dihapus'), 502);
}
}
}
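/*
 * Hypothetical request sketch, not part of the original file. With
 * REST_Controller the index_* methods above map to the /obat endpoint;
 * the base URL is an assumption.
 *
 *   GET    http://localhost/index.php/obat                -> all drugs
 *   GET    http://localhost/index.php/obat?obat_id=1      -> one drug by id
 *   POST   http://localhost/index.php/obat                -> create (form fields: obat_nama, pengertian, ...)
 *   PUT    http://localhost/index.php/obat                -> update (body must include obat_id)
 *   DELETE http://localhost/index.php/obat                -> delete (body must include obat_id)
 */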
|
// Copyright 2019 SumUp Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package logger
import (
"os"
gsyslog "github.com/hashicorp/go-syslog"
"github.com/palantir/stacktrace"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
)
const (
// Logger encoding types.
EncodingJSON = "json"
EncodingPlain = "plain"
// LogLevelPanic level, highest level of severity. Logs and then calls panic with the
// message passed to Debug, Info, ...
LogLevelPanic = "PANIC"
// LogLevelFatal level. Logs and then calls `os.Exit(1)`. It will exit even if the
// logging level is set to Panic.
LogLevelFatal = "FATAL"
// LogLevelError level. Logs. Used for errors that should definitely be noted.
// Commonly used for hooks to send errors to an error tracking service.
LogLevelError = "ERROR"
// LogLevelWarn level. Non-critical entries that deserve eyes.
LogLevelWarn = "WARN"
// LogInfoLevel level. General operational entries about what's going on inside the
// application.
LogLevelInfo = "INFO"
// LogLevelDebug level. Usually only enabled when debugging. Very verbose logging.
LogLevelDebug = "DEBUG"
)
var (
zapLogLevels = map[string]zapcore.Level{
LogLevelPanic: zapcore.PanicLevel,
LogLevelFatal: zapcore.FatalLevel,
LogLevelError: zapcore.ErrorLevel,
LogLevelWarn: zapcore.WarnLevel,
LogLevelInfo: zapcore.InfoLevel,
LogLevelDebug: zapcore.DebugLevel,
}
defaultZapEncoderConfig = zapcore.EncoderConfig{
MessageKey: "msg",
LevelKey: "level",
TimeKey: "time",
NameKey: "logger",
CallerKey: "caller",
StacktraceKey: "stacktrace",
LineEnding: zapcore.DefaultLineEnding,
EncodeLevel: zapcore.LowercaseLevelEncoder,
EncodeTime: zapcore.ISO8601TimeEncoder,
EncodeDuration: zapcore.SecondsDurationEncoder,
EncodeCaller: zapcore.ShortCallerEncoder,
}
)
type Configuration struct {
Level string
Encoding string
StdoutEnabled bool
SyslogEnabled bool
// SyslogFacility is one of `KERN,USER,MAIL,DAEMON,AUTH,SYSLOG,LPR,NEWS,UUCP,CRON,AUTHPRIV,FTP,LOCAL0,
// LOCAL1,LOCAL2,LOCAL3,LOCAL4,LOCAL5,LOCAL6,LOCAL7`
SyslogFacility string
// SyslogTag is tag for all messages produced
SyslogTag string
}
func NewZapLogger(config Configuration) (*ZapLogger, error) {
encoder, err := newEncoder(config.Encoding, &defaultZapEncoderConfig)
if err != nil {
return nil, stacktrace.Propagate(err, "creating logger encoder failed")
}
level, err := getZapLevel(config.Level)
if err != nil {
return nil, stacktrace.Propagate(err, "creating logger failed")
}
var cores []zapcore.Core
if config.StdoutEnabled {
writer := zapcore.Lock(os.Stdout)
cores = append(cores, zapcore.NewCore(encoder, writer, level))
}
if config.SyslogEnabled {
// The syslog.LOG_INFO is used here intentionally,
// since it is just a default severity if the syslog writer is used by its own.
// All zapsyslog calls will overwrite this appropriately.
// For example logger.Debug() will use syslog.LOG_DEBUG severity.
writer, err := gsyslog.NewLogger(gsyslog.LOG_INFO, config.SyslogFacility, config.SyslogTag)
if err != nil {
return nil, stacktrace.Propagate(err, "creating syslog logging backend failed")
}
cores = append(cores, NewZapSyslogCore(level, encoder, writer))
}
logger := zap.New(
zapcore.NewTee(cores...),
zap.AddCaller(),
)
return &ZapLogger{
Logger: logger,
level: level,
}, nil
}
func newEncoder(encoding string, config *zapcore.EncoderConfig) (zapcore.Encoder, error) {
switch encoding {
case EncodingJSON:
return zapcore.NewJSONEncoder(*config), nil
case EncodingPlain:
return zapcore.NewConsoleEncoder(*config), nil
default:
return nil, stacktrace.NewError("invalid encoder type: %s", encoding)
}
}
func getZapLevel(level string) (zapcore.Level, error) {
zapLevel, ok := zapLogLevels[level]
if !ok {
return zapcore.InfoLevel, stacktrace.NewError("invalid log level %s", level)
}
return zapLevel, nil
}
// Ensure that ZapLogger implements the StructuredLogger interface.
var _ StructuredLogger = (*ZapLogger)(nil)
type ZapLogger struct {
*zap.Logger
level zapcore.Level
}
func (z *ZapLogger) GetLevel() zapcore.Level {
return z.level
}
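// Hypothetical usage sketch, not part of the original file, using only the
// types and constants defined above; the syslog tag and log message are
// assumptions.
//
//	cfg := Configuration{
//		Level:          LogLevelInfo,
//		Encoding:       EncodingJSON,
//		StdoutEnabled:  true,
//		SyslogEnabled:  false,
//		SyslogFacility: "LOCAL0",
//		SyslogTag:      "my-service",
//	}
//	log, err := NewZapLogger(cfg)
//	if err != nil {
//		panic(err)
//	}
//	log.Info("service started", zap.String("mode", "example")) // *zap.Logger is embedded in ZapLogger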
|
#!/usr/bin/env python3
from datetime import datetime
from kqueen.server import create_app
from kqueen.models import Cluster
from kqueen.models import Organization
from kqueen.models import Provisioner
from kqueen.models import User
import requests
import yaml
import os
uuid_organization = '22d8df64-4ac9-4be0-89a7-c45ea0fc85da'
uuid_jenkins = 'c88b05d6-a107-4636-a3cc-eb5c90562f8f'
uuid_local = '2d51891a-adac-4bbc-a725-eed20cc67849'
uuid_gke = '9212695e-3aad-434d-ba26-3403481d37a1'
uuid_aks = 'c02d3b7c-d06e-11e7-973c-68f72873a109'
uuid_provisioner_jenkins = 'e8de24b0-43d1-4a3c-af55-7b1d3f700554'
uuid_provisioner_local = '203c50d6-3d09-4789-8b8b-1ecb00814436'
uuid_provisioner_kubespray = '689de9a2-50e0-4fcd-b6a6-96930b5fadc9'
uuid_provisioner_gke = '516e3a8c-6c4d-49f1-8178-c6f802836618'
uuid_provisioner_aks = 'b72df8cc-d06e-11e7-97cc-68f72873a109'
kubeconfig_url = 'https://ci.mcp.mirantis.net/job/deploy-aws-k8s_ha_calico_sm/17/artifact/kubeconfig'
kubeconfig_file = 'kubeconfig_remote'
app = create_app()
with app.app_context():
# Organization and user
try:
organization = Organization(
id=uuid_organization,
name='DemoOrg',
namespace='demoorg',
created_at=datetime.utcnow()
)
organization.save()
except:
raise Exception('Adding DemoOrg organization failed')
try:
user = User.create(
None,
username='admin',
password='default',
organization=organization,
created_at=datetime.utcnow(),
role='superadmin',
active=True
)
user.save()
except:
raise Exception('Adding admin user failed')
# AWS + Jenkins
try:
provisioner = Provisioner(
user.namespace,
id=uuid_provisioner_jenkins,
name='Jenkins provisioner to AWS',
state='OK',
engine='kqueen.engines.JenkinsEngine',
parameters={
'username': 'demo',
'password': 'Demo123'
},
created_at=datetime.utcnow(),
owner=user
)
provisioner.save(check_status=False)
except:
raise Exception('Adding AWS provisioner failed')
try:
# load kubeconfig file
if os.path.isfile(kubeconfig_file):
print('Loading remote kubeconfig from {}'.format(kubeconfig_file))
kubeconfig = yaml.load(open(kubeconfig_file).read())
else:
print('Loading remote kubeconfig from {}'.format(kubeconfig_url))
kubeconfig = yaml.load(requests.get(kubeconfig_url).text)
cluster = Cluster(
user.namespace,
id=uuid_jenkins,
name='AWS kqueen testing',
state='OK',
provisioner=provisioner,
kubeconfig=kubeconfig,
created_at=datetime.utcnow(),
owner=user,
metadata={}
)
cluster.save()
except:
raise Exception('Adding AWS cluster failed')
# GKE provisioner
try:
provisioner = Provisioner(
user.namespace,
id=uuid_provisioner_gke,
name='Google Kubernetes engine',
state='OK',
engine='kqueen.engines.GceEngine',
created_at=datetime.utcnow(),
owner=user,
parameters={}
)
provisioner.save(check_status=False)
except:
raise Exception('Adding GKE provisioner failed')
try:
cluster = Cluster(
user.namespace,
id=uuid_gke,
state='OK',
name='GKE cluster, paused',
provisioner=provisioner,
created_at=datetime.utcnow(),
owner=user,
metadata={}
)
cluster.save()
except:
        raise Exception('Adding GKE cluster failed')
# AKS provisioner
try:
provisioner = Provisioner(
user.namespace,
id=uuid_provisioner_aks,
name='Azure Kubernetes Service',
state='OK',
engine='kqueen.engines.AksEngine',
created_at=datetime.utcnow(),
owner=user,
parameters={}
)
provisioner.save(check_status=False)
except:
raise Exception('Adding AKS provisioner failed')
try:
cluster = Cluster(
user.namespace,
id=uuid_aks,
state='OK',
name='AKS cluster, paused',
provisioner=provisioner,
created_at=datetime.utcnow(),
owner=user,
metadata={}
)
cluster.save()
except:
raise Exception('Adding AKS cluster failed')
# Local cluster
try:
provisioner = Provisioner(
user.namespace,
id=uuid_provisioner_local,
name='Manual provisioner',
state='OK',
engine='kqueen.engines.ManualEngine',
parameters={},
created_at=datetime.utcnow(),
owner=user
)
provisioner.save(check_status=False)
except:
raise Exception('Adding manual provisioner failed')
try:
cluster = Cluster(
user.namespace,
id=uuid_local,
name='local_cluster',
state='OK',
provisioner=provisioner,
kubeconfig=yaml.load(open('kubeconfig_localhost', 'r').read()),
created_at=datetime.utcnow(),
owner=user,
metadata={}
)
cluster.save()
except:
raise Exception('Adding local cluster failed')
# Dummy Kubespray provisioner
try:
provisioner = Provisioner(
user.namespace,
id=uuid_provisioner_kubespray,
name='Kubespray',
state='OK',
engine='kqueen.engines.ManualEngine',
created_at=datetime.utcnow(),
owner=user,
parameters={}
)
provisioner.save(check_status=False)
except:
        raise Exception('Adding Kubespray provisioner failed')
|
package org.komapper.core.dsl.expression
import kotlin.reflect.KClass
internal sealed class AggregateFunction<T : Any, S : Any> : ScalarExpression<T, S> {
internal data class Avg(val expression: ColumnExpression<*, *>) : ColumnExpression<Double, Double>,
AggregateFunction<Double, Double>() {
override val owner: TableExpression<*> get() = expression.owner
override val exteriorClass: KClass<Double> get() = Double::class
override val interiorClass: KClass<Double> = Double::class
override val columnName: String get() = expression.columnName
override val alwaysQuote: Boolean get() = expression.alwaysQuote
override val masking: Boolean get() = expression.masking
override val wrap: (Double) -> Double = { it }
override val unwrap: (Double) -> Double = { it }
}
internal object CountAsterisk : ColumnExpression<Long, Long>, AggregateFunction<Long, Long>() {
override val owner: TableExpression<*> get() = throw UnsupportedOperationException()
override val exteriorClass: KClass<Long> get() = Long::class
override val interiorClass: KClass<Long> get() = Long::class
override val columnName: String get() = throw UnsupportedOperationException()
override val alwaysQuote: Boolean get() = throw UnsupportedOperationException()
override val masking: Boolean get() = throw UnsupportedOperationException()
override val wrap: (Long) -> Long = { it }
override val unwrap: (Long) -> Long = { it }
}
internal data class Count(val expression: ColumnExpression<*, *>) : ColumnExpression<Long, Long>,
AggregateFunction<Long, Long>() {
override val owner: TableExpression<*> get() = expression.owner
override val exteriorClass: KClass<Long> get() = Long::class
override val interiorClass: KClass<Long> = Long::class
override val columnName: String get() = expression.columnName
override val alwaysQuote: Boolean get() = expression.alwaysQuote
override val masking: Boolean get() = expression.masking
override val wrap: (Long) -> Long = { it }
override val unwrap: (Long) -> Long = { it }
}
internal data class Max<T : Any, S : Any>(val expression: ColumnExpression<T, S>) :
ColumnExpression<T, S> by expression,
AggregateFunction<T, S>()
internal data class Min<T : Any, S : Any>(val expression: ColumnExpression<T, S>) :
ColumnExpression<T, S> by expression,
AggregateFunction<T, S>()
internal data class Sum<T : Any, S : Any>(val expression: ColumnExpression<T, S>) :
ColumnExpression<T, S> by expression,
AggregateFunction<T, S>()
}
|
#!/bin/bash
echo "enter any value: "
read var
echo "\"$var\" was entered"
|
module Genova
module Config
class DeployConfig < BaseConfig
def validate!
schema = File.read(Rails.root.join('lib', 'genova', 'config', 'validator', 'deploy_config.json'))
errors = JSON::Validator.fully_validate(schema, @params)
raise Exceptions::ValidationError, errors[0] if errors.size.positive?
end
def cluster(cluster)
values = (@params[:clusters] || []).find { |k| k[:name] == cluster }
raise Exceptions::ValidationError, "Cluster is undefined. [#{cluster}]" if values.nil?
values
end
def run_task(cluster, run_task)
run_tasks = cluster(cluster)[:run_tasks] || {}
values = run_tasks[run_task.to_sym]
raise Exceptions::ValidationError, "Run task is undefined. [#{run_task}]" if values.nil?
values
end
def service(cluster, service)
services = cluster(cluster)[:services] || {}
values = services[service.to_sym]
raise Exceptions::ValidationError, "Service is undefined. [#{service}]" if values.nil?
values
end
def target(target)
values = (@params[:targets] || []).find { |k| k[:name] == target }
raise Exceptions::ValidationError, "Target is undefined. [#{target}]" if values.nil?
values
end
end
end
end
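# Hypothetical deploy.yml sketch, not part of the original file, showing the
# shape the accessors above expect (clusters with services/run_tasks, plus
# targets); every name and value is an assumption.
#
#   clusters:
#     - name: production
#       services:
#         web:
#           # service settings looked up by #service('production', 'web')
#       run_tasks:
#         migrate:
#           # task settings looked up by #run_task('production', 'migrate')
#   targets:
#     - name: production-web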
|
module Main where
import qualified Pure
import qualified Monadic
import qualified Generic
main :: IO ()
main = do
putStrLn "\n== Pure ================================\n"
Pure.tests
putStrLn "\n== Monadic =============================\n"
Monadic.tests
putStrLn "\n== Generic =============================\n"
Generic.tests
putStrLn "\n========================================\n"
|
<?php
/**
* @package Soisy
*/
namespace Soisy\Includes;
use Soisy\SoisyClient;
class Helper
{
public static function isCorrectAmount($order_total): bool
{
return ($order_total >= SoisyClient::MIN_AMOUNT) && ($order_total <= SoisyClient::MAX_AMOUNT);
}
public static function htmlPriceToNumber(string $price): float
{
if (stripos($price, '</del>') !== false) {
$price = explode('</del>', $price)[1];
}
$price = strip_tags($price);
$price = self::cleanPriceByChar('€', $price);
$price = self::cleanPriceByChar(' ', $price);
$price = preg_replace('/[^\d,\.]+/', '', $price);
if (self::hasDecimals($price)) {
return self::getFloatValue($price);
}
return floatval(preg_replace('/[^\d]/', '', $price));
}
public static function getFloatValue(string $price): float
{
return intval(preg_replace('/[^\d]/', '', $price)) / 100;
}
public static function cleanPriceByChar(string $character, string $price): string
{
$parts = explode($character, $price);
foreach ($parts as $i => $part) {
$parts[$i] = trim($part);
}
return implode($character, array_unique($parts));
}
public static function hasDecimals(string $price): bool
{
$char = self::getDecimalPointChar($price);
if (!empty($char)) {
$priceParts = explode($char, $price);
if (isset($priceParts[1]) && $priceParts[1] !== '' && intval($priceParts[1]) >= 0) {
return true;
}
}
return false;
}
public static function getDecimalPointChar(string $price): string
{
if ($price[strlen($price)-1] === '.' || $price[strlen($price)-1] === ',') {
$price = substr($price, 0, strlen($price)-1);
}
$dotPos = strpos($price, '.');
$commaPos = strpos($price, ',');
$decimalSeparatorPos = strlen($price) - 3;
if ($dotPos === false && $commaPos === false) {
return '';
}
if ($dotPos === false && $commaPos !== false) {
if ($commaPos === $decimalSeparatorPos) {
return ',';
}
return '';
}
if ($dotPos !== false && $commaPos === false) {
if ($dotPos === $decimalSeparatorPos) {
return '.';
}
return '';
}
return $dotPos < $commaPos ? ',' : '.';
}
public static function isSoisyLoanQuoteCalculatedAlready(string $price): bool
{
return strpos($price, '<soisy-loan-quote') !== false;
}
}
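/*
 * Hypothetical examples, not part of the original file, showing how
 * htmlPriceToNumber() above resolves the decimal separator:
 *
 *   Helper::htmlPriceToNumber('<span>€ 1.234,56</span>'); // 1234.56 (comma is the decimal point)
 *   Helper::htmlPriceToNumber('1,234.56 €');              // 1234.56 (dot is the decimal point)
 *   Helper::htmlPriceToNumber('1.234');                   // 1234.0  (no decimal part detected)
 */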
|
import carl.gen_fingerprint as gf
import carl.viz_fingerprint as viz
# site = "http://yahoo.com"
site = "http://slate.com"
fp, res = gf.load_fingerprint("real.first_n.1.0")
yh_1 = res[site]
fp, res = gf.load_fingerprint("real.first_n.5.0")
yh_5 = res[site]
fp, res = gf.load_fingerprint("real.first_n.10.0")
yh_10 = res[site]
fp, res = gf.load_fingerprint("real.first_n.20.0")
yh_20 = res[site]
yh_dict = {"n=1": yh_1, "n=5": yh_5, "n=10": yh_10, "n=20": yh_20}
site_profile, x_max = gf.time_order_false_positive_percent(yh_dict)
viz.site_over_time(site_profile, x_max, "site_line_many.png", "")
|
using System.Collections.Generic;
using System.Runtime.Serialization;
namespace YooniK.Face.Client.Models.Responses
{
/// <summary>
/// ProcessResponse
/// </summary>
[DataContract]
public partial class ProcessResponse
{
[DataMember(Name = "biometric_type", EmitDefaultValue = false)]
public string BiometricType { get; set; }
[DataMember(Name = "x", EmitDefaultValue = false)]
public double X { get; set; } // detection center coordinate x
[DataMember(Name = "Y", EmitDefaultValue = false)]
public double Y { get; set; } // detection center coordinate y
[DataMember(Name = "width", EmitDefaultValue = false)]
public double Width { get; set; } // detection bounding box width
[DataMember(Name = "height", EmitDefaultValue = false)]
public double Height { get; set; } // detection bounding box height
[DataMember(Name = "Z", EmitDefaultValue = false)]
public double? Z { get; set; } // detection center 3D coordinate Z
[DataMember(Name = "matching_score", EmitDefaultValue = false)]
public double? MatchingScore { get; set; } // Matching score obtained with template_id person
#nullable enable
[DataMember(Name = "template", EmitDefaultValue = false)]
public string? Template { get; set; } // Biometric template
[DataMember(Name = "template_version", EmitDefaultValue = false)]
public string? TemplateVersion { get; set; } // Template version
[DataMember(Name = "matching_image", EmitDefaultValue = false)]
public string? MatchingImage { get; set; } // Thumbnail/crop used for template extraction
[DataMember(Name = "tracking_id", EmitDefaultValue = false)]
        public string? TrackingId { get; set; } // Tracking id. Available when processing video.
[DataMember(Name = "template_id", EmitDefaultValue = false)]
public string? TemplateId { get; set; } // Template Id
[DataMember(Name = "quality_metrics", EmitDefaultValue = false)]
public List<QualityMetrics>? QualityMetrics { get; set; }
[DataMember(Name = "biometric_points", EmitDefaultValue = false)]
public List<BiometricPoints>? BiometricPoints { get; set; }
#nullable disable
}
[DataContract]
public partial class QualityMetrics
{
[DataMember(Name = "value", EmitDefaultValue = false)]
public double value { get; set; } // Metric value.
[DataMember(Name = "@enum", EmitDefaultValue = false)]
#nullable enable
public string? Enum { get; set; } // String with metric value for enumerables.
[DataMember(Name = "bottom_threshold", EmitDefaultValue = false)]
#nullable disable
public double? BottomThreshold { get; set; } // Bottom threshold.
[DataMember(Name = "top_threshold", EmitDefaultValue = false)]
public double? TopThreshold { get; set; } // Top threshold.
[DataMember(Name = "test", EmitDefaultValue = false)]
public bool Test { get; set; } // Metric test according to threshold.
[DataMember(Name = "name", EmitDefaultValue = false)]
public string Name { get; set; } // Metric name.
}
[DataContract]
public partial class BiometricPoints
{
[DataMember(Name = "x")]
public int X { get; set; } // Point x coordinate.
[DataMember(Name = "Y")]
public int Y { get; set; } // Point y coordinate.
[DataMember(Name = "name")]
public string Name { get; set; } // Point coordinate name.
}
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
component_manager_lib::{
model::{
self,
hooks::*,
testing::breakpoints::*,
testing::test_hook::{Lifecycle, TestHook},
},
startup,
},
failure::{Error, ResultExt},
fuchsia_async as fasync, fuchsia_syslog as syslog,
std::sync::Arc,
};
// TODO: This is a white box test so that we can use hooks. Really this should be a black box test,
// but we need to implement stopping and/or external hooks for that to be possible.
#[fasync::run_singlethreaded(test)]
async fn destruction() -> Result<(), Error> {
syslog::init_with_tags(&[]).context("could not initialize logging")?;
// Set up model and hooks.
let root_component_url =
"fuchsia-pkg://fuchsia.com/destruction_integration_test#meta/collection_realm.cm"
.to_string();
let args = startup::Arguments {
use_builtin_process_launcher: false,
use_builtin_vmex: false,
root_component_url,
};
let model = startup::model_setup(&args, vec![]).await?;
let test_hook = TestHook::new();
let breakpoint_registry = Arc::new(BreakpointRegistry::new());
let breakpoint_receiver =
breakpoint_registry.register(vec![EventType::PostDestroyInstance]).await;
let breakpoint_hook = BreakpointHook::new(breakpoint_registry.clone());
model.root_realm.hooks.install(test_hook.hooks()).await;
model.root_realm.hooks.install(breakpoint_hook.hooks()).await;
model.look_up_and_bind_instance(model::AbsoluteMoniker::root()).await?;
// Wait for `coll:root` to be destroyed.
breakpoint_receiver
.wait_until(EventType::PostDestroyInstance, vec!["coll:root:1"].into())
.await;
// Assert that root component has no children.
let children: Vec<_> = model
.root_realm
.lock_state()
.await
.as_ref()
.expect("not resolved")
.all_child_realms()
.keys()
.map(|m| m.clone())
.collect();
assert!(children.is_empty());
// Assert the expected lifecycle events. The leaves can be stopped/destroyed in either order.
let mut events: Vec<_> = test_hook
.lifecycle()
.into_iter()
.filter_map(|e| match e {
Lifecycle::Stop(_) | Lifecycle::Destroy(_) => Some(e),
_ => None,
})
.collect();
let mut next: Vec<_> = events.drain(0..2).collect();
next.sort_unstable();
let expected: Vec<_> = vec![
Lifecycle::Stop(vec!["coll:root:1", "trigger_a:0"].into()),
Lifecycle::Stop(vec!["coll:root:1", "trigger_b:0"].into()),
];
assert_eq!(next, expected);
let next: Vec<_> = events.drain(0..1).collect();
assert_eq!(next, vec![Lifecycle::Stop(vec!["coll:root:1"].into())]);
let mut next: Vec<_> = events.drain(0..2).collect();
next.sort_unstable();
let expected: Vec<_> = vec![
Lifecycle::Destroy(vec!["coll:root:1", "trigger_a:0"].into()),
Lifecycle::Destroy(vec!["coll:root:1", "trigger_b:0"].into()),
];
assert_eq!(next, expected);
assert_eq!(events, vec![Lifecycle::Destroy(vec!["coll:root:1"].into())]);
Ok(())
}
|
package com.jamieadkins.gwent.domain.deck
object DeckConstants {
const val BASE_PROVISION_ALLOWANCE = 150
const val CARDS_IN_DECK = 25
const val BRONZE_MAX = 2
const val GOLD_MAX = 1
}
|
namespace :data_migrations do
desc "Create a race for each user, for each of their characters with a `race` value"
task create_races_from_character_races: :environment do
User.all.each do |user|
puts "Migrating user #{user.email.split('@').first}..."
user.characters.where.not(race: "").each do |character|
race = character.race
puts "\tCreating race #{race}"
new_race = user.races.where(name: race).first_or_create
character.races << new_race
end
end
puts " All done now!"
end
desc "Create a default 'English' language for all users"
task create_english_language_for_all_users: :environment do
User.all.each do |user|
puts "Adding language to #{user.email.split('@').first}"
user.languages.create(name: 'English')
end
end
desc "Create a default 'Human' race for all users"
task create_human_race_for_all_users: :environment do
User.all.each do |user|
puts "Adding human race to #{user.email.split('@').first}"
user.races.where(name: 'Human').first_or_create
end
end
end
|
// Copyright (C) 2018 Peter Wong. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
esid: pending
description: Custom species constructor is called when creating internal RegExp
info: |
RegExp.prototype [ @@matchAll ] ( string )
[...]
3. Return ? MatchAllIterator(R, string).
MatchAllIterator ( R, O )
[...]
2. If ? IsRegExp(R) is true, then
a. Let C be ? SpeciesConstructor(R, RegExp).
b. Let flags be ? ToString(? Get(R, "flags"))
c. Let matcher be ? Construct(C, R, flags).
features: [Symbol.matchAll, Symbol.species]
includes: [compareArray.js, compareIterator.js, regExpUtils.js]
---*/
var callCount = 0;
var callArgs;
var regexp = /\d/u;
regexp.constructor = {
[Symbol.species]: function(){
callCount++;
callArgs = arguments;
return /\w/g;
}
};
var str = 'a*b';
var iter = regexp[Symbol.matchAll](str);
assert.sameValue(callCount, 1);
assert.sameValue(callArgs.length, 2);
assert.sameValue(callArgs[0], regexp);
assert.sameValue(callArgs[1], 'u');
assert.compareIterator(iter, [
matchValidator(['a'], 0, str),
matchValidator(['b'], 2, str)
]);
|
---
title: Tachycardia
date: '22:08 15-12-2015'
publish_date: '22:08 15-12-2015'
taxonomy:
category:
- Blog
tag:
- Жизнь
---
Pulse of 101 at rest. That's too much.
|
require 'test_helper'
module Pyper::Pipes::Cassandra
class PaginationEncodingTest < Minitest::Should::TestCase
setup do
@pipe = PaginationEncoding.new
end
should 'encode the :paging_state status' do
state = 'sdf'
encoded = Base64.urlsafe_encode64(state)
status = {paging_state: state}
@pipe.pipe([], status)
assert_equal encoded, status[:paging_state]
end
should 'allow missing paging states' do
status = {}
@pipe.pipe([], status)
      assert_nil status[:paging_state]
end
should 'not modify the items' do
items = %w(a b)
assert_equal items, @pipe.pipe(items, {})
end
end
end
|
require "test_helper"
require "expedite/variants"
class VariantsTest < Minitest::Test
def test_register
assert_raises ::NotImplementedError do
Expedite::Variants.lookup("missing")
end
Expedite::Variants.register("dev/*") do |name|
assert name != nil
end
["dev/abc", "dev/bcd"].each do |name|
v = Expedite::Variants.lookup(name)
assert v != nil
v.after_fork(name)
end
assert_raises ::NotImplementedError do
Expedite::Variants.lookup("dev")
end
Expedite::Variants.reset
assert_raises ::NotImplementedError do
Expedite::Variants.lookup("dev/abc")
end
# Should be able to re-register
Expedite::Variants.register("dev/*")
ensure
Expedite::Variants.reset
end
end
|
@push('head_scripts')
<script src="https://js.pusher.com/7.0/pusher.min.js"></script>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.4.1/jquery.min.js"></script>
@endpush
|
/*
* Copyright (c) 2016-2018 "Neo4j Sweden, AB" [https://neo4j.com]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Attribution Notice under the terms of the Apache License 2.0
*
* This work was created by the collective efforts of the openCypher community.
* Without limiting the terms of Section 6, any Derivative Work that is not
* approved by the public consensus process of the openCypher Implementers Group
* should not be described as “Cypher” (and Cypher® is a registered trademark of
* Neo4j Inc.) or as "openCypher". Extensions by implementers or prototypes or
* proposals for change that have been documented or implemented should only be
* described as "implementation extensions to Cypher" or as "proposed changes to
* Cypher that are not yet approved by the openCypher community".
*/
package org.opencypher.okapi.relational.refactor.syntax
import org.opencypher.okapi.relational.refactor.classes.Register
import scala.language.implicitConversions
object RegisterSyntax extends RegisterSyntax
trait RegisterSyntax {
def key[D, K](defn: D)(implicit register: Register[_] { type Def = D; type Key = K }): register.Key =
register.key(defn)
implicit def registerSyntax[C, R, K, D](coll: C)(
implicit
register: Register[C] { type Ref = R; type Key = K; type Def = D }): RegisterOps[C, R, K, D] =
new RegisterOps[C, R, K, D](coll)
}
final class RegisterOps[C, R, K, D](coll: C)(
implicit
val register: Register[C] { type Ref = R; type Key = K; type Def = D }) {
def contents: Traversable[(R, D)] = register.contents(coll)
def lookup(ref: R): Option[D] = register.lookup(coll, ref)
def find(defn: D): Option[R] = register.find(coll, defn)
def findByKey(key: K): Option[R] = register.findByKey(coll, key)
def insert(defn: D): Either[R, (Option[C], R)] = register.insert(coll, defn)
def update(ref: R, defn: D): Either[R, C] = register.update(coll, ref, defn)
def remove(ref: R): Option[C] = register.remove(coll, ref)
}
|
+++
Title = "Heidi Waterhouse"
Twitter = "wiredferret"
image = "heidi-waterhouse.jpg"
type = "speaker"
linktitle = "heidi-waterhouse"
+++
|
#ifndef AARCH64_OS_LIB_STRING_H
#define AARCH64_OS_LIB_STRING_H
#include "kernel.h"
void *memcpy(void *dst, const void *src, u64 n);
void *memmove(void *dst, const void *src, u64 n);
void *memset(void *dst, int c, u64 n);
int strcmp(const char *s1, const char *s2);
u64 strlen(const char *s);
char *strcpy(char *dst, const char *src);
#endif
|
package com.daimler.mbcarkit.socket.observable
import com.daimler.mbcarkit.business.model.services.ServiceActivationStatusUpdateWrapper
import com.daimler.mbnetworkkit.socket.message.ObservableMessage
internal class ServiceActivationObservableMessage(
update: ServiceActivationStatusUpdateWrapper
) : ObservableMessage<ServiceActivationStatusUpdateWrapper>(update)
|
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace MiMundo
{
public partial class Form1 : Form
{
Image uno = null;
int x, y;
int fx, fy;
int img = 0;
double vel = 0.01;
int tiempo = 200;
int fila=0;
public Form1()
{
InitializeComponent();
uno = Image.FromFile("Recurso\\yoshi.png");
x = y = 0;
fx = fy = 0;
tick.Enabled = true;
tick.Start();
}
private void Lienzo_Paint(object sender, PaintEventArgs e)
{
Graphics g = e.Graphics;
g.DrawImage(uno, new Rectangle(x, y, 40, 40), fx+img*40,fy+fila*40,40,40, GraphicsUnit.Pixel);
}
private void Form1_KeyUp(object sender, KeyEventArgs e)
{
fila = 0;
}
private void Form1_KeyPress(object sender, KeyPressEventArgs e)
{
int distancia = (int)(vel * tiempo);
if (e.KeyChar == 'a')
{
x -=distancia;
fila = 1;
}
if (e.KeyChar == 'd')
{
x += distancia;
fila = 1;
}
}
private void tick_Tick(object sender, EventArgs e)
{
img++;
if (img > 3) img = 0;
CollisionCheck();
Lienzo.Invalidate();
}
private void CollisionCheck()
{
}
}
}
|
import {
Body,
Controller,
Delete,
Get,
HttpCode,
HttpStatus,
Param,
Post,
Put,
Query,
UseGuards,
} from '@nestjs/common';
import { ApiOperation, ApiTags } from '@nestjs/swagger';
import { MongoIdPipe } from 'src/common/mongo-id.pipe';
import { ProductsService } from 'src/products/services/products.service';
import { JwtAuthGuard } from 'src/auth/guards/jwt-auth.guard';
import { Public } from 'src/auth/decorators/public.decorator';
import {
CreateProductDto,
FilterProductsDto,
UpdateProductDto,
} from 'src/products/dto/product.dto';
import { Roles } from 'src/auth/decorators/role.decorator';
import { Role } from 'src/auth/models/roles.model';
import { RolesGuard } from 'src/auth/guards/roles.guard';
@UseGuards(JwtAuthGuard, RolesGuard)
@ApiTags('products')
@Controller('products')
export class ProductsController {
constructor(private productService: ProductsService) {}
@ApiOperation({ summary: 'List of all products' })
@Public()
@Get()
getAll(@Query() params: FilterProductsDto) {
return this.productService.findAll(params);
}
@Public()
@Get(':productId')
@HttpCode(HttpStatus.ACCEPTED)
get(@Param('productId', MongoIdPipe) productId: string) {
return this.productService.findOne(productId);
}
@Roles(Role.ADMIN)
@Post()
create(@Body() payload: CreateProductDto) {
return this.productService.create(payload);
}
@Put(':id')
update(
@Param('id', MongoIdPipe) id: string,
@Body() payload: UpdateProductDto,
) {
return this.productService.update(id, payload);
}
@Delete(':id')
delete(@Param('id', MongoIdPipe) id: string) {
return this.productService.delete(id);
}
}
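// Hypothetical request sketch, not part of the original file, derived from the
// decorators above; the example id placeholder is an assumption.
//
//   GET    /products            -> public, list (query params bound to FilterProductsDto)
//   GET    /products/<mongoId>  -> public, single product
//   POST   /products            -> JWT + ADMIN role required, CreateProductDto body
//   PUT    /products/<mongoId>  -> JWT required, UpdateProductDto body
//   DELETE /products/<mongoId>  -> JWT required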
|
<?php
namespace Biplane\YandexDirect\Api\V4\Contract;
/**
* Auto-generated code.
*/
class CreditLimitsInfo
{
protected $Currency = null;
protected $Limits = [];
/**
* Creates a new instance of CreditLimitsInfo.
*
* @return self
*/
public static function create()
{
return new self();
}
/**
* Gets Currency.
*
* @return string
*/
public function getCurrency()
{
return $this->Currency;
}
/**
* Sets Currency.
*
* @param string $value
* @return $this
*/
public function setCurrency($value)
{
$this->Currency = $value;
return $this;
}
/**
* Gets Limits.
*
* @return CreditLimitsItem[]
*/
public function getLimits()
{
return $this->Limits;
}
/**
* Sets Limits.
*
* @param CreditLimitsItem[] $value
* @return $this
*/
public function setLimits(array $value)
{
$this->Limits = $value;
return $this;
}
}
|
# frozen_string_literal: true
ActiveAdmin.register Subdomain do
belongs_to :domain
navigation_menu :domain
actions :all, except: %i[destroy edit new show]
config.filters = false
index do
column :id
column :title
column :domain
column :name
column 'Score Units', :score_units do |subdomain|
link_to subdomain.score_units.size.to_s, admin_subdomain_score_units_path(subdomain.id)
end
end
end
|
# v2ex Hot Topics
`Last updated: 2021-08-27 23:10:45 +0800`
1. [Getting ready for my company's developer contest, help me come up with a name](https://www.v2ex.com/t/798281)
1. [A guy on my team designed the tables like this, how should I judge it?](https://www.v2ex.com/t/798305)
1. [Question: tool-free RJ45 plug = punch-down-free keystone module + pre-made patch cable?](https://www.v2ex.com/t/798266)
1. [Someone published a blacklist of Xiamen internet companies as an openly editable online document, 500+ people are editing it and things are heated](https://www.v2ex.com/t/798356)
1. [A CNNIC report shows the number of Chinese websites falling year after year, and WeChat official accounts deserve much of the "credit"](https://www.v2ex.com/t/798288)
1. [FaceTime & WeChat video calls: which is better?](https://www.v2ex.com/t/798310)
1. [Don't memorize words, encounter them](https://www.v2ex.com/t/798373)
1. [I really like this kind of retro-style diary app](https://www.v2ex.com/t/798289)
1. [Do you wash your hands with liquid soap or bar soap? Any recommendations?](https://www.v2ex.com/t/798235)
|
#
# clickJSDialog.rb
#
#
# This file contains the JS clicker when it runs as a separate process
require 'watir/winClicker'
button = "OK"
button = ARGV[0] unless ARGV[0] == nil
sleepTime = 0
sleepTime = ARGV[1] unless ARGV[1] == nil
clicker= WinClicker.new
result = clicker.clickJavaScriptDialog( button )
clicker = nil
|
## 0.1.1 - July 2, 2013
- Fix regexp anchoring
## 0.1.0 - April 23, 2012
- Initial version
|
experimentFolder=$1
for exp in `ls ${experimentFolder}` ; do
bsub -P heberlein \
-o duotrax-${exp}.log \
singularity run \
-B /groups/heberlein/heberleinlab/Simon:/groups/heberlein/heberleinlab/Simon \
-B /groups/branson/home/leea30/jsp/settings:/groups/branson/home/leea30/jsp/settings \
-B ${experimentFolder}:${experimentFolder} \
docker://registry.int.janelia.org/heberlein/duotrax:1.0 \
-e ${experimentFolder}/${exp} \
-xml /groups/heberlein/heberleinlab/Simon/Code/ConfigFiles/Clstr3R_params.xml \
-s /groups/branson/home/leea30/jsp/settings/base
done
|
package bo.app;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.github.ddth.dao.BaseBo;
import utils.PngUtils;
public class AppBo extends BaseBo {
public final static AppBo[] EMPTY_ARRAY = new AppBo[0];
public final static AppBo newInstance() {
AppBo user = new AppBo();
return user;
}
public final static AppBo newInstance(String id) {
AppBo user = newInstance();
user.setId(id);
return user;
}
/*----------------------------------------------------------------------*/
private final static String ATTR_ID = "id";
private final static String ATTR_IS_DISABLED = "disabled";
private final static String ATTR_API_KEY = "api_key";
private final static String ATTR_IOS_P12_CONTENT = "ios_p12";
private final static String ATTR_IOS_P12_PASSWORD = "ios_p12_pwd";
@JsonIgnore
public String getId() {
return getAttribute(ATTR_ID, String.class);
}
public AppBo setId(String id) {
setAttribute(ATTR_ID, id != null ? id.trim().toLowerCase() : null);
return this;
}
@JsonIgnore
public boolean isDisabled() {
Integer value = getAttribute(ATTR_IS_DISABLED, Integer.class);
return value != null ? value.intValue() > 0 : false;
}
public AppBo setDisabled(int value) {
setAttribute(ATTR_IS_DISABLED, value != 0 ? 1 : 0);
return this;
}
public AppBo setDisabled(boolean value) {
setAttribute(ATTR_IS_DISABLED, value ? 1 : 0);
return this;
}
@JsonIgnore
public String getApiKey() {
return getAttribute(ATTR_API_KEY, String.class);
}
public AppBo setApiKey(String apiKey) {
setAttribute(ATTR_API_KEY, apiKey != null ? apiKey.trim().toLowerCase() : "");
return this;
}
@JsonIgnore
private byte[] iOSP12ContentRaw = null;
@JsonIgnore
public byte[] getIOSP12ContentRaw() {
if (iOSP12ContentRaw == null) {
iOSP12ContentRaw = PngUtils.base64Decode(getIOSP12Content());
}
return iOSP12ContentRaw;
}
@JsonIgnore
public String getIOSP12Content() {
return getAttribute(ATTR_IOS_P12_CONTENT, String.class);
}
public AppBo setIOSP12Content(String iOSP12Content) {
String content = iOSP12Content != null ? iOSP12Content.trim() : "";
setAttribute(ATTR_IOS_P12_CONTENT, content);
iOSP12ContentRaw = PngUtils.base64Decode(content);
return this;
}
public AppBo setIOSP12Content(byte[] iOSP12ContentRaw) {
String content = PngUtils.base64Encode(iOSP12ContentRaw);
this.iOSP12ContentRaw = iOSP12ContentRaw;
setAttribute(ATTR_IOS_P12_CONTENT, content);
return this;
}
@JsonIgnore
public String getIOSP12Password() {
return getAttribute(ATTR_IOS_P12_PASSWORD, String.class);
}
public AppBo setIOSP12Password(String iOSP12Password) {
setAttribute(ATTR_IOS_P12_PASSWORD, iOSP12Password != null ? iOSP12Password.trim() : "");
return this;
}
}
|
<?php
/**
* Blackbox tests for all extensions, compares imagesizes/filesizes
**/
class CacheUncachedTest extends \PHPUnit\Framework\TestCase
{
// {{{ setUp
/**
* setup function
**/
public function setUp():void
{
$this->cache = \Depage\Cache\Cache::factory("test", array(
'disposition' => 'uncached',
));
}
// }}}
// {{{ testSetGet
/**
* Tests basic getter and setter
**/
public function testSetGetSimpleString()
{
$var = "This is a test content";
$key = "test";
$this->cache->set($key, $var);
$this->assertFalse($this->cache->get($key));
}
// }}}
// {{{ testExists
/**
* Tests basic exists test
**/
public function testExists()
{
$var = "This is a test content";
$key1 = "test1";
$key2 = "test2";
$this->cache->set($key1, $var);
$this->cache->set($key2, $var);
        $this->cache->delete($key2);
$this->assertFalse($this->cache->exist($key1));
$this->assertFalse($this->cache->exist($key2));
}
// }}}
// {{{ testDelete
/**
* Tests basic return value for unset keys
**/
public function testDelete()
{
$key = "key";
$content = "This is the content";
$this->cache->set($key, $content);
$this->cache->delete($key);
$this->assertFalse($this->cache->get($key));
}
// }}}
// {{{ testNonExistant
/**
* Tests basic return value for unset keys
**/
public function testNonExistant()
{
$this->assertFalse($this->cache->get("key"));
}
// }}}
// {{{ testClear
/**
* Tests clear function
**/
public function testClear()
{
$key = "key";
$content = "This is the content";
$this->cache->set($key, $content);
$this->cache->clear();
$this->assertFalse($this->cache->get($key));
}
// }}}
}
/* vim:set ft=php sts=4 fdm=marker et : */
|
require 'spec_helper'
describe PerfLab do
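# A lab whose improved lambda does a tenth of the work of the existing one,
# and whose equality check always reports the two results as equivalent.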
let(:lab) do
described_class.configure do |config|
config.existing -> { 100.times; 100 }
config.improved -> { 10.times; 10 }
config.equality ->(_existing_result, _improved_result) { true }
end
end
describe 'profiling' do
let(:directory) { 'spec' }
let(:filename) { 'perflab-profiler-test.dump' }
let(:path) { "#{directory}/#{filename}" }
before do
stub_const('PerfLab::Profiler::DIRECTORY', directory)
stub_const('PerfLab::Profiler::FILENAME', filename)
end
describe '.profile' do
it 'profiles the improved lambda and creates an output file' do
lab.profile
expect(File.exist?(path)).to eq(true)
end
end
describe '.profile_existing' do
it 'profiles the existing lambda and creates an output file' do
lab.profile_existing
expect(File.exist?(path)).to eq(true)
end
end
after do
FileUtils.rm(path)
end
end
describe '.bmbm' do
it 'creates a benchmark with existing and improved lambdas' do
results = lab.bmbm
expect(results.size).to eq(2)
results.each do |result|
expect(result).to be_a(Benchmark::Tms)
end
end
end
describe '.bmbm_improved' do
it 'creates a benchmark only with the improved lambda' do
results = lab.bmbm_improved
expect(results.size).to eq(1)
expect(results.first).to be_a(Benchmark::Tms)
end
end
describe '.ips' do
it 'creates a benchmark with existing and improved lambdas' do
result = lab.ips
expect(result).to be_a(Benchmark::IPS::Report)
end
end
describe '.ips_improved' do
it 'creates a benchmark only with improved lambda' do
result = lab.ips_improved
expect(result).to be_a(Benchmark::IPS::Report)
end
end
describe '.ipsa' do
it 'creates a benchmark with existing and improved lambdas' do
result = lab.ipsa
expect(result).to be_a(Benchmark::IPS::Report)
end
end
describe '.ipsa_improved' do
it 'creates a benchmark only with improved lambda' do
result = lab.ipsa_improved
expect(result).to be_a(Benchmark::IPS::Report)
end
end
describe 'equality' do
context 'when the equality is provided' do
it 'runs the lambda to verify correctness' do
expect(lab).to be_correct
end
end
context 'when the equality is not provided' do
it 'compares the return values of the existing and improved lambdas to verify correctness' do
lab = described_class.configure do |config|
config.existing -> { 100.times; 100 }
config.improved -> { 10.times; 10 }
end
expect(lab).to_not be_correct
end
end
end
describe 'performance improvement calculation' do
context 'when improved is faster' do
it 'prints a message saying improved is faster' do
described_class::Util.print_performance_improvement(10, 5)
end
end
context 'when improved is slower' do
it 'prints a message saying improved is slower' do
described_class::Util.print_performance_improvement(10, 15)
end
end
context 'when there is no difference' do
it 'prints a message saying there is no difference' do
described_class::Util.print_performance_improvement(10, 10)
end
end
end
end
|
program flocator
!
!@(#) a routine to demonstrate using locator.
!
!(LICENSE:PD)
use M_draw
character(len=20) :: dev
integer bt, BLACK, GREEN, BLUE
real x, y, sx, sy
logical act, curpnt
parameter (BLACK = 0, GREEN = 2, BLUE = 4)
print*,'Enter device name:'
read(*,'(a)') dev
call vinit(dev)
call color(BLACK)
call clear
call color(BLUE)
!
! draw some axes
!
call move2(0.0, 1.0)
call draw2(0.0, -1.0)
call move2(1.0, 0.0)
call draw2(-1.0, 0.0)
call color(GREEN)
act = .false.
curpnt = .false.
!
! locator reports whether a mouse button has been pressed.
! On a device such as the Tektronix, where you have to wait
! for a keypress to get the position of the crosshairs,
! locator returns 0 automatically on every second call.
! A return value of 1 indicates the first mouse button was
! pressed; a return value of 2 indicates the second mouse
! button was pressed. We wait for locator to return zero so
! that we know the mouse button has been released.
!
1 continue
bt = locator(x, y)
if (bt .eq. -1) then
call vexit
print*,'No locator device found'
stop
else if (bt .eq. 2) then
call vexit
stop
else if (bt .eq. 0) then
act = .true.
else if (act) then
act = .false.
if (bt .eq. 1) then
if (curpnt) then
call move2(sx, sy)
call draw2(x, y)
curpnt = .false.
else
curpnt = .true.
end if
sx = x
sy = y
end if
end if
goto 1
end program flocator
|
package com.common.util.excel;
import com.common.util.domain.ExcelData;
import com.google.common.base.Joiner;
import lombok.extern.slf4j.Slf4j;
import org.apache.poi.hssf.usermodel.*;
import org.apache.poi.ss.usermodel.Font;
import org.apache.poi.ss.usermodel.*;
import org.apache.poi.xssf.usermodel.XSSFCellStyle;
import org.apache.poi.xssf.usermodel.XSSFColor;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.apache.poi.xssf.usermodel.extensions.XSSFCellBorder;
import javax.servlet.http.HttpServletResponse;
import java.awt.Color;
import java.io.BufferedOutputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
@Slf4j(topic = "ExportExcelUtils")
public class ExportExcelUtils {
public static void exportExcel(HttpServletResponse response, String fileName, ExcelData data) throws Exception {
// Tell the browser which application should be used to open this file
response.setHeader("content-Type", "application/vnd.ms-excel");
// Default name of the downloaded file
response.setHeader("Content-Disposition", "attachment;filename="+ URLEncoder.encode(fileName, "utf-8"));
exportExcel(data, response.getOutputStream());
}
public static void exportExcel(ExcelData data, OutputStream out) throws Exception {
XSSFWorkbook wb = new XSSFWorkbook();
try {
String sheetName = data.getName();
if (null == sheetName) {
sheetName = "Sheet1";
}
XSSFSheet sheet = wb.createSheet(sheetName);
writeExcel(wb, sheet, data);
wb.write(out);
} finally {
wb.close();
}
}
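/** Writes the title row and the data rows into the sheet, then auto-sizes the columns. */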
private static void writeExcel(XSSFWorkbook wb, Sheet sheet, ExcelData data) {
int rowIndex = 0;
rowIndex = writeTitlesToExcel(wb, sheet, data.getTitles());
writeRowsToExcel(wb, sheet, data.getRows(), rowIndex);
autoSizeColumns(sheet, data.getTitles().size() + 1);
}
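/** Writes the header row at row 0 with a bold, centered, grey-filled style and returns the next free row index. */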
private static int writeTitlesToExcel(XSSFWorkbook wb, Sheet sheet, List<String> titles) {
int rowIndex = 0;
int colIndex = 0;
Font titleFont = wb.createFont();
titleFont.setFontName("simsun");
titleFont.setBold(true);
// titleFont.setFontHeightInPoints((short) 14);
titleFont.setColor(IndexedColors.BLACK.index);
XSSFCellStyle titleStyle = wb.createCellStyle();
titleStyle.setAlignment(HorizontalAlignment.CENTER);
titleStyle.setVerticalAlignment(VerticalAlignment.CENTER);
titleStyle.setFillForegroundColor(new XSSFColor(new Color(182, 184, 192)));
titleStyle.setFillPattern(FillPatternType.SOLID_FOREGROUND);
titleStyle.setFont(titleFont);
setBorder(titleStyle, BorderStyle.THIN, new XSSFColor(new Color(0, 0, 0)));
Row titleRow = sheet.createRow(rowIndex);
// titleRow.setHeightInPoints(25);
colIndex = 0;
for (String field : titles) {
Cell cell = titleRow.createCell(colIndex);
cell.setCellValue(field);
cell.setCellStyle(titleStyle);
colIndex++;
}
rowIndex++;
return rowIndex;
}
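/** Writes one sheet row per data row, rendering each cell as text, and returns the next free row index. */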
private static int writeRowsToExcel(XSSFWorkbook wb, Sheet sheet, List<List<Object>> rows, int rowIndex) {
int colIndex = 0;
Font dataFont = wb.createFont();
dataFont.setFontName("simsun");
// dataFont.setFontHeightInPoints((short) 14);
dataFont.setColor(IndexedColors.BLACK.index);
XSSFCellStyle dataStyle = wb.createCellStyle();
dataStyle.setAlignment(HorizontalAlignment.CENTER);
dataStyle.setVerticalAlignment(VerticalAlignment.CENTER);
dataStyle.setFont(dataFont);
setBorder(dataStyle, BorderStyle.THIN, new XSSFColor(new Color(0, 0, 0)));
for (List<Object> rowData : rows) {
Row dataRow = sheet.createRow(rowIndex);
// dataRow.setHeightInPoints(25);
colIndex = 0;
for (Object cellData : rowData) {
Cell cell = dataRow.createCell(colIndex);
if (cellData != null) {
cell.setCellValue(cellData.toString());
} else {
cell.setCellValue("");
}
cell.setCellStyle(dataStyle);
colIndex++;
}
rowIndex++;
}
return rowIndex;
}
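/** Auto-sizes the first columnNumber columns, adding a small margin and never shrinking a column below its original width. */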
private static void autoSizeColumns(Sheet sheet, int columnNumber) {
for (int i = 0; i < columnNumber; i++) {
int orgWidth = sheet.getColumnWidth(i);
sheet.autoSizeColumn(i, true);
int newWidth = (int) (sheet.getColumnWidth(i) + 100);
if (newWidth > orgWidth) {
sheet.setColumnWidth(i, newWidth);
} else {
sheet.setColumnWidth(i, orgWidth);
}
}
}
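/** Applies the given border style and color to all four sides of the cell style. */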
private static void setBorder(XSSFCellStyle style, BorderStyle border, XSSFColor color) {
style.setBorderTop(border);
style.setBorderLeft(border);
style.setBorderRight(border);
style.setBorderBottom(border);
style.setBorderColor(XSSFCellBorder.BorderSide.TOP, color);
style.setBorderColor(XSSFCellBorder.BorderSide.LEFT, color);
style.setBorderColor(XSSFCellBorder.BorderSide.RIGHT, color);
style.setBorderColor(XSSFCellBorder.BorderSide.BOTTOM, color);
}
public static void exportExcel(HttpServletResponse response, ExcelData data) {
log.info("导出解析开始,fileName:{}",data.getFileName());
try {
//实例化HSSFWorkbook
HSSFWorkbook workbook = new HSSFWorkbook();
//创建一个Excel表单,参数为sheet的名字
HSSFSheet sheet = workbook.createSheet("sheet");
//设置表头
List<String> titles = data.getTitles();
setTitle(workbook, sheet, titles.toArray(new String[titles.size()]));
//设置单元格并赋值
setData(sheet, data.getRows());
//设置浏览器下载
setBrowser(response, workbook, data.getFileName());
log.info("导出解析成功!");
} catch (Exception e) {
log.info("导出解析失败!");
e.printStackTrace();
}
}
private static void setTitle(HSSFWorkbook workbook, HSSFSheet sheet, String[] str) {
try {
HSSFRow row = sheet.createRow(0);
// Set the column widths; the second argument of setColumnWidth is in units of 1/256 of a character width, so multiply by 256
for (int i = 0; i <= str.length; i++) {
sheet.setColumnWidth(i, 15 * 256);
}
// Bold the header font and set a date/time cell format
HSSFCellStyle style = workbook.createCellStyle();
HSSFFont font = workbook.createFont();
font.setBold(true);
style.setFont(font);
style.setDataFormat(HSSFDataFormat.getBuiltinFormat("m/d/yy h:mm"));
// Create the header cells
HSSFCell cell;
for (int j = 0; j < str.length; j++) {
cell = row.createCell(j);
cell.setCellValue(str[j]);
cell.setCellStyle(style);
}
} catch (Exception e) {
log.info("导出时设置表头失败!");
e.printStackTrace();
}
}
private static void setData(HSSFSheet sheet, List<List<Object>> data) {
try{
int rowNum = 1;
for (int i = 0; i < data.size(); i++) {
HSSFRow row = sheet.createRow(rowNum);
for (int j = 0; j < data.get(i).size(); j++) {
row.createCell(j).setCellValue(String.valueOf(data.get(i).get(j)));
}
rowNum++;
}
log.info("表格赋值成功!");
}catch (Exception e){
log.info("表格赋值失败!");
e.printStackTrace();
}
}
private static void setBrowser(HttpServletResponse response, HSSFWorkbook workbook, String fileName) {
try {
// Clear the response
response.reset();
// Set the response headers
response.addHeader("Content-Disposition", "attachment;filename=" + fileName);
OutputStream os = new BufferedOutputStream(response.getOutputStream());
response.setContentType("application/vnd.ms-excel;charset=gb2312");
// Write the workbook to the output stream
workbook.write(os);
os.flush();
os.close();
log.info("Browser download prepared successfully!");
} catch (Exception e) {
log.info("Failed to prepare browser download!");
e.printStackTrace();
}
}
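/** Reads the first sheet of the given workbook file and returns one Object[] per row, or null if parsing fails. */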
public static List<Object[]> importExcel(String fileName) {
log.info("导入解析开始,fileName:{}",fileName);
try {
List<Object[]> list = new ArrayList<>();
InputStream inputStream = new FileInputStream(fileName);
Workbook workbook = WorkbookFactory.create(inputStream);
Sheet sheet = workbook.getSheetAt(0);
// Get the number of physically defined rows in the sheet
int rows = sheet.getPhysicalNumberOfRows();
for (int i = 0; i < rows; i++) {
// Skip the header row, if needed
// if (i == 0) {
// continue;
// }
// Read the current row; skip rows that are physically missing
Row row = sheet.getRow(i);
if (row == null) {
continue;
}
Object[] objects = new Object[row.getPhysicalNumberOfCells()];
int index = 0;
for (Cell cell : row) {
if (cell.getCellType().equals(CellType.NUMERIC)) {
objects[index] = (int) cell.getNumericCellValue();
}
if (cell.getCellType().equals(CellType.STRING)) {
objects[index] = cell.getStringCellValue();
}
if (cell.getCellType().equals(CellType.BOOLEAN)) {
objects[index] = cell.getBooleanCellValue();
}
if (cell.getCellType().equals(CellType.ERROR)) {
objects[index] = cell.getErrorCellValue();
}
index++;
}
list.add(objects);
}
log.info("导入文件解析成功!");
return list;
}catch (Exception e){
log.info("导入文件解析失败!");
e.printStackTrace();
}
return null;
}
public static void main(String[] args) {
String fileName = Thread.currentThread().getContextClassLoader().getResource("excel/table.xlsx").getPath();
List<Object[]> objects = importExcel(fileName);
Joiner joiner = Joiner.on(",")
// exclude null values
.skipNulls();
String collect = objects.stream()
.map(r -> "T1." + r[0])
.collect(Collectors.joining(","));
String collect2 = objects.subList(0, 5).stream()
.map(r -> "T1." + r[0] + " = " + "T2." + r[0])
.collect(Collectors.joining(","));
System.out.println("=================collect================");
System.out.println(collect);
System.out.println("=================collect2================");
System.out.println(collect2);
}
}
|