text
stringlengths 27
775k
|
---|
using Godot;
using System;
/// <summary>
/// Spinning-blade peripheral. While powered (ram[0] != 0) it damages enemy
/// Robots overlapping its hit area, spawning spark particles at a randomly
/// chosen victim's position.
/// </summary>
public class Chopper : Peripheral
{
    private Area hitArea;
    private Godot.Collections.Array<Robot> bodiesInRange = new Godot.Collections.Array<Robot>();
    // Position of the randomly chosen robot in range; target for the spark effect.
    private Vector3 impactPoint;

    /// <summary>
    /// Signal handler for the hit area's body entered/exited signals.
    /// Rebuilds the list of enemy robots currently overlapping the blades and
    /// picks a random impact point among them.
    /// </summary>
    public void hitAreabodyEnteredExit(Node body)
    {
        bodiesInRange.Clear();
        impactPoint = Vector3.Zero;
        Godot.Collections.Array tempBodies = hitArea.GetOverlappingBodies();
        foreach (Node b in tempBodies)
        {
            if (isEnemy(b) && !b.Equals(parent))
                bodiesInRange.Add((Robot)b);
        }
        if (bodiesInRange.Count > 0)
            impactPoint = bodiesInRange[Global.RandInt(0, bodiesInRange.Count)].Translation;
    }

    private Sparks sparkParticles;
    private Spatial blades;
    // Blade spin velocity, eased toward 0 or 1 depending on ram[0].
    private float rotvel = 0;

    public override void _Ready()
    {
        ram = new byte[1] { 0 };
        base._Ready();
        blades = GetNode<Spatial>("MainMesh/Blades");
        hitArea = GetNode<Area>("HitScan/Area");
        sparkParticles = GetNode<Sparks>("HitScan/Sparks");
    }

    public override void tickLogical(float delta)
    {
        rotvel = Mathf.MoveToward(rotvel, (ram[0] == 0 ? 0f : 1f), delta);
        if (Global.FRAME % 2 == 0 && bodiesInRange.Count > 0 && ram[0] > 0)
        {
            // Inflict damage every other frame while powered.
            // BUG FIX: prune freed entries by iterating BACKWARDS with RemoveAt.
            // The original forward loop called Remove() and then advanced i,
            // silently skipping the element that shifted into the removed slot.
            // Also use the Count property instead of the LINQ Count<Robot>() call,
            // which re-enumerated the collection on every loop iteration.
            for (int i = bodiesInRange.Count - 1; i >= 0; i--)
            {
                if (bodiesInRange[i] == null)
                    bodiesInRange.RemoveAt(i);
            }
            sparkParticles.playImpact(impactPoint, bodiesInRange, 2);
        }
    }

    public override void tickPresentational(float delta)
    {
        // Visual-only: spin the blades proportionally to the logical velocity.
        blades.Rotation += new Vector3(0, 0.2f, 0) * rotvel;
    }
}
|
using C2048.WebExtends.MVC;
using BLL;
using Model;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using Common;
namespace C2048.Controllers
{
[SkipLogin]
public class LoginController : BaseController
{
    /// <summary>
    /// Validates the uid/pwd pair and, on success, may promote the pre-login
    /// guest game state into the user's saved state. Returns a JSON payload:
    /// Code 2000 on success, 2001 on failure (messages left in Chinese for the UI).
    /// </summary>
    public ActionResult Login(string uid, string pwd)
    {
        // Login result; null when the credentials are invalid.
        var info = new LoginBLL().LoginIn(uid, pwd);
        // Game played as a guest before logging in, cached under the guest cookie key.
        var ykdata = CacheExts<GameInfo>.GetValue(LoginInfoModel.ykCookie);
        if (ykdata == null)
        {
            // No guest session yet: create a fresh game state.
            ykdata = new GameInfo();
            new GameInfoBLL().RefNum(ref ykdata);
        }
        if (info != null)
        {
            var data= CacheExts<GameInfo>.GetValue(info.UserID);
            if (data==null||data.Score < ykdata.Score)
            {
                // If the user has no saved game, or scored lower than as a guest,
                // overwrite the user's saved state with the guest state.
                CacheExts<GameInfo>.SetValue(info.UserID, ykdata,noSlidingExpiration:false);
            }
            return Json(new { Code = 2000, Msg = "登陆成功" }, JsonRequestBehavior.AllowGet);
        }
        else
        {
            return Json(new { Code = 2001, Msg = "登陆失败" }, JsonRequestBehavior.AllowGet);
        }
    }
}
}
|
package org.openfact.services.resources.admin;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import javax.inject.Inject;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.openfact.models.EmisorModel;
import org.openfact.services.managers.AppAuthManager;
import org.openfact.theme.BrowserSecurityHeaderSetup;
import org.openfact.theme.FreeMarkerException;
import org.openfact.theme.FreeMarkerUtil;
import org.openfact.theme.Theme;
import org.openfact.utils.MediaType;
/**
 * JAX-RS implementation of the admin console resource. Serves the console's
 * main page from a FreeMarker template; several endpoints are still stubs.
 */
public class AdminConsoleImpl implements AdminConsole {
    @Context
    private UriInfo uriInfo;

    // Stub: not implemented yet.
    @Override
    public Response config() {
        // TODO Auto-generated method stub
        return null;
    }

    // Stub: not implemented yet.
    @Override
    public Response whoAmI(HttpHeaders headers) {
        // TODO Auto-generated method stub
        return null;
    }

    // Stub: not implemented yet.
    @Override
    public Response logout() {
        // TODO Auto-generated method stub
        return null;
    }

    /**
     * Renders the console's index page. Requests without a trailing slash are
     * 302-redirected to the slash form so relative resource URLs resolve.
     * NOTE(review): {@code theme} is currently always null (the lookup is
     * commented out), so processTemplate likely fails and the FreeMarkerException
     * path is taken — confirm before relying on this endpoint.
     */
    @Override
    public Response getMainPage() throws URISyntaxException, IOException {
        if (!uriInfo.getRequestUri().getPath().endsWith("/")) {
            return Response.status(302).location(uriInfo.getRequestUriBuilder().path("/").build()).build();
        } else {
            Theme theme = null;/*AdminRoot.getTheme(session, realm);*/
            Map<String, Object> map = new HashMap<>();
            URI baseUri = uriInfo.getBaseUri();
            String authUrl = baseUri.toString();
            // Drop the trailing slash from the base URI.
            authUrl = authUrl.substring(0, authUrl.length() - 1);
            /*map.put("authUrl", authUrl);
            map.put("resourceUrl", Urls.themeRoot(baseUri) + "/admin/" + theme.getName());
            map.put("resourceVersion", Version.RESOURCES_VERSION);
            map.put("properties", theme.getProperties());*/
            FreeMarkerUtil freeMarkerUtil = new FreeMarkerUtil();
            String result = null;
            try {
                result = freeMarkerUtil.processTemplate(map, "index.ftl", theme);
            } catch (FreeMarkerException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
            Response.ResponseBuilder builder = Response.status(Response.Status.OK)
                    .type(MediaType.TEXT_HTML_UTF_8).language(Locale.ENGLISH).entity(result);
            //BrowserSecurityHeaderSetup.headers(builder, realm);
            return builder.build();
        }
    }

    // Redirects /index.html style requests back to the directory root.
    @Override
    public Response getIndexHtmlRedirect() {
        return Response.status(302).location(uriInfo.getRequestUriBuilder().path("../").build()).build();
    }

    // Stub: not implemented yet.
    @Override
    public Properties getMessages(String lang) {
        // TODO Auto-generated method stub
        return null;
    }
}
|
using System;
using System.Threading;
namespace PhotoSyncLib.Interface
{
    /// <summary>Engine that accumulates image paths and loads their metadata.</summary>
    public interface IPhotoSyncEngine
    {
        /// <summary>Adds an image path to the engine's working set.</summary>
        void AddImagePath(string path);
        /// <summary>
        /// Loads metadata for the added images, reporting progress through
        /// <paramref name="progress"/> and honoring <paramref name="ct"/>.
        /// </summary>
        void LoadImageMetadata(IProgress<IProgressValue> progress, CancellationToken ct);
    }

    /// <summary>Represents a set of photos. NOTE(review): currently an empty marker interface.</summary>
    public interface IPhotoSet
    {
    }
}
|
# Please Read first the READ.md for instructions.
# For every site under /var/www: update WordPress core, plugins, themes and
# translations, then purge expired transients.
for directory in /var/www/*/; do
    # Run each site in a subshell so a failed cd cannot leave us executing
    # wp-cli commands in the previous iteration's directory. BUG FIX: the old
    # `cd "$dir" && wp core update; wp plugin update ...` chain only guarded
    # the FIRST command — the remaining four ran wherever the shell happened
    # to be when cd failed.
    (
        cd "$directory" || exit
        /usr/local/bin/wp core update
        /usr/local/bin/wp plugin update --all
        /usr/local/bin/wp theme update --all
        /usr/local/bin/wp core language update
        /usr/local/bin/wp transient delete --expired
    )
done
|
def convert_hsl_to_rgb(hue: float, sat: float, lum: float, max_input=255.0, max_output=255.0):
"""Converts HSI or HSL colors into RGB.
Accepts hue, sat, and lum as floats or ints, defaulting to 0.0-255.0 range.
Returns RGB as a list of three floats, defaulting to 0.0-255.0 range.
Change max_input and max_output to scale this function to your needs;
for instance, if your input HSI values are in the range of 0.0-1.0, set max_input to 1.0,
and if you want to output RGB as 0.0-1.0 values, set max_output to 1.0.
"""
def normalize(value):
"""limits input values to the range of 0.0 through max_input"""
return (lambda n: max(min(max_input, n), 0.0))(value)
hue = normalize(hue) * (360/max_input) # converts to 0.0-360.0
sat = normalize(sat) / max_input # converts to 0.0-1.0
lum = normalize(lum) / max_input # converts to 0.0-1.0
c = lum * sat
x = c * (1 - abs((hue / 60.0) % 2 - 1))
hue_to_rgb_options = {
0: [c, x, 0],
1: [x, c, 0],
2: [0, c, x],
3: [0, x, c],
4: [x, 0, c],
5: [c, 0, x],
}
hue_option = round((hue - (hue % 60)) / 60) % 6
rgb_template = hue_to_rgb_options[hue_option]
rgb = [(n + (lum - c)) * max_output for n in rgb_template]
return rgb
|
#!/usr/bin/env bash
# code snippet by https://github.com/rockyshimithy
# Fail the build when the new test coverage drops below the recorded baseline.
current_coverage=$(cat COVERAGE)
# Extract the top-level line-rate attribute from the Cobertura XML report
# (sed reads the file directly; the original piped cat into sed needlessly).
new_coverage=$(sed -rn 's/.*coverage.*line-rate="([^"]*)".*/\1/p' xmlcov/coverage.xml)
echo "Current coverage: $current_coverage"
echo "New coverage: $new_coverage"
evaluation=$(python -c "print($new_coverage >= $current_coverage)")
# BUG FIX: $evaluation is quoted — unquoted, an empty value (e.g. when the
# report is missing) made `[` fail with a syntax error instead of branching.
if [ "$evaluation" == "True" ]; then
    echo "Success during the test coverage"
else
    # Grammar fixed in the failure message.
    echo "The coverage needs to be greater than or equal to the current coverage"
    exit 1
fi
|
# Security-scanner test fixture: each call below feeds attacker-controlled XML
# (params[:xml]) to libxml-ruby with entity substitution enabled, which is the
# classic XXE (XML external entity) injection pattern. Option bit 2048 appears
# alongside NOENT throughout; the numeric 2 lines mirror the named
# LibXML::XML::Parser::Options::NOENT usage on the .file/.io lines.
# The final call omits NOENT entirely and is marked OK.
# NOTE(review): code in a class body like this runs at load time; this file is
# intended for static analysis, not execution.
class LibXmlRubyXXE < ApplicationController
content = params[:xml]
LibXML::XML::Document.string(content, { options: 2 | 2048, encoding: 'utf-8' })
LibXML::XML::Document.file(content, { options: LibXML::XML::Parser::Options::NOENT | 2048})
LibXML::XML::Document.io(content, { options: XML::Parser::Options::NOENT | 2048 })
LibXML::XML::Parser.string(content, { options: 2 | 2048 })
LibXML::XML::Parser.file(content, { options: 3 | 2048 })
LibXML::XML::Parser.io(content, { options: 2 | 2048})
XML::Document.string(content, { options: 2 | 2048 })
XML::Parser.string(content, { options: 2 | 2048 })
LibXML::XML::Parser.file(content, { options: 2048 }) # OK
end
|
<?php
require_once 'Zend/Gdata/App/MediaSource.php';
/**
 * Base implementation of a Gdata media source: holds a content type and a
 * slug, and provides magic property access that routes through getter/setter
 * methods when they exist, falling back to underscore-prefixed properties.
 */
abstract class Zend_Gdata_App_BaseMediaSource implements Zend_Gdata_App_MediaSource
{
    /** @var string|null MIME content type of the media source */
    protected $_contentType = null;

    /** @var string|null Slug header value for the media source */
    protected $_slug = null;

    /**
     * @return string|null The current content type.
     */
    public function getContentType()
    {
        return $this->_contentType;
    }

    /**
     * @param string $value New content type.
     * @return $this Provides a fluent interface.
     */
    public function setContentType($value)
    {
        $this->_contentType = $value;
        return $this;
    }

    /**
     * @return string|null The current slug.
     */
    public function getSlug(){
        return $this->_slug;
    }

    /**
     * @param string $value New slug.
     * @return $this Provides a fluent interface.
     */
    public function setSlug($value){
        $this->_slug = $value;
        return $this;
    }

    /**
     * Magic getter: prefers getX(), then the protected _x property.
     *
     * @param string $name Property name without the underscore prefix.
     * @throws Zend_Gdata_App_InvalidArgumentException When no such property exists.
     */
    public function __get($name)
    {
        $method = 'get'.ucfirst($name);
        if (method_exists($this, $method)) {
            return call_user_func(array(&$this, $method));
        } else if (property_exists($this, '_' . $name)) {
            // FIX: was "_${name}" — the ${} string interpolation form is
            // deprecated as of PHP 8.2; plain concatenation is equivalent.
            return $this->{'_' . $name};
        } else {
            require_once 'Zend/Gdata/App/InvalidArgumentException.php';
            throw new Zend_Gdata_App_InvalidArgumentException(
                'Property ' . $name . ' does not exist');
        }
    }

    /**
     * Magic setter: prefers setX(), then writes the protected _x property.
     *
     * @param string $name Property name without the underscore prefix.
     * @param mixed $val Value to assign.
     * @throws Zend_Gdata_App_InvalidArgumentException When no such property exists.
     */
    public function __set($name, $val)
    {
        $method = 'set'.ucfirst($name);
        if (method_exists($this, $method)) {
            return call_user_func(array(&$this, $method), $val);
        } else if (isset($this->{'_' . $name}) || is_null($this->{'_' . $name})) {
            $this->{'_' . $name} = $val;
        } else {
            require_once 'Zend/Gdata/App/InvalidArgumentException.php';
            throw new Zend_Gdata_App_InvalidArgumentException(
                'Property ' . $name . ' does not exist');
        }
    }

    /**
     * Magic isset: a property counts as set when it exists, is non-null, and
     * (for arrays) is non-empty.
     *
     * @param string $name Property name without the underscore prefix.
     * @throws Zend_Gdata_App_InvalidArgumentException When no such property exists.
     */
    public function __isset($name)
    {
        $rc = new ReflectionClass(get_class($this));
        $privName = '_' . $name;
        if (!($rc->hasProperty($privName))) {
            require_once 'Zend/Gdata/App/InvalidArgumentException.php';
            throw new Zend_Gdata_App_InvalidArgumentException(
                'Property ' . $name . ' does not exist');
        } else {
            if (isset($this->{$privName})) {
                if (is_array($this->{$privName})) {
                    // Empty arrays are treated as "not set".
                    return count($this->{$privName}) > 0;
                } else {
                    return true;
                }
            } else {
                return false;
            }
        }
    }
}
|
import { RequestContext } from '@zetapush/core';
import { CloudServiceInstance } from '@zetapush/common';
/**
 * Wraps a service instance in a Proxy that exposes the per-request context.
 * Reading `requestContext` / `requestContextId` yields the injected context;
 * nested cloud-service instances are wrapped recursively so they observe the
 * same request context. All other property reads pass through untouched.
 */
export const inject = (instance: any, requestContext: RequestContext) =>
  new Proxy(instance, {
    get(target: any, property: string): any {
      if (property === 'requestContext') {
        return requestContext;
      }
      if (property === 'requestContextId') {
        return requestContext.contextId;
      }
      const value = target[property];
      // Recursively contextualize injected cloud services.
      return CloudServiceInstance.is(value) ? inject(value, requestContext) : value;
    }
  });
|
#---------------------------------------------------------------------------
# bash script to build Altair's docs
#
# we run this first with Python 2.7 to correctly create image thumbnails
# (this relies on nodejs tools that fail in Python 3.5)
# and then run again in Python 3.5 to get the final doc build.
#
# Usage: bash ./build_docs.sh # must run from altair root directory
#---------------------------------------------------------------------------
# first build docs using 2.7; this is required for nodejs tools
# (re-creating the env is skipped when it already exists)
conda create --yes -n altair-docs-27 python=2.7 || echo "conda 2.7 env exists"
# NOTE(review): `source activate` is the legacy conda activation form;
# newer conda versions use `conda activate` — confirm the CI conda version.
source activate altair-docs-27
conda install --yes --file requirements.txt --channel conda-forge
conda install --yes --file doc/requirements.txt
# nodejs (vega-lite & canvas) & cairo are required for building thumbnails:
conda install --yes cairo nodejs --channel conda-forge
# install the npm packages only when vega-lite is not already resolvable
node -p "require('vega-lite/package.json').version" || npm install canvas vega-lite
# install altair & build docs
python setup.py install
cd doc
make clean
make html
cd ..
#------------------------------------------------------------------
# next re-build docs using 3.5; this will use thumbnails from above
conda create --yes -n altair-docs-35 python=3.5 || echo "conda 3.5 env exists"
source activate altair-docs-35
conda install --yes --file requirements.txt --channel conda-forge
conda install --yes --file doc/requirements.txt
# install altair & build docs
python setup.py install
cd doc
make clean
make html
cd ..
|
import React from "react";
import Avatar from "../index";
import renderer from "react-test-renderer";
// Snapshot test: default rendering with only a src.
test("The Avatar should be rendered properly", () => {
  const component = renderer.create(
    <Avatar src="https://www.gravatar.com/avatar" />
  );
  expect(component).toMatchSnapshot();
});

// Snapshot each supported size variant: small, medium, large.
test("The Avatar should adapt to the size prop", () => {
  let component = renderer.create(
    <Avatar size="small" src="https://www.gravatar.com/avatar" />
  );
  expect(component).toMatchSnapshot();
  component = renderer.create(
    <Avatar size="medium" src="https://www.gravatar.com/avatar" />
  );
  expect(component).toMatchSnapshot();
  component = renderer.create(
    <Avatar size="large" src="https://www.gravatar.com/avatar" />
  );
  expect(component).toMatchSnapshot();
});

// Snapshot the rounded (circular) variant.
test("The Avatar should adapt to the rounded prop", () => {
  const component = renderer.create(
    <Avatar rounded src="https://www.gravatar.com/avatar" />
  );
  expect(component).toMatchSnapshot();
});
|
# frozen_string_literal: true
# Factory producing a stubbed Errors::CustomError (JSON-API style error object)
# with a generic 500 payload for specs.
FactoryBot.define do
  factory :custom_error, class: Errors::CustomError do
    title { 'Server Error' }
    # NOTE(review): "occured" is a typo for "occurred", but this is a runtime
    # string — specs may assert the exact text, so confirm before fixing.
    detail { 'A server error occured.' }
    status { '500' }
  end
end
|
Spree Reorder (Repeat the last order)
============
A Spree 3.0 extension to repeat the last order in a single click of a button
## Installation
1. Add this extension to your Gemfile with this line:
```ruby
gem 'spree_reorder', github: 'spkprav/spree_reorder', branch: '3-0-stable'
```
The `branch` option is important: it must match the version of Spree you're using.
For example, use `3-0-stable` if you're using Spree `3-0-stable` or any `3.0.x` version.
2. Install the gem using Bundler:
```bash
bundle install
```
3. Restart your server
If your server was running, restart it so that it can find the assets properly.
|
// @Title: 把数字翻译成字符串 (把数字翻译成字符串 LCOF)
// @Author: Singularity0909
// @Date: 2020-10-11 23:40:14
// @Runtime: 0 ms
// @Memory: 5.8 MB
class Solution {
public:
    // Number of valid translations found for the current query.
    // BUG FIX: was declared `int cnt;` — uninitialized, which is undefined
    // behavior, and it was never reset, so reusing one Solution instance
    // accumulated counts across calls.
    int cnt = 0;

    // Depth-first enumeration of digit groupings.
    //   cur  - index currently under consideration
    //   last - index of the last committed group boundary (-1 before the start)
    void dfs(const std::string& str, int cur, int last)
    {
        if (cur == str.length()) {
            // Reached the end: this grouping is one complete translation.
            ++cnt;
            return;
        }
        // Option 1: close a group at str[cur].
        dfs(str, cur + 1, cur);
        // Option 2: extend the open group, allowed only when the digits since
        // the boundary have no leading zero and stay below 26 (letters map 0-25).
        if (cur + 1 != str.length() and str[last + 1] != '0' and std::stoi(str.substr(last + 1, cur - last + 1)) < 26) {
            dfs(str, cur + 1, last);
        }
    }

    // Counts the distinct letter translations of num (LCOF: 把数字翻译成字符串).
    int translateNum(int num)
    {
        std::string str(std::to_string(num));
        cnt = 0; // reset so repeated calls on the same instance stay correct
        dfs(str, 0, -1);
        return cnt;
    }
};
|
# Make user-local Lua and pip-installed binaries reachable, then run the
# Travis helper that downloads and builds the requested Lua interpreter.
export PATH=${PATH}:$HOME/.lua:$HOME/.local/bin
bash .travis/setup_lua.sh
|
use serde::{Serialize, Deserialize};
use super::battle::BattleResult;
#[derive(Serialize, Deserialize, Debug, Clone)]
/// Log of all battles in a round. ("nananananananana bat log")
pub struct Batlog {
    /// Battles in the order they were fought; the last entry is the one
    /// currently being recorded.
    pub fights: Vec<Battle>,
    // Toggle (0/1) selecting whether the next set_rolls/set_injury call
    // records fighter 1 or fighter 2.
    count: i32,
    pub round_no: i32,
}
impl Batlog {
pub fn new(round_no: i32) -> Self {
Batlog {
fights: Vec::new(),
count: 0,
round_no,
}
}
pub fn advance_to_next_battle(&mut self, f1: usize, f2: usize) {
self.fights.push(Battle::new(f1, f2))
}
pub fn set_rolls(&mut self, rolls: Vec<i32>) { // i dont know why i did it this way but i did
let i = self.fights.len() - 1;
if self.count == 0 {
self.fights[i].rolls_1 = rolls;
self.count = 1
}
else {
self.fights[i].rolls_2 = rolls;
self.count = 0
}
}
pub fn set_injury(&mut self, injury: Option<i32>) {
let i = self.fights.len() - 1;
if self.count == 0 {
self.fights[i].injury_1 = injury;
self.count = 1
}
else {
self.fights[i].injury_2 = injury;
self.count = 0
}
}
pub fn set_points(&mut self, points: i32) {
let i = self.fights.len() - 1;
self.fights[i].points = points
}
pub fn set_result(&mut self, r: BattleResult) {
let i = self.fights.len() - 1;
self.fights[i].result = r
}
pub fn add_events(&mut self, event: String) {
let i = self.fights.len() - 1;
let e_log = &mut self.fights[i].other_events;
e_log.push(event)
}
}
#[derive(Serialize, Deserialize, Debug, Default, Clone)]
/// Record of a single battle between two fighters.
pub struct Battle {
    pub fighter_1: usize, // index into fighter list
    pub rolls_1: Vec<i32>,
    pub injury_1: Option<i32>, // winner gets None (usually)
    pub fighter_2: usize, // index into fighter list
    pub rolls_2: Vec<i32>,
    pub injury_2: Option<i32>,
    /// Points awarded for this battle.
    pub points: i32,
    pub result: BattleResult,
    /// Free-form descriptions of noteworthy events during the battle.
    pub other_events: Vec<String>
}
impl Battle {
    /// Creates a battle between the two fighter indices with all other
    /// fields at their `Default` values.
    fn new(fighter_1: usize, fighter_2: usize) -> Self {
        Battle {
            fighter_1, fighter_2,
            ..Battle::default()
        }
    }
}
|
use crate::i18n::I18NHelper;
use rand::seq::SliceRandom;
use std::error::Error;
// Sponsor metadata is checked into the repository as YAML.
static SPONSORS_YML_PATH: &str = "src/data/sponsors.yml";
lazy_static! {
    // Parsed once on first access; panics at startup if the YAML is missing
    // or malformed (unwrap), which surfaces configuration errors early.
    static ref SPONSORS: Vec<Sponsor> = load_sponsors(SPONSORS_YML_PATH).unwrap();
}

/// One sponsor entry as stored in sponsors.yml.
#[derive(Deserialize)]
struct Sponsor {
    // Slug used to derive the logo path and the i18n lookup keys.
    id: String,
    name: String,
}

/// Reads and deserializes the sponsors YAML file.
fn load_sponsors(path: &str) -> Result<Vec<Sponsor>, Box<dyn Error>> {
    Ok(serde_yaml::from_str(&std::fs::read_to_string(path)?)?)
}

/// Template-facing view of a sponsor, with localized strings resolved.
#[derive(Serialize)]
pub(crate) struct RenderSponsor {
    name: &'static str,
    // False only for the first entry after shuffling; used by the template.
    is_not_first: bool,
    logo_path: String,
    logo_alt_i18n: String,
    description_i18n: String,
}
/// Builds the template data for the sponsors section: one localized entry per
/// sponsor, shuffled into a random order, with `is_not_first` cleared on the
/// first element only.
pub(crate) fn render_data(lang: &str) -> Vec<RenderSponsor> {
    let i18n = I18NHelper::new();
    let mut sponsors = SPONSORS
        .iter()
        .map(|s| RenderSponsor {
            name: &s.name,
            is_not_first: true, // Will be changed later
            logo_path: format!("/static/images/sponsor-logos/{}.svg", s.id),
            logo_alt_i18n: i18n.lookup(lang, &format!("sponsors-{}-alt", s.id), None),
            description_i18n: i18n.lookup(lang, &format!("sponsors-{}", s.id), None),
        })
        .collect::<Vec<_>>();
    // Randomize display order per call, then mark which entry ended up first.
    sponsors.shuffle(&mut rand::thread_rng());
    sponsors
        .iter_mut()
        .enumerate()
        .for_each(|(i, s)| s.is_not_first = i != 0);
    sponsors
}
|
module Iri.Optics.Defs
where
import Iri.Prelude
import Iri.Data
import Iri.Optics.Basics
import qualified Iri.Rendering.ByteString as A
import qualified Iri.Parsing.ByteString as B
import qualified Iri.Rendering.Text as C
import qualified Iri.Parsing.Text as D
import qualified Data.Text.Encoding as Text
-- * Definitions by source
-------------------------
-- ** Text
-------------------------
-- | Prism between 'Text' and a parsed 'Iri': renders with "Iri.Rendering.Text"
-- and parses with "Iri.Parsing.Text", returning the original text on failure.
textIriIri :: Prism' Text Iri
textIriIri =
  prism C.iri (\ text -> either (const (Left text)) Right (D.iri text))

-- | Prism between 'Text' and an 'HttpIri', composed through 'textIriIri'.
textIriHttpIri :: Prism' Text HttpIri
textIriHttpIri =
  textIriIri . iriHttpIri

-- ** ByteString
-------------------------

-- | Prism between 'ByteString' and a parsed 'Iri'.
byteStringIri :: Prism' ByteString Iri
byteStringIri =
  prism A.uri (\ bytes -> either (const (Left bytes)) Right (B.uri bytes))

-- | Prism between 'ByteString' and an 'HttpIri', composed through 'byteStringIri'.
byteStringHttpIri :: Prism' ByteString HttpIri
byteStringHttpIri =
  byteStringIri . iriHttpIri

-- | Prism between UTF-8 bytes and 'Text'; decode failures yield 'Nothing'.
byteStringTextInUtf8 :: Prism' ByteString Text
byteStringTextInUtf8 = prism' Text.encodeUtf8 (either (const Nothing) Just . Text.decodeUtf8')

-- ** IRI
-------------------------

-- | Prism focusing the subset of 'Iri's that are HTTP(S) IRIs.
iriHttpIri :: Prism' Iri HttpIri
iriHttpIri = prism' iriFromHttpIri (either (const Nothing) Just . httpIriFromIri)

-- | Lenses into the four components of an 'Iri'.
iriScheme :: Lens' Iri Scheme
iriScheme = lens (\ (Iri x _ _ _) -> x) (\ (Iri _ hierarchy query fragment) x -> Iri x hierarchy query fragment)

iriHierarchy :: Lens' Iri Hierarchy
iriHierarchy = lens (\ (Iri _ x _ _) -> x) (\ (Iri scheme _ query fragment) x -> Iri scheme x query fragment)

iriQuery :: Lens' Iri Query
iriQuery = lens (\ (Iri _ _ x _) -> x) (\ (Iri scheme hierarchy _ fragment) x -> Iri scheme hierarchy x fragment)

iriFragment :: Lens' Iri Fragment
iriFragment = lens (\ (Iri _ _ _ x) -> x) (\ (Iri scheme hierarchy query _) x -> Iri scheme hierarchy query x)

-- ** Scheme
-------------------------

-- | Lens onto the raw bytes of a 'Scheme' newtype.
schemeByteString :: Lens' Scheme ByteString
schemeByteString = lens (\ (Scheme x) -> x) (const Scheme)

-- ** Fragment
-------------------------

-- | Lens onto the raw bytes of a 'Fragment' newtype.
fragmentByteString :: Lens' Fragment ByteString
fragmentByteString = lens (\ (Fragment x) -> x) (const Fragment)
|
package com.gdxsoft.easyweb.script.display;
import java.util.ArrayList;
import java.util.HashMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.Document;
/**
 * Groups the {@code <item>} elements of a combine document by their group key,
 * remembering the order in which each group was first seen.
 */
public class HtmlCombineGrp {
    /** Group key -> items belonging to that group, in document order. */
    private HashMap<String, ArrayList<HtmlCombineItem>> map_;
    private Document combineDoc_;
    /** Group keys in first-seen order (HashMap does not preserve insertion order). */
    private ArrayList<String> grpIndex_;

    public HtmlCombineGrp(Document combineDoc) {
        combineDoc_ = combineDoc;
        map_ = new HashMap<>();
        grpIndex_ = new ArrayList<>();
    }

    public ArrayList<String> getGrpIndex() {
        return grpIndex_;
    }

    /**
     * Scans every {@code <item>} element and buckets the parsed items by group.
     * Items without a group attribute are skipped.
     */
    public void init() {
        NodeList nl = combineDoc_.getElementsByTagName("item");
        for (int i = 0; i < nl.getLength(); i++) {
            HtmlCombineItem ci = HtmlCombineItem.parseFrom(nl.item(i));
            String grp = ci.getGrp();
            if (grp == null) {
                continue;
            }
            // Single lookup on the hit path (the original did
            // containsKey + put + get, three hashes per item).
            ArrayList<HtmlCombineItem> bucket = map_.get(grp);
            if (bucket == null) {
                bucket = new ArrayList<>();
                map_.put(grp, bucket);
                grpIndex_.add(grp); // record first-seen order
            }
            bucket.add(ci);
        }
    }

    public HashMap<String, ArrayList<HtmlCombineItem>> getMap() {
        return map_;
    }

    public Document getCombineDoc() {
        return combineDoc_;
    }
}
|
package com.blog.service.impl;
import com.blog.dao.ViewsDao;
import com.blog.service.ViewsService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@Service
/**
 * Page-view statistics service: thin delegation layer over {@link ViewsDao}.
 */
public class ViewsServiceImpl implements ViewsService {
    @Autowired
    private ViewsDao viewsDao;

    /** Total view count across all time. */
    @Override
    public int getAllViewsForPeople() {
        int views = viewsDao.getAllViewsForPeople();
        return views;
    }

    /** View count for yesterday. */
    @Override
    public int getYesterdayViewsForPeople() {
        return viewsDao.getYesterdayViewsForPeople();
    }

    /** View count over the last week. */
    @Override
    public int getAWeekViewsForPeople() {
        return viewsDao.getAWeekViewsForPeople();
    }

    /** View count over the last month. */
    @Override
    public int getAMonthViewsForPeople() {
        return viewsDao.getAMonthViewsForPeople();
    }

    /** NOTE(review): not implemented — always returns 0; confirm whether callers rely on this. */
    @Override
    public int getSpanViewsForPeople() {
        return 0;
    }

    /** Increments/updates the stored view counters. */
    @Override
    public void updateViewsForPeople() {
        viewsDao.updateViewsForPeople();
    }
}
|
import * as app from '..';
import * as mobx from 'mobx';
/**
 * MobX view model for the reader's main settings dialog. Every option is
 * persisted to localStorage under a Session* key so it survives reloads.
 */
export class MainSettingsViewModel {
  /** Changes the page size, ignoring no-op updates, and persists the choice. */
  @mobx.action
  changePageSize(pageSize: app.PageSize) {
    if (pageSize === this.pageSize) return;
    this.pageSize = pageSize;
    localStorage.setItem('SessionPageSize', String(this.pageSize));
  }

  /** Shows/hides the settings dialog (not persisted). */
  @mobx.action
  toggleDialog() {
    this.showDialog = !this.showDialog;
  }

  /** Toggles one-handed mode and persists the choice. */
  @mobx.action
  toggleOptionOneHanded() {
    this.optionOneHanded = !this.optionOneHanded;
    localStorage.setItem('SessionOptionOneHanded', String(this.optionOneHanded));
  }

  /** Toggles right-to-left reading and persists the choice. */
  @mobx.action
  toggleOptionRightToLeft() {
    this.optionRightToLeft = !this.optionRightToLeft;
    localStorage.setItem('SessionOptionRightToLeft', String(this.optionRightToLeft));
  }

  // Defaults: one-handed off unless explicitly 'true'.
  @mobx.observable
  optionOneHanded = localStorage.getItem('SessionOptionOneHanded') === 'true';

  // Defaults: right-to-left ON unless explicitly 'false'.
  @mobx.observable
  optionRightToLeft = localStorage.getItem('SessionOptionRightToLeft') !== 'false';

  // Stored as a number string; 0 when never set.
  @mobx.observable
  pageSize = <app.PageSize> parseFloat(localStorage.getItem('SessionPageSize') || '0');

  @mobx.observable
  showDialog = false;
}
|
@extends('common.admin')
@section('content')
{{-- Admin listing of carousel (slideshow) images with edit/delete actions. --}}
<div class="mws-panel grid_8">
<div class="mws-panel-header">
<span><i class="icon-table"></i> 轮播图列表</span>
</div>
<div class="mws-panel-body no-padding">
<table class="mws-table">
<tr>
<th>编号</th>
<th>标题</th>
<th>预览</th>
<th>存放路径</th>
<th>操作</th>
</tr>
{{-- One row per slideshow record: id, title, thumbnail preview, path, actions. --}}
@foreach ($rs as $k=>$v)
<tr>
<td>{{$v->id}}</td>
<td>{{$v->title}}</td>
<td>
<img src="{{$v->url}}" width="100">
</td>
<td>{{$v->url}}</td>
<td>
<a href="/admin/slideshows/{{$v->id}}/edit" class="btn btn-info">修改</a>
{{-- Delete uses a spoofed DELETE method via a POST form with CSRF token. --}}
<form action="/admin/slideshows/{{$v->id}}" method="post" style="display: inline;">
{{csrf_field()}}
{{method_field('DELETE')}}
<button class="btn btn-danger" >删除</button>
</form>
</td>
</tr>
@endforeach
</table>
</div>
</div>
@stop
|
/** Presumably returns `array` with duplicates removed — confirm against the implementation. */
export declare function uniq<T>(array: T[]): T[];
/** NOTE(review): return type is `unknown[]`; if this mirrors lodash's castArray it could be `T[]` — confirm before tightening. */
export declare function castArray<T>(value: T): unknown[];
/** Checks whether `color` is usable against the given value map — semantics defined by the implementation. */
export declare function isUsableColor(color: string, values: string | {
    [key: string]: string;
}): boolean;
/** Formats a number as a string (rounding behavior defined by the implementation). */
export declare const round: (num: number) => string;
/** Converts a pixel value to a rem string. */
export declare const rem: (px: number) => string;
/** Converts a pixel value to an em string relative to `base`. */
export declare const em: (px: number, base: number) => string;
|
#!/usr/bin/env zsh
# BUG FIX: the shebang read "/usr/local/env zsh" — env lives in /usr/bin on
# standard systems, so the script failed to launch as an executable.
# Enable npm shell completion when npm is installed; `command -v` replaces the
# non-portable `which` and is silent when npm is absent.
if command -v npm >/dev/null 2>&1; then
    source <(npm completion)
fi
|
class Solution {
int MatchGroup(const vector<int>& group, const vector<int>& nums, int index) {
for (int start = index; start <= nums.size() - group.size(); start++) {
bool match = true;
for (int l = 0; l < group.size(); l++) {
if (group[l] != nums[start+l]) {
match = false;
break;
}
}
if (match) {
return start + group.size();
}
}
return -1;
}
public:
bool canChoose(vector<vector<int>>& groups, vector<int>& nums) {
int index = 0;
for (const vector<int>& group : groups) {
index = MatchGroup(group, nums, index);
if (index < 0) {
return false;
}
}
return true;
}
};
|
import React, {useEffect, useState} from 'react';
import './css/App.css';
import LineChartCO2 from './Components/LineChartCO2';
import {BrowserRouter, Route, Switch} from 'react-router-dom';
import {Col, Container, Row} from 'react-bootstrap';
import axios from 'axios';
// Dashboard page: fetches the sensor list once on mount and renders one
// CO2 line chart per sensor.
function Home() {
  const [sensors, setSensors] = useState([]);

  useEffect(() => {
    const fetchSensors = async () => {
      const options = {
        method: 'get',
        url: '/api/objects',
        // NOTE(review): axios has no "time" option — this is probably meant
        // to be "timeout" (ms) and is currently ignored; confirm.
        time: 5000
      };
      return await axios(options);
    };
    fetchSensors().then((response) => {
      setSensors(response.data.sensors);
    }).catch((error) => {
      // Fetch failures leave the sensor list empty and are only logged.
      console.log(error);
    });
  }, []);

  // One chart column per sensor, keyed by sensor name.
  const getSensors = () => {
    return sensors.map((e) => {
      return (
        <Col key={e.name} style={{paddingBottom: '30px'}} sm={12}>
          <LineChartCO2 sensor={
            {name: e.name, oid: e.oid}
          }/>
        </Col>
      );
    });
  };

  return (
    <Container>
      <h2>TinyMesh AS - VAS CO2</h2>
      <Row>
        {getSensors()}
      </Row>
    </Container>
  );
}
function App() {
return (
<BrowserRouter>
<Switch>
<Route path="/" exact component={Home}/>
</Switch>
</BrowserRouter>
);
}
export default App;
|
module GitBlog
  module Parsers
    # Strips the shared leading indentation from <pre>...</pre> blocks.
    # The outer regex captures the whitespace preceding "<pre>" into $1;
    # inside the block, every line beginning with that exact indent has it
    # removed, so preformatted content isn't indented just because the tag was.
    # NOTE: relies on Ruby's $1 still holding the outer match's capture when
    # the inner gsub's pattern is built — the interpolation happens before
    # the inner gsub runs.
    def self.fix_pres string
      string.gsub %r!([ \t]*)<pre>(.*?)</pre>!m do |match|
        match.gsub(/^#{$1}/, '')
      end
    end
  end
end
- [Kafdrop – Kafka Web UI](https://github.com/obsidiandynamics/kafdrop)
```bash
$ git clone https://github.com/obsidiandynamics/kafdrop
$ cd kafdrop
$ helm template -n geek-apps kafdrop chart \
--set image.tag=3.27.0 \
--set kafka.brokerConnect=kafka:9092 \
--set server.servlet.contextPath="/" \
--set cmdArgs="--message.format=AVRO --schemaregistry.connect=http://localhost:8080" \
--set jvm.opts="-Xms32M -Xmx1284M" > kafdrop-deploy.yaml
$ kubectl apply -f kafdrop-deploy.yaml
```
```bash
$ open http://kafdrop.geek-apps.svc.cluster.local
```
|
<?php
namespace Dg482\Red\Builders\Form\Fields\Values;
/**
 * Class FieldValues
 *
 * Ordered collection of FieldValue items, de-duplicated by id.
 *
 * @package Dg482\Red\Values
 */
class FieldValues
{
    /** @var FieldValue[] items in insertion order */
    protected array $values = [];

    /**
     * Appends a value unless one with the same id is already present.
     *
     * @param FieldValue $value
     * @return $this
     */
    public function push(FieldValue $value): FieldValues
    {
        if (!$this->has($value)) {
            // Idiomatic single-element append instead of array_push().
            $this->values[] = $value;
        }

        return $this;
    }

    /**
     * Prepends a value unless one with the same id is already present.
     *
     * @param FieldValue $value
     * @return $this
     */
    public function unshift(FieldValue $value): FieldValues
    {
        if (!$this->has($value)) {
            array_unshift($this->values, $value);
        }

        return $this;
    }

    /**
     * Removes all values from the collection.
     */
    public function clear(): void
    {
        $this->values = [];
    }

    /**
     * Whether a value with the same id already exists in the collection.
     *
     * @param FieldValue $value
     * @return bool
     */
    protected function has(FieldValue $value): bool
    {
        $result = array_filter($this->values, function (FieldValue $val) use ($value) {
            return ($value->getId() === $val->getId());
        });

        return (count($result) > 0);
    }

    /**
     * Copies the given value's payload onto the stored value with the same id.
     *
     * @param FieldValue $value
     */
    protected function update(FieldValue $value): void
    {
        array_map(function (FieldValue $val) use ($value) {
            if ($value->getId() === $val->getId()) {
                $val->setValue($value->getValue());
            }
        }, $this->values);
    }

    /**
     * Bulk update from raw arrays of the shape ['id' => ..., 'value' => ...].
     *
     * @param array $values
     */
    public function updateValues(array $values): void
    {
        array_map(function (array $value) {
            $this->update((new StringValue((int) $value['id'], (string) $value['value'])));
        }, $values);
    }

    /**
     * @return FieldValue[] All stored values in order.
     */
    public function getValues(): array
    {
        return $this->values;
    }

    /**
     * Finds the stored value with the given id.
     *
     * @param int $id
     * @return StringValue|null null when no value has that id
     */
    public function getValueById(int $id): ?StringValue
    {
        $result = array_filter($this->values, function (FieldValue $val) use ($id) {
            return ($id === $val->getId());
        });

        return (count($result)) ? current($result) : null;
    }
}
|
/**
*
*/
package fr.imie.jdbc.DAO;
import java.sql.Connection;
import java.util.List;
import fr.imie.jdbc.DTO.PersonneDTO;
import fr.imie.jdbc.ipersistence.IPersonneDAO;
import fr.imie.jdbc.itransactional.ITransation;
/**
* @author imie
*
*/
/**
 * Proxy around a real {@link IPersonneDAO}: delegates CRUD calls and takes
 * charge of connection management, so the connection accessors are
 * deliberately unsupported on the proxy itself.
 *
 * @author imie
 */
public class ProxyPersonneDAO implements ITransation, IPersonneDAO {
    /*
     * The proxy is responsible for managing the connection; callers must not
     * set or read it through the proxy.
     */
    private IPersonneDAO realPersonneDAO = null;

    public ProxyPersonneDAO(IPersonneDAO realPersonneDAO)
    {
        super();
        this.realPersonneDAO = realPersonneDAO;
    }

    /** Not supported on the proxy — the proxy owns connection handling. */
    @Override
    public void setConnection(Connection connection) {
        throw new UnsupportedOperationException("Pas sur le proxy");
    }

    /** Not supported on the proxy — the proxy owns connection handling. */
    @Override
    public Connection getConnection() {
        throw new UnsupportedOperationException("Pas sur le proxy");
    }

    @Override
    public List<PersonneDTO> findAll() {
        return realPersonneDAO.findAll();
    }

    @Override
    public PersonneDTO findById(PersonneDTO dto) {
        return realPersonneDAO.findById(dto);
    }

    @Override
    public PersonneDTO insert(PersonneDTO dto) {
        return realPersonneDAO.insert(dto);
    }

    @Override
    public PersonneDTO update(PersonneDTO dto) {
        return realPersonneDAO.update(dto);
    }

    @Override
    public void delete(PersonneDTO dto) {
        realPersonneDAO.delete(dto);
    }

    /** NOTE(review): unimplemented overload — always returns null; confirm callers. */
    @Override
    public PersonneDTO update(PersonneDTO dto, Connection connectionCaller) {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public List<PersonneDTO> findByDTO(PersonneDTO findParameter) {
        return realPersonneDAO.findByDTO(findParameter);
    }

    /** NOTE(review): unimplemented overload — always returns null; confirm callers. */
    @Override
    public List<PersonneDTO> findByDTO(PersonneDTO findParameter, Connection connectionCaller) {
        // TODO Auto-generated method stub
        return null;
    }
}
|
*AZ-104*
_AZ-900_
**Soon should get AZ-400**
__This will also be bold__
_You **can** combine them_
|
using Jammo.ParserTools;
using Microsoft.CodeAnalysis.Text;
namespace Jammo.TextAnalysis
{
    public static class IndexSpanHelper
    {
        /// <summary>
        /// Converts a Roslyn <see cref="TextSpan"/> into an <see cref="IndexSpan"/>
        /// covering the same start and end positions.
        /// </summary>
        public static IndexSpan FromTextSpan(TextSpan textSpan) =>
            new IndexSpan(textSpan.Start, textSpan.End);
    }
}
|
# Script to regenerate data/data.yml. This is only used in the gem's development.
require 'rubygems'
require 'nokogiri'
require 'open-uri'
require 'yaml'
# Scrape the Wikipedia "ZIP code prefixes" page and build a map of
# 3-digit zip prefix => { :state => .., :city => .. }, written to data/data.yml.
zip_to_state = {}
# NOTE(review): bare open() on a URL is open-uri's Kernel#open extension;
# Ruby 3+ requires URI.open — confirm the target Ruby version before running.
doc = Nokogiri::HTML(open("http://en.wikipedia.org/wiki/ZIP_code_prefixes"))
# puts doc
# The table alternates bold cells: "384 NJ" (prefix + state) then the city name.
doc.css('#bodyContent table td b').each do |code|
puts "zip prefix: #{code.content}\n"
if code.content.match /^[0-9]{3}/
# first three digits of the zip and the state. ex. 384 NJ
a = code.content.split
if a.length == 2
@current_zip = a[0].to_s
# Skip malformed state codes and the unused "001" prefix.
zip_to_state[@current_zip] = {:state => a[1][0..1]} unless a[1].length < 2 or @current_zip == "001"
end
else
# the city name ex. New York
# Make sure there are no line breaks in the name (one city has a line break)
zip_to_state[@current_zip][:city] = code.content.delete("\n")
end
end
# puts zip_to_state.to_yaml
File.open(File.join(File.dirname(__FILE__), '..', 'data', 'data.yml'), "w") do |f|
f.write zip_to_state.to_yaml
end
# Double check that the data can be loaded properly.
data = YAML.load(File.open(File.join(File.dirname(__FILE__), '..', 'data', 'data.yml')))
puts data
|
package response
import (
"github.com/ArtisanCloud/PowerWeChat/v2/src/kernel/response"
)
// ResponseOperationGetPerformance is the mini-program "get performance"
// API response: the embedded base response plus two time-series payloads
// that WeChat returns as opaque JSON strings.
type ResponseOperationGetPerformance struct {
*response.ResponseMiniProgram
DefaultTimeData string `json:"default_time_data"`
CompareTimeData string `json:"compare_time_data"`
}
|
<?php

declare(strict_types=1);

// Message map: translation keys to message templates ("%param1%" is a
// placeholder substituted by the consumer).
return [
    "xyz" => "xyz",
    "param" => "Param2: %param1%",
];
// FIX: removed the trailing "?>" — PSR-12 requires omitting the closing tag
// in PHP-only files to prevent accidental whitespace output after it.
|
// Normalizes an extent given as an envelope object or a coordinate array into
// an Esri envelope object with a WGS84 (wkid 4326) spatial reference.
module.exports = function (input) {
  if (!input) return undefined
  // Already an envelope object (numeric xmin): pass through untouched.
  if (!isNaN(input.xmin)) return input
  if (!Array.isArray(input)) {
    throw new Error('invalid extent passed in metadata')
  }
  // Accept either [[xmin, ymin], [xmax, ymax]] or flat [xmin, ymin, xmax, ymax].
  const pairs = Array.isArray(input[0])
    ? input
    : [[input[0], input[1]], [input[2], input[3]]]
  return {
    xmin: pairs[0][0],
    ymin: pairs[0][1],
    xmax: pairs[1][0],
    ymax: pairs[1][1],
    spatialReference: {
      wkid: 4326,
      latestWkid: 4326
    }
  }
}
|
import 'package:flutter/material.dart';
import 'package:wings/core/immutable/base/widgets/widget.wings.dart';
import 'package:wings/features/index/watcher/index.watcher.dart';
/// Index feature widget: installs an [IndexWatcher] on the base [WingsWidget]
/// and renders a placeholder once the watcher reaches its success state.
class IndexWidget extends WingsWidget {
  // Optional controller forwarded to the IndexWatcher driving this widget.
  final dynamic controller;

  IndexWidget({Key? key, this.controller})
      : super(key: key, watcher: IndexWatcher(controller: controller));

  // Rendered when the watcher reports success.
  @override
  Widget successState(BuildContext context) {
    return const Text('Wings Widget Example');
  }
}
|
package com.chinazyjr.haollyv2.ui.login.view
import com.chinazyjr.haollyv2.base.IBaseView
import com.chinazyjr.haollyv2.entity.login.LoginBean
import com.chinazyjr.haollyv2.entity.login.TokenBean
/**
 * Created by niudeyang on 2017/12/8.
 *
 * MVP view contract for the registration screen.
 */
interface RegisterView :IBaseView{
    // Displays the image captcha fetched with the given token.
    // NOTE(review): "Iamge" is a typo for "Image"; renaming would break all
    // implementers — confirm before changing.
    fun showIamgeCode(tokenBean: TokenBean)
    // Starts (true) or stops (false) the code-countdown UI state.
    fun countDown(b: Boolean)
    // Called when registration succeeds. NOTE(review): "Sucess" typo for "Success".
    fun showSucess()
}
|
use crate::Repository;
#[cfg(all(feature = "unstable", feature = "git-worktree"))]
pub use git_worktree::*;
/// Types related to opening a worktree's index file.
#[cfg(feature = "git-index")]
pub mod open_index {
    use crate::bstr::BString;

    /// The error returned by [`Worktree::open_index()`][crate::Worktree::open_index()].
    #[derive(Debug, thiserror::Error)]
    #[allow(missing_docs)]
    pub enum Error {
        #[error("Could not interpret value '{}' as 'index.threads'", .value)]
        ConfigIndexThreads {
            /// The raw configuration value that could not be parsed.
            value: BString,
            #[source]
            err: git_config::value::parse::Error,
        },
        #[error(transparent)]
        IndexFile(#[from] git_index::file::init::Error),
    }
}
///
#[cfg(feature = "git-index")]
pub mod excludes {
    use std::path::PathBuf;
    /// The error returned by [`Worktree::excludes()`][crate::Worktree::excludes()].
    #[derive(Debug, thiserror::Error)]
    #[allow(missing_docs)]
    pub enum Error {
        /// Reading an exclude/ignore file from disk failed.
        #[error("Could not read repository exclude.")]
        Io(#[from] std::io::Error),
        /// Access to an environment-derived path was denied by the security policy.
        #[error(transparent)]
        EnvironmentPermission(#[from] git_sec::permission::Error<PathBuf, git_sec::Permission>),
    }
}
/// A structure to make the API more structured.
pub struct Platform<'repo> {
    /// The repository this worktree platform operates on.
    pub(crate) parent: &'repo Repository,
}
/// Access
impl<'repo> crate::Worktree<'repo> {
    /// Returns the root of the worktree under which all checked out files are located.
    pub fn root(&self) -> &std::path::Path {
        // `path` is set at worktree construction; no normalization happens here.
        self.path
    }
}
impl<'repo> crate::Worktree<'repo> {
    /// Configure a file-system cache checking if files below the repository are excluded.
    ///
    /// This takes into consideration all the usual repository configuration.
    // TODO: test
    #[cfg(feature = "git-index")]
    pub fn excludes<'a>(
        &self,
        index: &'a git_index::State,
        overrides: Option<git_attributes::MatchGroup<git_attributes::Ignore>>,
    ) -> Result<git_worktree::fs::Cache<'a>, excludes::Error> {
        let repo = self.parent;
        // `core.ignorecase` decides whether glob matching folds case.
        let case = repo
            .config
            .ignore_case
            .then(|| git_glob::pattern::Case::Fold)
            .unwrap_or_default();
        let mut buf = Vec::with_capacity(512);
        // Build the ignore stack from: caller overrides, $GIT_DIR files, and
        // the configured excludes file (falling back to the XDG ignore file).
        let state = git_worktree::fs::cache::State::IgnoreStack(git_worktree::fs::cache::state::Ignore::new(
            overrides.unwrap_or_default(),
            git_attributes::MatchGroup::<git_attributes::Ignore>::from_git_dir(
                repo.git_dir(),
                match repo.config.excludes_file.as_ref() {
                    Some(user_path) => Some(user_path.to_owned()),
                    None => repo.config.xdg_config_path("ignore")?,
                },
                &mut buf,
            )?,
            None,
            case,
        ));
        let attribute_list = state.build_attribute_list(index, index.path_backing(), case);
        Ok(git_worktree::fs::Cache::new(
            self.path,
            state,
            case,
            buf,
            attribute_list,
        ))
    }
    // pub fn
    /// Open a new copy of the index file and decode it entirely.
    ///
    /// It will use the `index.threads` configuration key to learn how many threads to use.
    // TODO: test
    #[cfg(feature = "git-index")]
    pub fn open_index(&self) -> Result<git_index::File, crate::worktree::open_index::Error> {
        use std::convert::{TryFrom, TryInto};
        let repo = self.parent;
        // `index.threads` may be a boolean (true => auto/0, false => 1) or an
        // integer thread count; a non-boolean value falls through to integer
        // parsing, and an unparseable value becomes ConfigIndexThreads.
        let thread_limit = repo
            .config
            .resolved
            .boolean("index", None, "threads")
            .map(|res| {
                res.map(|value| if value { 0usize } else { 1 }).or_else(|err| {
                    git_config::values::Integer::try_from(err.input.as_ref())
                        .map_err(|err| crate::worktree::open_index::Error::ConfigIndexThreads {
                            value: err.input.clone(),
                            err,
                        })
                        .map(|value| value.to_decimal().and_then(|v| v.try_into().ok()).unwrap_or(1))
                })
            })
            .transpose()?;
        git_index::File::at(
            repo.git_dir().join("index"),
            git_index::decode::Options {
                object_hash: repo.object_hash(),
                thread_limit,
                // 0 = always consider threading, regardless of extension size.
                min_extension_block_in_bytes_for_threading: 0,
            },
        )
        .map_err(Into::into)
    }
}
|
#我的Home Assistant控件和配置
中国工作日,HA中自带了一个holiday的控件,但是没有中国的。。。
所以访问 http://www.k780.com 的API写了一个判断中国工作日的控件,试了一下包括19年五一这种比较奇葩的放假规定都可以支持。
copy custom_components 文件夹到HA的配置目录
configuration.yaml文件里添加:
```binary_sensor:
- platform: china_holiday
api_key: 10003
token: b59bc3ef6191eb9f747dd4e83c99f2a4
```
自动化添加里可以这样使用:
```
condition:
condition: state
entity_id: binary_sensor.gong_zuo_ri
    state: '工作日'   # 取值为 '工作日' 或 '休息日',按需二选一
```
|
package com.canvas.arc
import android.animation.TimeAnimator
import android.content.Context
import android.graphics.Canvas
import android.graphics.Color
import android.graphics.Paint
import android.graphics.PointF
import android.graphics.RadialGradient
import android.graphics.RectF
import android.graphics.Shader
import android.graphics.SweepGradient
import android.util.AttributeSet
import android.util.Log
import com.canvas.BaseCanvasView
import com.canvas.R
import com.extensions.RectFFactory
import com.extensions.getCompactColor
import com.extensions.px
import java.util.*
import kotlin.math.PI
import kotlin.math.cos
import kotlin.math.min
import kotlin.math.sin
/**
 * Analog clock rendered on a Canvas: face, minute/step marks, hour, minute
 * and second hands with radial-gradient strokes. Time is refreshed every
 * animation frame via a [TimeAnimator].
 */
class ClockView(
    context: Context,
    attributeSet: AttributeSet?
) : BaseCanvasView(context, attributeSet) {
    // Current wall-clock components, refreshed by startDateTime().
    private var second = 0
    private var minute = 0
    private var hour = 0
    // Drives a redraw every animation frame while attached to a window.
    private var timeAnimator: TimeAnimator? = null
    // Outer ring of the clock face.
    private val borderPaint = Paint().apply {
        isAntiAlias = true
        color = context.getCompactColor(R.color.white)
        strokeWidth = 10.px.toFloat()
        style = Paint.Style.STROKE
    }
    // Thin tick marks around the rim.
    private val stepPaint = Paint().apply {
        isAntiAlias = true
        color = context.getCompactColor(R.color.white)
        strokeWidth = 2.px.toFloat()
        style = Paint.Style.STROKE
    }
    // Small hub circle where the hands meet.
    private val centerPaint = Paint().apply {
        isAntiAlias = true
        color = context.getCompactColor(R.color.white)
    }
    // Shared by hour and minute hands; its shader is reassigned per hand draw.
    private val handsPaint = Paint().apply {
        isAntiAlias = true
        strokeCap = Paint.Cap.ROUND
        strokeWidth = 10.px.toFloat()
    }
    private val secondsPaint = Paint().apply {
        isAntiAlias = true
        strokeCap = Paint.Cap.ROUND
        strokeWidth = 5.px.toFloat()
    }
    override fun onAttachedToWindow() {
        super.onAttachedToWindow()
        // Re-read the time (and invalidate) on every animation tick.
        timeAnimator = TimeAnimator().apply {
            setTimeListener { animation, totalTime, deltaTime ->
                startDateTime()
            }
        }
        timeAnimator?.start()
    }
    override fun onDetachedFromWindow() {
        super.onDetachedFromWindow()
        // Tear the animator down completely to avoid leaking the view.
        timeAnimator?.cancel()
        timeAnimator?.removeAllListeners()
        timeAnimator?.setTimeListener(null)
        timeAnimator = null
    }
    override fun onDraw(canvas: Canvas?) {
        super.onDraw(canvas)
        canvas?.drawColor(context.getCompactColor(R.color.colorPrimary))
        // Draw order: marks first, then face, border, hands, hub on top.
        canvas?.let {
            drawSteps(it)
            drawBackground(it)
            drawBorder(it)
            drawHourHand(it)
            drawMinuteHand(it)
            drawSecondsHand(it)
            drawMiddleCircle(it)
        }
    }
    private fun drawBackground(canvas: Canvas) {
        contentPaint.color = context.getCompactColor(R.color.blue)
        val cx = getBounds().centerX()
        val cy = getBounds().centerY()
        canvas.drawCircle(cx, cy, getRadius() * 0.8f, contentPaint)
    }
    private fun drawBorder(canvas: Canvas) {
        val cx = getBounds().centerX()
        val cy = getBounds().centerY()
        canvas.drawCircle(cx, cy, getRadius() * 0.8f, borderPaint)
    }
    private fun drawMiddleCircle(canvas: Canvas) {
        val cx = getBounds().centerX()
        val cy = getBounds().centerY()
        canvas.drawCircle(cx, cy, getRadius() * 0.1f, centerPaint)
    }
    // Square bounding box of the clock, centered in the content rect.
    private fun getBounds(): RectF {
        return RectFFactory.fromCircle(
            PointF(contentRect.centerX(), contentRect.centerY()),
            getRadius()
        )
    }
    private fun drawHourHand(canvas: Canvas) {
        // NOTE(review): the local is named `radius` but actually holds an
        // angle in radians (-90° puts 12 o'clock at the top; 30°/hour plus
        // 0.5°/minute of drift). Same misnomer in the two hands below.
        val radius = Math.toRadians(-90 + hour.toDouble() * 30 + minute * 0.5)
        val endX = getBounds().centerX() + getRadius() * 0.3f * cos(radius)
        val endY = getBounds().centerY() + getRadius() * 0.3f * sin(radius)
        handsPaint.shader = RadialGradient(
            getBounds().centerX(),
            getBounds().centerY(),
            getRadius() * 0.4f,
            context.getCompactColor(R.color.orange),
            context.getCompactColor(R.color.lightOrange),
            Shader.TileMode.CLAMP
        )
        canvas.drawLine(
            getBounds().centerX(),
            getBounds().centerY(),
            endX.toFloat(),
            endY.toFloat(),
            handsPaint
        )
    }
    private fun drawMinuteHand(canvas: Canvas) {
        // 6° per minute, offset so 0 minutes points straight up.
        val radius = Math.toRadians(-90 + minute * 6.0)
        val endX = getBounds().centerX() + getRadius() * 0.5f * cos(radius)
        val endY = getBounds().centerY() + getRadius() * 0.5f * sin(radius)
        handsPaint.shader = RadialGradient(
            getBounds().centerX(),
            getBounds().centerY(),
            getRadius() * 0.6f,
            context.getCompactColor(R.color.colorPrimary),
            context.getCompactColor(R.color.colorAccent),
            Shader.TileMode.CLAMP
        )
        canvas.drawLine(
            getBounds().centerX(),
            getBounds().centerY(),
            endX.toFloat(),
            endY.toFloat(),
            handsPaint
        )
    }
    private fun drawSecondsHand(canvas: Canvas) {
        // 6° per second, offset so 0 seconds points straight up.
        val radius = Math.toRadians(-90 + second.toDouble() * 6)
        val endX = getBounds().centerX() + getRadius() * 0.7f * cos(radius)
        val endY = getBounds().centerY() + getRadius() * 0.7f * sin(radius)
        secondsPaint.shader = RadialGradient(
            getBounds().centerX(),
            getBounds().centerY(),
            getRadius() * 0.8f,
            context.getCompactColor(R.color.yellow),
            context.getCompactColor(R.color.veryLightGrey),
            Shader.TileMode.CLAMP
        )
        canvas.drawLine(
            getBounds().centerX(),
            getBounds().centerY(),
            endX.toFloat(),
            endY.toFloat(),
            secondsPaint
        )
    }
    private fun drawSteps(canvas: Canvas) {
        val centerX = getBounds().centerX()
        val centerY = getBounds().centerY()
        // One tick every 12° from rim (radius) inward to 0.9 * radius.
        // NOTE(review): 0..360 inclusive means the 0°/360° tick is drawn twice.
        for (i in 0..360 step 12) {
            val radians = Math.toRadians(-90 + i.toDouble())
            val startX = centerX + getRadius() * cos(radians).toFloat()
            val startY = centerY + getRadius() * sin(radians).toFloat()
            val endX = centerX + getRadius() * 0.9f * cos(radians).toFloat()
            val endY = centerY + getRadius() * 0.9f * sin(radians).toFloat()
            canvas.drawLine(startX, startY, endX, endY, stepPaint)
        }
    }
    private fun getRadius() = min(contentRect.right, contentRect.bottom) / 2
    // Samples the current wall clock (12-hour) and requests a redraw.
    private fun startDateTime() {
        val date = Calendar.getInstance()
        second = date.get(Calendar.SECOND)
        minute = date.get(Calendar.MINUTE)
        hour = date.get(Calendar.HOUR)
        // Log.i("TAG", "Hour: $hour - minute: $minute - second: $second")
        invalidate()
    }
}
|
'use strict';
var slug = require('./');
var test = require('tape');
// Smoke tests for the slug module: identity, lower-casing, Scandinavian
// transliteration (å→aa, ö→oe, ø→oe), trimming, and collapsing of
// punctuation/whitespace into single dashes.
test(function( t ) {
  t.equal(slug('apple'), 'apple');
  t.equal(slug('PIE'), 'pie');
  t.equal(slug('Vrå öster'), 'vraa-oester');
  t.equal(slug('  Vrå  '), 'vraa');
  t.equal(slug('Søren\'s party- and surprise store/shop'), 'soerens-party-and-surprise-store-shop');
  t.end();
});
|
#pragma once
#include "Resources/Resource.h"
// Scene asset resource. REGISTER_RESOURCE registers this class with the
// engine's resource factory under ResourceType::SCENE.
class ResourceScene : public Resource {
public:
	REGISTER_RESOURCE(ResourceScene, ResourceType::SCENE);

	// Instantiates the scene described by this resource.
	// NOTE(review): inferred from the name only — implementation lives elsewhere.
	void BuildScene();
};
|
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.InputSystem;
/// <summary>
/// Central input/cursor manager: tracks the pointer in world space and swaps
/// the hardware cursor texture while Shift is held.
/// </summary>
public class GameManager : MonoBehaviour
{
    Controls controls;

    // Raw screen-space pointer position from the new Input System.
    Vector2 _mousePos;
    // Pointer position converted to world space each frame.
    Vector2 mousePos;
    // 1 while Shift is held, 0 otherwise (set by input callbacks).
    float shift;

    // Index 0: default cursor, index 1: cursor shown while Shift is held.
    public Texture2D[] cursors;

    void Awake() // VERY First thing called
    {
        controls = new Controls();
    }

    void OnEnable()
    {
        controls.Enable();
        // Use named handlers instead of lambdas: the original subscribed
        // anonymous lambdas here, which OnDisable could never unsubscribe,
        // leaking subscriptions across enable/disable cycles.
        controls.Gameplay.Shift.performed += OnShiftInput;
        controls.Gameplay.Shift.canceled += OnShiftInput;
        controls.Gameplay.MousePos.performed += OnMouseMove;
    }

    void OnDisable()
    {
        controls.Disable();
        controls.Gameplay.Shift.performed -= OnShiftInput;
        controls.Gameplay.Shift.canceled -= OnShiftInput;
        controls.Gameplay.MousePos.performed -= OnMouseMove;
    }

    void Start() //first frame
    {
    }

    void Update() //every frame
    {
        mousePos = Camera.main.ScreenToWorldPoint(_mousePos);
    }

    void FixedUpdate()
    {
        OnShift();
    }

    private void OnMouseMove(InputAction.CallbackContext ctx)
    {
        _mousePos = ctx.ReadValue<Vector2>();
    }

    private void OnShiftInput(InputAction.CallbackContext ctx)
    {
        // performed delivers 1, canceled delivers 0.
        shift = ctx.ReadValue<float>();
    }

    void OnShift()
    {
        // The hotspot argument is an offset INSIDE the cursor texture, not a
        // screen/world position. The original passed the world-space mouse
        // position, which is an arbitrary (often out-of-texture) offset.
        Texture2D cursor = shift > 0 ? cursors[1] : cursors[0];
        Cursor.SetCursor(cursor, Vector2.zero, CursorMode.Auto);
    }
}
|
package taskrunner
import (
"context"
"fmt"
"testing"
"github.com/hashicorp/nomad/client/allocrunner/interfaces"
"github.com/hashicorp/nomad/client/devicemanager"
"github.com/hashicorp/nomad/helper/testlog"
"github.com/hashicorp/nomad/nomad/structs"
"github.com/hashicorp/nomad/plugins/device"
"github.com/hashicorp/nomad/plugins/drivers"
"github.com/stretchr/testify/require"
)
// TestDeviceHook_CorrectDevice verifies that when the device manager reserves
// a requested device, the prestart hook copies the reservation's env vars,
// mounts and device specs into the driver-facing response.
func TestDeviceHook_CorrectDevice(t *testing.T) {
	t.Parallel()
	require := require.New(t)

	dm := devicemanager.NoopMockManager()
	l := testlog.HCLogger(t)
	h := newDeviceHook(dm, l)

	reqDev := &structs.AllocatedDeviceResource{
		Vendor:    "foo",
		Type:      "bar",
		Name:      "baz",
		DeviceIDs: []string{"123"},
	}

	// Build the hook request
	req := &interfaces.TaskPrestartRequest{
		TaskResources: &structs.AllocatedTaskResources{
			Devices: []*structs.AllocatedDeviceResource{
				reqDev,
			},
		},
	}

	// Setup the device manager to return a response
	dm.ReserveF = func(d *structs.AllocatedDeviceResource) (*device.ContainerReservation, error) {
		// Reject anything other than the exact device the hook should forward.
		if d.Vendor != reqDev.Vendor || d.Type != reqDev.Type ||
			d.Name != reqDev.Name || len(d.DeviceIDs) != 1 || d.DeviceIDs[0] != reqDev.DeviceIDs[0] {
			return nil, fmt.Errorf("unexpected request: %+v", d)
		}
		res := &device.ContainerReservation{
			Envs: map[string]string{
				"123": "456",
			},
			Mounts: []*device.Mount{
				{
					ReadOnly: true,
					TaskPath: "foo",
					HostPath: "bar",
				},
			},
			Devices: []*device.DeviceSpec{
				{
					TaskPath:    "foo",
					HostPath:    "bar",
					CgroupPerms: "123",
				},
			},
		}
		return res, nil
	}

	var resp interfaces.TaskPrestartResponse
	err := h.Prestart(context.Background(), req, &resp)
	require.NoError(err)
	require.NotNil(resp)

	// The reservation must be translated 1:1 into the response.
	expEnv := map[string]string{
		"123": "456",
	}
	require.EqualValues(expEnv, resp.Env)
	expMounts := []*drivers.MountConfig{
		{
			Readonly: true,
			TaskPath: "foo",
			HostPath: "bar",
		},
	}
	require.EqualValues(expMounts, resp.Mounts)
	expDevices := []*drivers.DeviceConfig{
		{
			TaskPath:    "foo",
			HostPath:    "bar",
			Permissions: "123",
		},
	}
	require.EqualValues(expDevices, resp.Devices)
}
// TestDeviceHook_IncorrectDevice verifies that a reservation failure from the
// device manager is propagated as an error from the prestart hook.
func TestDeviceHook_IncorrectDevice(t *testing.T) {
	t.Parallel()
	require := require.New(t)

	dm := devicemanager.NoopMockManager()
	l := testlog.HCLogger(t)
	h := newDeviceHook(dm, l)

	reqDev := &structs.AllocatedDeviceResource{
		Vendor:    "foo",
		Type:      "bar",
		Name:      "baz",
		DeviceIDs: []string{"123"},
	}

	// Build the hook request
	req := &interfaces.TaskPrestartRequest{
		TaskResources: &structs.AllocatedTaskResources{
			Devices: []*structs.AllocatedDeviceResource{
				reqDev,
			},
		},
	}

	// Setup the device manager to return a response
	dm.ReserveF = func(d *structs.AllocatedDeviceResource) (*device.ContainerReservation, error) {
		return nil, fmt.Errorf("bad request")
	}

	var resp interfaces.TaskPrestartResponse
	err := h.Prestart(context.Background(), req, &resp)
	require.Error(err)
}
|
#!/bin/bash
# Integration test: steve must run queued requests in ascending timestamp order.
# NOTE: this script uses bash arrays and <<< herestrings, which are NOT POSIX
# sh features — the original #!/bin/sh shebang would fail on non-bash /bin/sh.

# Source test support functions
. ./test-support.sh
# Source the configuration to get a reference to the queue
. ./test-steve.conf

#### Arrange: create requests deliberately out of order
touch "$QUEUE/20150102125050.request"
touch "$QUEUE/20150102122020.request"
touch "$QUEUE/20150102123030.request"

#### Act: Run steve
./execsteve.sh
EXITCODE=$?

#### Assert: Check expected results
arr=()
while read -r line;
do
    arr+=("$line")
done <<< "$(grep "Running request" "$STEVE_OUTPUT")"
AssertEqual "${arr[0]}" "Running request 20150102122020"
AssertEqual "${arr[1]}" "Running request 20150102123030"
AssertEqual "${arr[2]}" "Running request 20150102125050"
|
package com.mb.scrapbook.app.stock.api
import com.mb.scrapbook.lib.base.network.response.BaseResponse
import okhttp3.Response
import retrofit2.http.GET
// Retrofit API definition for the stock feature.
interface ApiStocks {
    // GET on the service root; the payload arrives wrapped in the project's
    // BaseResponse envelope with a plain string body.
    @GET("/")
    suspend fun loadStock(): BaseResponse<String>
}
|
package com.epicodus.localbusinessapp;
import android.content.Intent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import java.util.ArrayList;
import butterknife.Bind;
import butterknife.ButterKnife;
/**
 * Shows a fixed breakfast menu in a ListView (items bound via ButterKnife).
 */
public class MenuActivity extends AppCompatActivity {
    @Bind(R.id.listView) ListView mListView;
    // Backing data for the list adapter; populated once in onCreate().
    private ArrayList<String> newArray = new ArrayList<String>();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_menu);
        ButterKnife.bind(this);
        newArray.add("warm shortcake w/ seasonal fruit");
        newArray.add("glazed breakfast hushpuppies");
        newArray.add("screen door praline bacon");
        newArray.add("screen door fried chicken and waffle");
        // Typed adapter instead of the raw ArrayAdapter (removes the unchecked
        // warning); the Intent fetched here previously was never used and has
        // been removed.
        ArrayAdapter<String> adapter =
                new ArrayAdapter<>(this, android.R.layout.simple_expandable_list_item_1, newArray);
        mListView.setAdapter(adapter);
    }
}
|
// crypto_generichash_blake2b.h
use libsodium_sys::*;
// Consistency tests: each libsodium accessor function must agree with the
// corresponding bindgen-generated constant, plus two smoke tests of the
// hashing entry points with all-zero inputs (return code 0 = success).
#[test]
fn test_crypto_generichash_blake2b_state_alignment() {
    // this asserts the alignment applied in alignment_fix.patch (see gen.sh)
    assert_eq!(64, std::mem::align_of::<crypto_generichash_blake2b_state>());
}
#[test]
fn test_crypto_generichash_blake2b_bytes_min() {
    assert_eq!(
        unsafe { crypto_generichash_blake2b_bytes_min() },
        crypto_generichash_blake2b_BYTES_MIN as usize
    )
}
#[test]
fn test_crypto_generichash_blake2b_bytes_max() {
    assert_eq!(
        unsafe { crypto_generichash_blake2b_bytes_max() },
        crypto_generichash_blake2b_BYTES_MAX as usize
    )
}
#[test]
fn test_crypto_generichash_blake2b_bytes() {
    assert_eq!(
        unsafe { crypto_generichash_blake2b_bytes() },
        crypto_generichash_blake2b_BYTES as usize
    )
}
#[test]
fn test_crypto_generichash_blake2b_keybytes_min() {
    assert_eq!(
        unsafe { crypto_generichash_blake2b_keybytes_min() },
        crypto_generichash_blake2b_KEYBYTES_MIN as usize
    )
}
#[test]
fn test_crypto_generichash_blake2b_keybytes_max() {
    assert_eq!(
        unsafe { crypto_generichash_blake2b_keybytes_max() },
        crypto_generichash_blake2b_KEYBYTES_MAX as usize
    )
}
#[test]
fn test_crypto_generichash_blake2b_keybytes() {
    assert_eq!(
        unsafe { crypto_generichash_blake2b_keybytes() },
        crypto_generichash_blake2b_KEYBYTES as usize
    )
}
#[test]
fn test_crypto_generichash_blake2b_saltbytes() {
    assert_eq!(
        unsafe { crypto_generichash_blake2b_saltbytes() },
        crypto_generichash_blake2b_SALTBYTES as usize
    )
}
#[test]
fn test_crypto_generichash_blake2b_personalbytes() {
    assert_eq!(
        unsafe { crypto_generichash_blake2b_personalbytes() },
        crypto_generichash_blake2b_PERSONALBYTES as usize
    )
}
// Smoke test: hashing a 64-byte zero message with a zero key must succeed.
#[test]
fn test_crypto_generichash_blake2b() {
    let mut out = [0u8; crypto_generichash_blake2b_BYTES as usize];
    let m = [0u8; 64];
    let key = [0u8; crypto_generichash_blake2b_KEYBYTES as usize];
    assert_eq!(
        unsafe {
            crypto_generichash_blake2b(
                out.as_mut_ptr(),
                out.len(),
                m.as_ptr(),
                m.len() as u64,
                key.as_ptr(),
                key.len(),
            )
        },
        0
    );
}
// Smoke test: the salt/personalization variant must also succeed.
#[test]
fn test_crypto_generichash_blake2b_salt_personal() {
    let mut out = [0u8; crypto_generichash_blake2b_BYTES as usize];
    let m = [0u8; 64];
    let key = [0u8; crypto_generichash_blake2b_KEYBYTES as usize];
    let salt = [0u8; crypto_generichash_blake2b_SALTBYTES as usize];
    let personal = [0u8; crypto_generichash_blake2b_PERSONALBYTES as usize];
    assert_eq!(
        unsafe {
            crypto_generichash_blake2b_salt_personal(
                out.as_mut_ptr(),
                out.len(),
                m.as_ptr(),
                m.len() as u64,
                key.as_ptr(),
                key.len(),
                salt.as_ptr(),
                personal.as_ptr(),
            )
        },
        0
    );
}
|
package datawave;
import datawave.ingest.data.RawRecordContainer;
import datawave.ingest.data.config.NormalizedContentInterface;
import datawave.ingest.data.config.ingest.BaseIngestHelper;
import com.google.common.collect.Multimap;
/**
 * Test double for {@link BaseIngestHelper} that returns a canned (possibly
 * null) field multimap and treats no field as a data-type or composite field.
 */
public class TestBaseIngestHelper extends BaseIngestHelper {
    /** Canned fields returned from getEventFields regardless of the event. */
    private final Multimap<String,NormalizedContentInterface> eventFields;

    /**
     * Deliberately return null from getEventFields when created
     */
    public TestBaseIngestHelper() {
        this(null);
    }

    public TestBaseIngestHelper(Multimap<String,NormalizedContentInterface> eventFields) {
        super();
        this.eventFields = eventFields;
    }

    /** Ignores the event entirely and returns the canned multimap. */
    @Override
    public Multimap<String,NormalizedContentInterface> getEventFields(RawRecordContainer event) {
        return eventFields;
    }

    @Override
    public boolean isDataTypeField(String fieldName) {
        return false;
    }

    @Override
    public boolean isCompositeField(String fieldName) {
        return false;
    }
}
|
#### 指令
```
su [username] // 切换用户
useradd [username] // 添加用户,默认会添加同名的用户组(root)
passwd [username] // 设置用户的登录密码(root)
userdel [username] -r // 删除用户(root),-r表示同时删除用户目录
```
#### 使用户可获得sudo执行权限
```
// 方式1:将用户添加至wheel群组中
sudo gpasswd -a zhangsan wheel // 查看群组下的用户:sudo lid -g wheel
// 方式2:添加用户同名文件夹至 /etc/sudoers.d文件夹下
```
#### 文件和文件夹权限
> ##### u=用户g=群组o=其他
>
> ##### r=4,w=2,x=1
> #### 修改权限
>
> ```
> chmod xxx [file_path] // 修改文件权限
> chmod o+r [file_path] // 为其他人添加读取权限
> chmod g+x [file_path] // 为群组添加执行权限
> chmod u-w [file_path] // 为用户去除写入权限
> chmod -R xxx [dir_path] // 修改文件夹权限 ,-R表示递归修改
> chown -R leon:leon [dir_path] // 修改文件夹的所属用户和用户组
> ```
```
touch files/file_{01..10}.txt // 在files目录下创建十个文件
```
|
require 'sequel'
# Hash subclass whose keys and/or values can be qualified with Sequel table
# qualifiers (e.g. :table__column style Sequel::SQL::QualifiedIdentifier).
class Cranium::Sequel::Hash < Hash

  # Returns a plain Hash with keys qualified by options[:keys_with] and values
  # qualified by options[:values_with]; either may be omitted. Raises
  # ArgumentError when any other option key is present.
  def qualify(options)
    unsupported = options.keys - [:keys_with, :values_with]
    unless unsupported.empty?
      raise ArgumentError, "Unsupported option for qualify: #{unsupported.first}"
    end
    Hash[qualify_fields(options[:keys_with], keys).zip(qualify_fields(options[:values_with], values))]
  end

  # Keys qualified with the given qualifier (returned as-is when nil).
  def qualified_keys(qualifier)
    qualify_fields(qualifier, keys)
  end

  # Values qualified with the given qualifier (returned as-is when nil).
  def qualified_values(qualifier)
    qualify_fields(qualifier, values)
  end

  private

  # Maps every field through Sequel.qualify unless no qualifier was supplied.
  def qualify_fields(qualifier, fields)
    return fields if qualifier.nil?
    fields.collect { |field| Sequel.qualify(qualifier, field) }
  end
end
|
---
layout: post
title: "Subdom"
date: 2020-01-26 06:02:53 -0500
categories: comic procreate
---

... Wait here.
|
use std::{cell::RefCell, collections::HashSet, rc::Rc, sync::Arc};
use crate::{
communication::{RecvEndpoint, TryRecvError},
dataflow::{Data, Message, State, Timestamp},
node::operator_event::OperatorEvent,
};
use super::{
errors::{ReadError, TryReadError},
EventMakerT, InternalStatefulReadStream, StreamId,
};
// TODO: split between system read streams and user accessible read streams to avoid Rc<RefCell<...>> in operator
/// Internal receiving side of a stream: holds the receive endpoint plus the
/// callbacks and stateful children to invoke for each incoming message.
pub struct InternalReadStream<D: Data> {
    /// The id of the stream.
    id: StreamId,
    /// User-defined stream name.
    name: String,
    /// Whether the stream is closed.
    closed: bool,
    /// The endpoint on which the stream receives data.
    recv_endpoint: Option<RecvEndpoint<Arc<Message<D>>>>,
    /// Vector of stream bundles that must be invoked when this stream receives a message.
    children: Vec<Rc<RefCell<dyn EventMakerT<EventDataType = D>>>>,
    /// A vector on callbacks registered on the stream.
    callbacks: Vec<Arc<dyn Fn(&Timestamp, &D)>>,
    /// A vector of watermark callbacks registered on the stream.
    watermark_cbs: Vec<Arc<dyn Fn(&Timestamp)>>,
}
impl<D: Data> InternalReadStream<D> {
    /// Create a stream into which we can write data.
    /// The name defaults to the string form of the generated id.
    pub fn new() -> Self {
        let id = StreamId::new_deterministic();
        Self {
            id,
            name: id.to_string(),
            closed: false,
            recv_endpoint: None,
            children: Vec::new(),
            callbacks: Vec::new(),
            watermark_cbs: Vec::new(),
        }
    }
    /// Like `new`, but with a caller-chosen id and name (no endpoint yet).
    pub fn new_with_id_name(id: StreamId, name: &str) -> Self {
        Self {
            id,
            name: name.to_string(),
            closed: false,
            recv_endpoint: None,
            children: Vec::new(),
            callbacks: Vec::new(),
            watermark_cbs: Vec::new(),
        }
    }
    pub fn get_id(&self) -> StreamId {
        self.id
    }
    pub fn get_name(&self) -> &str {
        &self.name[..]
    }
    /// True once a top watermark has been received (see `try_read`/`read`).
    pub fn is_closed(&self) -> bool {
        self.closed
    }
    /// Construct a stream that is already wired to a receive endpoint.
    pub fn from_endpoint(recv_endpoint: RecvEndpoint<Arc<Message<D>>>, id: StreamId) -> Self {
        Self {
            id,
            name: id.to_string(),
            closed: false,
            recv_endpoint: Some(recv_endpoint),
            children: Vec::new(),
            callbacks: Vec::new(),
            watermark_cbs: Vec::new(),
        }
    }
    /// Add a callback to be invoked when the stream receives a message.
    pub fn add_callback<F: 'static + Fn(&Timestamp, &D)>(&mut self, callback: F) {
        self.callbacks.push(Arc::new(callback));
    }
    /// Add a callback to be invoked after the stream received, and the operator
    /// processed all the messages with a timestamp.
    pub fn add_watermark_callback<F: 'static + Fn(&Timestamp)>(&mut self, callback: F) {
        self.watermark_cbs.push(Arc::new(callback));
    }
    /// Returns a new instance of the stream with state associated to it.
    /// The child is also registered so it receives every future message.
    pub fn add_state<S: State>(
        &mut self,
        state: S,
    ) -> Rc<RefCell<InternalStatefulReadStream<D, S>>> {
        let child = Rc::new(RefCell::new(InternalStatefulReadStream::new(self, state)));
        self.children
            .push(Rc::clone(&child) as Rc<RefCell<dyn EventMakerT<EventDataType = D>>>);
        child
    }
    /// Removes and returns the receive endpoint, leaving the stream detached.
    pub fn take_endpoint(&mut self) -> Option<RecvEndpoint<Arc<Message<D>>>> {
        self.recv_endpoint.take()
    }
    /// Tries to read a message from a channel.
    ///
    /// Returns an immutable reference, or `None` if no messages are
    /// available at the moment (i.e., non-blocking read).
    pub fn try_read(&mut self) -> Result<Message<D>, TryReadError> {
        if self.closed {
            return Err(TryReadError::Closed);
        }
        let result = self
            .recv_endpoint
            .as_mut()
            .map_or(Err(TryReadError::Disconnected), |rx| {
                rx.try_read()
                    .map(|msg| Message::clone(&msg))
                    .map_err(TryReadError::from)
            });
        // A top watermark permanently closes the stream and drops the endpoint.
        if result
            .as_ref()
            .map(Message::is_top_watermark)
            .unwrap_or(false)
        {
            self.closed = true;
            self.recv_endpoint = None;
        }
        result
    }
    /// Blocking read which polls the tokio channel.
    // TODO: make async or find a way to run on tokio.
    pub fn read(&mut self) -> Result<Message<D>, ReadError> {
        if self.closed {
            return Err(ReadError::Closed);
        }
        // Poll for the next message
        // NOTE(review): this is a busy-wait on Empty with no back-off.
        let result = self
            .recv_endpoint
            .as_mut()
            .map_or(Err(ReadError::Disconnected), |rx| loop {
                match rx.try_read() {
                    Ok(msg) => {
                        break Ok(Message::clone(&msg));
                    }
                    Err(TryRecvError::Empty) => (),
                    Err(TryRecvError::Disconnected) => {
                        break Err(ReadError::Disconnected);
                    }
                    Err(TryRecvError::BincodeError(_)) => {
                        break Err(ReadError::SerializationError);
                    }
                }
            });
        // Same close-on-top-watermark behavior as try_read.
        if result
            .as_ref()
            .map(Message::is_top_watermark)
            .unwrap_or(false)
        {
            self.closed = true;
            self.recv_endpoint = None;
        }
        result
    }
}
/// Default is a fresh, unconnected stream with a generated id.
impl<D: Data> Default for InternalReadStream<D> {
    fn default() -> Self {
        Self::new()
    }
}
impl<D: Data> EventMakerT for InternalReadStream<D> {
    type EventDataType = D;
    fn get_id(&self) -> StreamId {
        self.id
    }
    /// Converts one incoming message into operator events: one non-watermark
    /// event per data callback, one watermark event per watermark callback,
    /// plus whatever events the stateful children produce.
    fn make_events(&self, msg: Arc<Message<Self::EventDataType>>) -> Vec<OperatorEvent> {
        let mut events: Vec<OperatorEvent> = Vec::new();
        match msg.as_ref() {
            Message::TimestampedData(_) => {
                // Stateless callbacks may run in parallel, so create 1 event for each
                let stateless_cbs = self.callbacks.clone();
                for callback in stateless_cbs {
                    let msg_arc = Arc::clone(&msg);
                    events.push(OperatorEvent::new(
                        msg_arc.timestamp().clone(),
                        false,
                        0,
                        HashSet::with_capacity(0),
                        HashSet::with_capacity(0),
                        move || {
                            (callback)(msg_arc.timestamp(), msg_arc.data().unwrap());
                        },
                    ))
                }
            }
            Message::Watermark(timestamp) => {
                // Watermark events are flagged (second arg = true) so the
                // executor can order them after data events of the timestamp.
                let watermark_cbs = self.watermark_cbs.clone();
                for watermark_cb in watermark_cbs {
                    let cb = Arc::clone(&watermark_cb);
                    let timestamp_copy = timestamp.clone();
                    events.push(OperatorEvent::new(
                        timestamp.clone(),
                        true,
                        0,
                        HashSet::with_capacity(0),
                        HashSet::with_capacity(0),
                        move || (cb)(&timestamp_copy),
                    ));
                }
            }
        }
        // Fan the same message out to every registered stateful child.
        for child in self.children.iter() {
            events.append(&mut child.borrow_mut().make_events(msg.clone()));
        }
        events
    }
}
|
<?php
/**
 * 后台入口文件 (admin entry point).
 *
 * SECURITY WARNING (review): this script unconditionally marks EVERY visitor
 * as logged-in admin ($_SESSION['adminlogin'] = 1) before redirecting into
 * the admin panel — an authentication bypass. Confirm this is an intentional
 * install-time stub; otherwise it must be removed before deployment.
 */
@session_start();
$_SESSION['adminlogin'] = 1;
header("Location: ../index.php?g=admin");
|
<?php
App::uses('AclNode', 'Model');
/**
* AclAro Model
*
* PHP version 5
*
* @category Model
* @package Croogo.Acl.Model
* @version 1.0
* @author Fahad Ibnay Heylaal <[email protected]>
* @license http://www.opensource.org/licenses/mit-license.php The MIT License
* @link http://www.croogo.org
*/
class AclAro extends AclNode {

/**
 * name
 *
 * @var string
 */
	public $name = 'AclAro';

/**
 * useTable
 *
 * @var string
 */
	public $useTable = 'aros';

/**
 * alias
 */
	public $alias = 'Aro';

/**
 * hasAndBelongsToMany
 */
	public $hasAndBelongsToMany = array(
		'Aco' => array(
			'with' => 'Acl.AclPermission',
		),
	);

/**
 * Get a list of Role AROs
 *
 * @param array $roles map indexed by Role.id (only the keys are used to
 *   match Aro.foreign_key)
 * @return array array of Aro.id indexed by Role.id
 */
	public function getRoles($roles) {
		$aros = $this->find('all', array(
			'conditions' => array(
				'Aro.model' => 'Role',
				'Aro.foreign_key' => array_keys($roles),
			),
		));
		return Hash::combine($aros, '{n}.Aro.foreign_key', '{n}.Aro.id');
	}

}
|
#!/bin/sh
# Lists JS files under build/jslib that are not yet covered by
# debian/copyright, printing any license/copyright hints from their headers.
echo "This script helps you to find files which are not documented"
echo "in debian/copyright. When a set of files has been documented,"
echo "please write a short regexp into $0's source."

# write the regexps of already documented files there:
alreadyOKpatterns='uglifyjs2|uglify|source-map|esprima|opto.build|blank|xpconnect|/node|/browser|/rhino'

for f in $(find build/jslib -name "*.js" | grep -Ev "$alreadyOKpatterns"); do
    # Skip files already carrying the Dojo Foundation header.
    # (POSIX `!` pipeline negation replaces the original `!( ... )` subshell;
    # `head file` replaces the useless `cat | head`; "$f" is now quoted.)
    if ! head -n 10 "$f" | grep -q "The Dojo Foundation All Rights Reserved"; then
        echo "============= $f ================"
        l="$(head -n 10 "$f" | grep -i license)"
        c="$(head -n 10 "$f" | grep -Ei 'copyright|\(c\)')"
        echo "License: $l"
        echo "©: $c"
    fi
done
|
#!/usr/bin/env python3
"""Publish a single MQTT message.

usage: python3 publisher_demo.py [topic] [data]
"""
import sys

import paho.mqtt.client as mqtt

MQTT_BROKER_ADDRESS = "192.168.1.125"
MQTT_PORT = 1883
MQTT_STAYALIVE = 60  # keep-alive interval, seconds

# Fail with a clear usage message instead of an IndexError on missing args.
if len(sys.argv) != 3:
    sys.exit("usage: python3 publisher_demo.py [topic] [data]")

topic = sys.argv[1]
data = sys.argv[2]
print(f"topic: {topic}\t data: {data}")

client = mqtt.Client()
client.connect(MQTT_BROKER_ADDRESS, MQTT_PORT, MQTT_STAYALIVE)
client.publish(topic, data)
client.disconnect()
|
package com.jetbrains.rider.plugins.unity.ideaInterop.fileTypes.uss
import com.intellij.psi.css.impl.util.editor.CssBreadcrumbsInfoProvider
// Allows enabling/disabling breadcrumbs for USS by reusing the CSS
// breadcrumbs provider, restricted to the USS language.
class UssFileBreadcrumbsProvider : CssBreadcrumbsInfoProvider() {
    override fun getLanguages() = arrayOf(UssLanguage)
}
|
// Auto-Generated
package com.github.j5ik2o.reactive.aws.s3.model.ops
import software.amazon.awssdk.services.s3.model._
// NOTE: auto-generated file (see header) — only comments added here; any
// structural change belongs in the generator.
/** Option-aware setters for [[ObjectLockRetention.Builder]]; `None` leaves the builder untouched. */
final class ObjectLockRetentionBuilderOps(val self: ObjectLockRetention.Builder) extends AnyVal {

  @SuppressWarnings(Array("org.wartremover.warts.AsInstanceOf"))
  final def modeAsScala(value: Option[ObjectLockRetentionMode]): ObjectLockRetention.Builder = {
    value.fold(self) { v =>
      self.mode(v)
    }
  }

  @SuppressWarnings(Array("org.wartremover.warts.AsInstanceOf"))
  final def retainUntilDateAsScala(value: Option[java.time.Instant]): ObjectLockRetention.Builder = {
    value.fold(self) { v =>
      self.retainUntilDate(v)
    }
  }

}

/** Null-safe Option accessors for [[ObjectLockRetention]]. */
final class ObjectLockRetentionOps(val self: ObjectLockRetention) extends AnyVal {

  @SuppressWarnings(Array("org.wartremover.warts.AsInstanceOf"))
  final def modeAsScala: Option[ObjectLockRetentionMode] = Option(self.mode)

  @SuppressWarnings(Array("org.wartremover.warts.AsInstanceOf"))
  final def retainUntilDateAsScala: Option[java.time.Instant] = Option(self.retainUntilDate)

}

/** Mix-in providing the implicit conversions to the two Ops wrappers above. */
@SuppressWarnings(Array("org.wartremover.warts.ImplicitConversion"))
trait ToObjectLockRetentionOps {

  implicit def toObjectLockRetentionBuilderOps(v: ObjectLockRetention.Builder): ObjectLockRetentionBuilderOps =
    new ObjectLockRetentionBuilderOps(v)

  implicit def toObjectLockRetentionOps(v: ObjectLockRetention): ObjectLockRetentionOps = new ObjectLockRetentionOps(v)

}
|
// run-pass
// aux-build:issue-11529.rs
// pretty-expanded FIXME #23616
extern crate issue_11529 as a;
// Regression test body for issue #11529: constructing a tuple struct from the
// aux crate with a borrowed argument must compile and run.
fn main() {
    let one = 1;
    let _a = a::A(&one);
}
|
// Copyright (c) Microsoft Corporation. All Rights Reserved.
using System;
using System.Configuration;
using System.ServiceModel;
using System.ServiceModel.Description;
namespace Microsoft.ServiceModel.Samples
{
    // Define a service contract.
    /// <summary>WCF contract exposing the four basic arithmetic operations.</summary>
    [ServiceContract(Namespace="http://Microsoft.ServiceModel.Samples")]
    public interface ICalculator
    {
        [OperationContract]
        double Add(double n1, double n2);
        [OperationContract]
        double Subtract(double n1, double n2);
        [OperationContract]
        double Multiply(double n1, double n2);
        [OperationContract]
        double Divide(double n1, double n2);
    }
    // Service class which implements the service contract.
    // Added code to write output to the console window
    /// <summary>
    /// Console-hosted implementation of <see cref="ICalculator"/>; each
    /// operation logs its arguments and result to the console.
    /// </summary>
    public class CalculatorService : ICalculator
    {
        public double Add(double n1, double n2)
        {
            double result = n1 + n2;
            Console.WriteLine("Received Add({0},{1})", n1, n2);
            Console.WriteLine("Return: {0}", result);
            return result;
        }
        public double Subtract(double n1, double n2)
        {
            double result = n1 - n2;
            Console.WriteLine("Received Subtract({0},{1})", n1, n2);
            Console.WriteLine("Return: {0}", result);
            return result;
        }
        public double Multiply(double n1, double n2)
        {
            double result = n1 * n2;
            Console.WriteLine("Received Multiply({0},{1})", n1, n2);
            Console.WriteLine("Return: {0}", result);
            return result;
        }
        // NOTE: IEEE double division — n2 == 0 yields Infinity/NaN, no throw.
        public double Divide(double n1, double n2)
        {
            double result = n1 / n2;
            Console.WriteLine("Received Divide({0},{1})", n1, n2);
            Console.WriteLine("Return: {0}", result);
            return result;
        }
        // Host the service within this EXE console application.
        public static void Main()
        {
            // Get base address from app settings in configuration
            Uri baseAddress = new Uri("http://localhost:8000/servicemodelsamples/service");
            // Create a ServiceHost for the CalculatorService type and provide the base address.
            using (ServiceHost serviceHost = new ServiceHost(typeof(CalculatorService), baseAddress))
            {
                // <Snippet1>
                // Named-pipe binding with transport-level security (the point
                // of this sample; snippet markers are used by the docs build).
                NetNamedPipeBinding nnpb = new NetNamedPipeBinding();
                NetNamedPipeSecurity nnpSecurity = nnpb.Security;
                // <Snippet2>
                nnpSecurity.Mode = NetNamedPipeSecurityMode.Transport;
                // </Snippet2>
                // <Snippet3>
                NamedPipeTransportSecurity npts = nnpSecurity.Transport;
                // </Snippet3>
                serviceHost.AddServiceEndpoint(typeof(ICalculator), nnpb, "net.pipe://localhost/ServiceModelSamples/Service");
                // </Snippet1>
                // Add a mex endpoint
                ServiceMetadataBehavior smb = new ServiceMetadataBehavior();
                smb.HttpGetEnabled = true;
                smb.HttpGetUrl = new Uri("http://localhost:8000/servicemodelsamples/service");
                serviceHost.Description.Behaviors.Add(smb);
                // Open the ServiceHostBase to create listeners and start listening for messages.
                serviceHost.Open();
                // The service can now be accessed.
                Console.WriteLine("The service is ready.");
                Console.WriteLine("Press <ENTER> to terminate service.");
                Console.WriteLine();
                Console.ReadLine();
                // Close the ServiceHostBase to shutdown the service.
                serviceHost.Close();
            }
        }
    }
|
/*
El gran libro de Kotlin
(para programadores de back end)
Editorial: Marcombo (https://www.marcombo.com/)
Autor: Luis Criado Fernández (http://luis.criado.online/)
CAPÍTULO 9: NÚMEROS.
*/
package marcombo.lcriadof.capitulo9
// Prints the bit size and value range of each built-in numeric type.
// Uses Kotlin string templates instead of `+` concatenation (same output).
fun main() {
    println("tipo, tamaño, rango ")
    println("Byte, ${Byte.SIZE_BITS} bits, ${Byte.MIN_VALUE} .. ${Byte.MAX_VALUE}") // 1
    println("Short, ${Short.SIZE_BITS} bits, ${Short.MIN_VALUE} .. ${Short.MAX_VALUE}")
    println("Int, ${Int.SIZE_BITS} bits, ${Int.MIN_VALUE} .. ${Int.MAX_VALUE}")
    println("Long, ${Long.SIZE_BITS} bits, ${Long.MIN_VALUE} .. ${Long.MAX_VALUE}")
    println("Float, ${Float.SIZE_BITS} bits, ${Float.MIN_VALUE} .. ${Float.MAX_VALUE}")
    println("Double,${Double.SIZE_BITS} bits, ${Double.MIN_VALUE} .. ${Double.MAX_VALUE}")
}
|
package handler
import (
"encoding/json"
"net/http"
"k8s.io/apimachinery/pkg/types"
"sigs.k8s.io/controller-runtime/pkg/reconcile"
)
// New constructs a Handler that forwards webhook requests to the given
// controller-runtime reconciler.
func New(reconciler reconcile.Reconciler) *Handler {
	h := &Handler{reconciler: reconciler}
	return h
}
// Handler is an HTTP adapter that triggers a controller-runtime
// Reconciler for a NamespacedName supplied in the request body.
type Handler struct {
	reconciler reconcile.Reconciler
}
// Handle serves one webhook request: it decodes a NamespacedName from a
// JSON POST body, runs a single reconcile pass for it, and writes the
// reconcile.Result back as JSON.
func (h *Handler) Handle(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodPost {
		// RFC 7231: a 405 response must carry an Allow header listing the
		// supported methods (the original sent the request header "Accept"
		// by mistake).
		w.Header().Add("Allow", http.MethodPost)
		http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
		return
	}
	nn := types.NamespacedName{}
	if err := json.NewDecoder(r.Body).Decode(&nn); err != nil {
		// A malformed body is the client's fault.
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	request := reconcile.Request{NamespacedName: nn}
	result, err := h.reconciler.Reconcile(request)
	if err != nil {
		// A failed reconcile is a server-side failure, not a bad request.
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	// Headers are already committed by the first write; an encode error
	// here cannot be reported to the client.
	_ = json.NewEncoder(w).Encode(&result)
}
|
# Gibbs update for row i of W: draw from the full conditional
# Dirichlet(prior alpha + per-cluster counts of the labels lam[i]).
function update_W!(i::Int, s::State, c::Constants, d::Data)
  counts = zeros(c.K)
  for n in 1:d.N[i]
    lbl = s.lam[i][n]
    # Non-positive labels (e.g. "noise"/unassigned) are not counted.
    lbl > 0 && (counts[lbl] += 1)
  end
  s.W[i, :] = rand(Dirichlet(c.W_prior.alpha .+ counts))
end
# Update every row of W in turn (one sweep over all i).
function update_W!(s::State, c::Constants, d::Data)
  foreach(i -> update_W!(i, s, c, d), 1:d.I)
end
|
# frozen_string_literal: true
require "test_helper"
describe Committee::RequestUnpacker do
  # Exercises Committee::RequestUnpacker across content types and the
  # optimistic_json / allow_form_params / allow_query_params /
  # allow_header_params / allow_get_body options. The second element of
  # the returned pair flags whether form params were used.
  it "unpacks JSON on Content-Type: application/json" do
    env = {
      "CONTENT_TYPE" => "application/json",
      "rack.input" => StringIO.new('{"x":"y"}'),
    }
    request = Rack::Request.new(env)
    unpacker = Committee::RequestUnpacker.new
    assert_equal([{ "x" => "y" }, false], unpacker.unpack_request_params(request))
  end
  it "unpacks JSON on Content-Type: application/vnd.api+json" do
    env = {
      "CONTENT_TYPE" => "application/vnd.api+json",
      "rack.input" => StringIO.new('{"x":"y"}'),
    }
    request = Rack::Request.new(env)
    unpacker = Committee::RequestUnpacker.new
    assert_equal([{ "x" => "y" }, false], unpacker.unpack_request_params(request))
  end
  it "unpacks JSON on no Content-Type" do
    env = {
      "rack.input" => StringIO.new('{"x":"y"}'),
    }
    request = Rack::Request.new(env)
    unpacker = Committee::RequestUnpacker.new
    assert_equal([{ "x" => "y" }, false], unpacker.unpack_request_params(request))
  end
  it "doesn't unpack JSON on application/x-ndjson" do
    # NOTE(review): the '\n' below is inside single quotes, so it is a
    # literal backslash-n, not a newline — the body is one line of
    # invalid JSON rather than two NDJSON records. The assertion holds
    # either way; confirm intent before "fixing".
    env = {
      "CONTENT_TYPE" => "application/x-ndjson",
      "rack.input" => StringIO.new('{"x":"y"}\n{"a":"b"}'),
    }
    request = Rack::Request.new(env)
    unpacker = Committee::RequestUnpacker.new
    assert_equal([{}, false], unpacker.unpack_request_params(request))
  end
  it "doesn't unpack JSON under other Content-Types" do
    %w[application/x-www-form-urlencoded multipart/form-data].each do |content_type|
      env = {
        "CONTENT_TYPE" => content_type,
        "rack.input" => StringIO.new('{"x":"y"}'),
      }
      request = Rack::Request.new(env)
      unpacker = Committee::RequestUnpacker.new
      assert_equal([{}, false], unpacker.unpack_request_params(request))
    end
  end
  it "unpacks JSON under other Content-Types with optimistic_json" do
    %w[application/x-www-form-urlencoded multipart/form-data].each do |content_type|
      env = {
        "CONTENT_TYPE" => content_type,
        "rack.input" => StringIO.new('{"x":"y"}'),
      }
      request = Rack::Request.new(env)
      unpacker = Committee::RequestUnpacker.new(optimistic_json: true)
      assert_equal([{ "x" => "y" }, false], unpacker.unpack_request_params(request))
    end
  end
  it "returns {} when unpacking non-JSON with optimistic_json" do
    %w[application/x-www-form-urlencoded multipart/form-data].each do |content_type|
      env = {
        "CONTENT_TYPE" => content_type,
        "rack.input" => StringIO.new('x=y&foo=42'),
      }
      request = Rack::Request.new(env)
      unpacker = Committee::RequestUnpacker.new(optimistic_json: true)
      assert_equal([{}, false], unpacker.unpack_request_params(request))
    end
  end
  it "unpacks an empty hash on an empty request body" do
    env = {
      "CONTENT_TYPE" => "application/json",
      "rack.input" => StringIO.new(""),
    }
    request = Rack::Request.new(env)
    unpacker = Committee::RequestUnpacker.new
    assert_equal([{}, false], unpacker.unpack_request_params(request))
  end
  it "doesn't unpack form params" do
    %w[application/x-www-form-urlencoded multipart/form-data].each do |content_type|
      env = {
        "CONTENT_TYPE" => content_type,
        "rack.input" => StringIO.new("x=y"),
      }
      request = Rack::Request.new(env)
      unpacker = Committee::RequestUnpacker.new
      assert_equal([{}, false], unpacker.unpack_request_params(request))
    end
  end
  it "unpacks form params with allow_form_params" do
    %w[application/x-www-form-urlencoded multipart/form-data].each do |content_type|
      env = {
        "CONTENT_TYPE" => content_type,
        "rack.input" => StringIO.new("x=y"),
      }
      request = Rack::Request.new(env)
      unpacker = Committee::RequestUnpacker.new(allow_form_params: true)
      assert_equal([{ "x" => "y" }, true], unpacker.unpack_request_params(request))
    end
  end
  it "unpacks form & query params with allow_form_params and allow_query_params" do
    # Query params are returned by unpack_query_params, not merged here —
    # hence only the form pair in the expected value.
    %w[application/x-www-form-urlencoded multipart/form-data].each do |content_type|
      env = {
        "CONTENT_TYPE" => content_type,
        "rack.input" => StringIO.new("x=y"),
        "QUERY_STRING" => "a=b"
      }
      request = Rack::Request.new(env)
      unpacker = Committee::RequestUnpacker.new(allow_form_params: true, allow_query_params: true)
      assert_equal([ { "x" => "y"}, true], unpacker.unpack_request_params(request))
    end
  end
  it "unpacks query params with allow_query_params" do
    env = {
      "rack.input" => StringIO.new(""),
      "QUERY_STRING" => "a=b"
    }
    request = Rack::Request.new(env)
    unpacker = Committee::RequestUnpacker.new(allow_query_params: true)
    assert_equal({ "a" => "b" }, unpacker.unpack_query_params(request))
  end
  it "errors if JSON is not an object" do
    env = {
      "CONTENT_TYPE" => "application/json",
      "rack.input" => StringIO.new('[2]'),
    }
    request = Rack::Request.new(env)
    assert_raises(Committee::BadRequest) do
      Committee::RequestUnpacker.new.unpack_request_params(request)
    end
  end
  it "errors on an unknown Content-Type" do
    # Despite the name this asserts the lenient behavior: unknown types
    # yield an empty params hash rather than raising.
    env = {
      "CONTENT_TYPE" => "application/whats-this",
      "rack.input" => StringIO.new('{"x":"y"}'),
    }
    request = Rack::Request.new(env)
    unpacker = Committee::RequestUnpacker.new
    assert_equal([{}, false], unpacker.unpack_request_params(request))
  end
  # this is mostly here for line coverage
  it "unpacks JSON containing an array" do
    env = {
      "rack.input" => StringIO.new('{"x":[]}'),
    }
    request = Rack::Request.new(env)
    unpacker = Committee::RequestUnpacker.new
    assert_equal([{ "x" => [] }, false], unpacker.unpack_request_params(request))
  end
  it "unpacks http header" do
    env = {
      "HTTP_FOO_BAR" => "some header value",
      "rack.input" => StringIO.new(""),
    }
    request = Rack::Request.new(env)
    unpacker = Committee::RequestUnpacker.new({ allow_header_params: true })
    assert_equal({ "FOO-BAR" => "some header value" }, unpacker.unpack_headers(request))
  end
  it "includes request body when`use_get_body` is true" do
    env = {
      "rack.input" => StringIO.new('{"x":1, "y":2}'),
      "REQUEST_METHOD" => "GET",
      "QUERY_STRING"=>"data=value&x=aaa",
    }
    request = Rack::Request.new(env)
    unpacker = Committee::RequestUnpacker.new({ allow_query_params: true, allow_get_body: true })
    assert_equal([{ 'x' => 1, 'y' => 2 }, false], unpacker.unpack_request_params(request))
  end
  it "doesn't include request body when `use_get_body` is false" do
    env = {
      "rack.input" => StringIO.new('{"x":1, "y":2}'),
      "REQUEST_METHOD" => "GET",
      "QUERY_STRING"=>"data=value&x=aaa",
    }
    request = Rack::Request.new(env)
    unpacker = Committee::RequestUnpacker.new({ allow_query_params: true, use_get_body: false })
    assert_equal({ 'data' => 'value', 'x' => 'aaa' }, unpacker.unpack_query_params(request))
  end
end
|
class RailsProxify::ApplicationController < ActionController::Base
  # Render any unhandled application error as JSON with a 422 status.
  # Rescuing StandardError (not Exception) keeps process-level conditions
  # such as SignalException, SystemExit and NoMemoryError fatal, as they
  # should be. Also fixes the "occured" typo in the client-visible message.
  rescue_from StandardError do |e|
    render json: { error: "An error occurred: #{e.message}" }, status: 422
  end
end
|
package typingsSlinky.twilioVideo.mod
import typingsSlinky.twilioVideo.mod.Track.ID
import typingsSlinky.twilioVideo.mod.Track.SID
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/** Scala.js facade for twilio-video's `TrackStats`: one statistics
  * snapshot for a media track. Nullable fields are `null` until the
  * stat is available (per the upstream typings).
  * NOTE: generated by ScalablyTyped — prefer regenerating over editing.
  */
@JSImport("twilio-video", "TrackStats")
@js.native
class TrackStats () extends StObject {
  /** Codec in use for the track, or null. */
  var codec: String | Null = js.native
  /** Packets lost so far, or null. */
  var packetsLost: Double | Null = js.native
  /** Presumably the RTP synchronization source id — confirm in twilio docs. */
  var ssrc: String = js.native
  /** Timestamp of this snapshot. */
  var timestamp: Double = js.native
  /** Local track id. */
  var trackId: ID = js.native
  /** Track SID. */
  var trackSid: SID = js.native
}
|
Ignore this directory
=====================
These scripts are ephemeral and were only ever meant to work on the pinned
revisions listed in the makefile. They were created to generate `latest.json`
adhoc by scraping source code and documentation. Such material, by nature, has
no reliable interface. The date of the last commit to touch this directory
can be considered its sell-by.
## Requirements:
- `curl`, `tar`, `unzip`
- `hg`, `git`
- `latexmk`
- `makeinfo`
- `node` (version 6 or above)
- `python3`
- `texlive` (or equivalent)
## Building lshort
`lshort.idx` is an intermediate "build dependency" of the `lshort` book.
It is generated during the build process and is not contained in the
`*.src.tar.gz` archive. Nor is it included in the various OS distribution
packages (e.g., `tex-lang-english` on Debian). More info on the build
requirements is available at [ctan] [1].
## Corner-case example with Fedora 24
This assumes all TeX packages from `sagemath` are already installed:
```
# dnf install \
texlive-cbfonts \
texlive-lgreek \
texlive-xypic \
texlive-IEEEtran \
texlive-lastpage \
texlive-greek-fo \
texlive-polyglos \
texlive-babel-gr \
texlive-babel-ge \
texlive-babel-fr \
texlive-numprint
```
[1]: http://www.ctan.org/tex-archive/info/lshort/english
|
import 'package:vector_math/vector_math_64.dart';
import 'dart:math' as math;
/// Composes a transform from an optional [origin] matrix plus optional
/// [position], [rotation] (axis-angle quadruple: x, y, z, angle),
/// [eulerAngles] and [scale] components, applied in that order.
/// Null components leave the corresponding part of [origin] untouched.
Matrix4 createTransformMatrix(Matrix4? origin, Vector3? position, Vector3? scale,
    Vector4? rotation, Vector3? eulerAngles) {
  final transform = origin ?? Matrix4.identity();
  if (position != null) {
    transform.setTranslation(position);
  }
  if (rotation != null) {
    // Rotate `rotation[3]` radians around the (x, y, z) axis.
    transform.rotate(
        Vector3(rotation[0], rotation[1], rotation[2]), rotation[3]);
  }
  if (eulerAngles != null) {
    transform.matrixEulerAngles = eulerAngles;
  }
  if (scale != null) {
    transform.scale(scale);
  }
  // The original fell back to transform.scale(1.0) when scale was null;
  // scaling by 1.0 is a no-op, so the branch was removed (no behavior
  // change).
  return transform;
}
/// Convenience accessors on [Matrix4] for scale and Euler angles.
/// NOTE(review): "Extenstion" is a typo in the public name; kept for
/// source compatibility with existing importers.
extension Matrix4Extenstion on Matrix4 {
  /// Scale component of this matrix, extracted via [Matrix4.decompose].
  Vector3 get matrixScale {
    final scale = Vector3.zero();
    decompose(Vector3.zero(), Quaternion(0, 0, 0, 0), scale);
    return scale;
  }
  /// Rotation component as (roll, pitch, yaw) in radians, computed from
  /// the decomposed quaternion with the standard quaternion-to-Euler
  /// formulas.
  Vector3 get matrixEulerAngles {
    final q = Quaternion(0, 0, 0, 0);
    decompose(Vector3.zero(), q, Vector3.zero());
    // q.x and q.y are swapped before conversion — presumably to match the
    // axis convention of the consumer; TODO confirm against callers.
    final t = q.x;
    q.x = q.y;
    q.y = t;
    final angles = Vector3.zero();
    // roll (x-axis rotation)
    final sinr_cosp = 2 * (q.w * q.x + q.y * q.z);
    final cosr_cosp = 1 - 2 * (q.x * q.x + q.y * q.y);
    angles[0] = math.atan2(sinr_cosp, cosr_cosp);
    // pitch (y-axis rotation)
    final sinp = 2 * (q.w * q.y - q.z * q.x);
    if (sinp.abs() >= 1) {
      angles[1] =
          _copySign(math.pi / 2, sinp); // use 90 degrees if out of range
    } else {
      angles[1] = math.asin(sinp);
    }
    // yaw (z-axis rotation)
    final siny_cosp = 2 * (q.w * q.z + q.x * q.y);
    final cosy_cosp = 1 - 2 * (q.y * q.y + q.z * q.z);
    angles[2] = math.atan2(siny_cosp, cosy_cosp);
    return angles;
  }
  /// Replaces the rotation part with the given Euler angles while
  /// preserving the current translation and scale.
  set matrixEulerAngles(Vector3 angles) {
    final translation = Vector3.zero();
    final scale = Vector3.zero();
    decompose(translation, Quaternion(0, 0, 0, 0), scale);
    final r = Quaternion.euler(angles[0], angles[1], angles[2]);
    setFromTranslationRotationScale(translation, r, scale);
  }
}
// Port of numdart's copySign: returns [magnitude] carrying the sign of
// [sign]. https://scidart.org/docs/scidart/numdart/copySign.html
// When [sign] is zero or NaN, or the signs already agree, [magnitude]
// is returned unchanged.
double _copySign(double magnitude, double sign) {
  final keepAsIs = sign == 0.0 || sign.isNaN || magnitude.sign == sign.sign;
  return keepAsIs ? magnitude : -magnitude;
}
|
# Clipboard To Script
#### Designed for those who love testing internet-ready code and do not like waiting!
1. Just copy the code / shader / text.
2. Right click on the project tab.
3. Choose the file format from the "From Clipboard" menu.
4. Enter the File Name.
## AWESOME
*Your script was created!*
[AssetStore link](https://assetstore.unity.com/packages/tools/utilities/clipboard-to-script-143021?aid=1011l4JRk)
|
%% Behaviour wrapper around a pluggable replicated state machine: the
%% callback module is passed explicitly to each wrapper function below.
-module(raft_stm).
-export([load/2, get_last_lid/2, make_wal/2, apply_wal/4]).
-export_type([cfg/0, stm/0]).
%% Implementation-defined configuration handed to the load callback.
-type cfg() :: term().
%% Opaque statemachine state owned by the callback module.
-type stm() :: term().
-type lid() :: raft:lid().
-type wal() :: raft:wal().
%% A statemachine operation before serialization into a wal() entry.
-type op() :: term().
-callback load(Cfg :: term()) -> stm().
-callback get_last_lid(stm()) -> lid().
-callback apply_wal(stm(), lid(), wal()) -> stm().
-callback make_wal(op()) -> wal().
-include("raft_int.hrl").
%% @doc Called at start/restart.
%% Load persisted state from disk. Delegates to the callback module.
-spec load(module(), cfg()) -> stm().
load(Mod, Cfg) -> Mod:load(Cfg).
%% @doc Read last-lid from statemachine.
%% This is for the log feeder to know from which point on it should feed
%% logs to the statemachine after start/restart. Implementations that
%% have applied nothing yet may return false or undefined, both of which
%% normalize to ?NO_PREV_LID.
-spec get_last_lid(module(), stm()) -> lid().
get_last_lid(Mod, Stm) ->
  case Mod:get_last_lid(Stm) of
    ?LID(_, _) = Lid ->
      Lid;
    Missing when Missing =:= false; Missing =:= undefined ->
      ?NO_PREV_LID
  end.
%% @doc Apply log entry to statemachine.
%% Lid identifies the entry being applied; returns the updated state.
-spec apply_wal(module(), stm(), lid(), wal()) -> stm().
apply_wal(Mod, Stm, Lid, Wal) ->
  Mod:apply_wal(Stm, Lid, Wal).
%% @doc Serialize statemachine OP into binary log entry.
-spec make_wal(module(), op()) -> wal().
make_wal(Mod, Op) ->
  Mod:make_wal(Op).
|
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 8 13:17:12 2018
@author: Raj
"""
from ffta.hdf_utils import hdf_utils
from matplotlib import pyplot as plt
def test_pixel(h5_file, param_changes=None, pxls=1, showplots=True,
               verbose=True, clear_filter=False):
    """
    Takes random pixel(s) and does standard processing.

    This is to tune parameters prior to processing an entire image.

    Parameters
    ----------
    h5_file : h5Py File, path, Dataset
        H5 file to process
    param_changes : dict, optional
        Parameter overrides applied to the file before processing.
        Defaults to None (no changes); a None sentinel replaces the
        original mutable ``{}`` default, which is shared across calls.
    pxls : int, optional
        Number of random pixels to survey
    showplots : bool, optional
        Whether to plot each pixel's fit into the current figure.
    verbose : bool , optional
        To print to command line. Currently for future-proofing
    clear_filter : bool, optional
        Whether to do filtering (FIR) or not
    """
    # NOTE: the original used `any(param_changes)`, which tests key
    # truthiness (a {0: ...} dict would be skipped); plain truthiness is
    # the correct "is non-empty" check.
    if param_changes:
        hdf_utils.change_params(h5_file, new_vals=param_changes)

    parameters = hdf_utils.get_params(h5_file)
    cols = parameters['num_cols']
    rows = parameters['num_rows']

    # Choose pixel coordinates to sample: [0, 0] for a single pixel,
    # otherwise uniformly random (row, col) pairs.
    pixels = []
    if pxls == 1:
        pixels.append([0, 0])
    elif pxls > 1:
        from numpy.random import randint
        for _ in range(pxls):
            pixels.append([randint(0, rows), randint(0, cols)])

    # Analyze each sampled pixel; get_pixel works on Datasets or the H5 file.
    for rc in pixels:
        h5_px = hdf_utils.get_pixel(h5_file, rc=rc)
        if clear_filter:
            h5_px.clear_filter_flags()
        h5_px.analyze()
        print(rc, h5_px.tfp)
        if showplots:
            plt.plot(h5_px.best_fit, 'r--')
            plt.plot(h5_px.cut, 'g-')

    return
|
<?php
/**
 * @package oakcms
 * @author Hryvinskyi Volodymyr <[email protected]>
 * @copyright Copyright (c) 2015 - 2017. Hryvinskyi Volodymyr
 * @version 0.0.1-beta.0.1
 */

use app\modules\form_builder\components\ActiveForm;

/**
 * Renders one form-builder form.
 *
 * @var $model \app\modules\form_builder\models\FormBuilderForms
 * @var $formModel \app\modules\form_builder\models\FormBuilder
 */

// Per-render counter — presumably so the same form can appear on a page
// more than once with a unique DOM id; TODO confirm Count semantics.
$index = new \app\components\Count();

$form = ActiveForm::begin([
    'model' => $formModel,
    'formId' => $model->id,
    // Unique element id: form id plus render index.
    'id' => 'fb_form_id_' . $model->id . '_' . $index->getIndex(),
    // Url::to() with no arguments resolves to the current URL, so the
    // form submits back to the page it is rendered on.
    'action' => \yii\helpers\Url::to(),
    'options' => ['class' => 'fb_form'],
]);
?>
<?= $model->renderForm($form); ?>
<?php
ActiveForm::end();
|
'use strict';
module.exports = (name) => {
return `<div>
<div class="${name.original}" data-o-component="${name.original}"></div>
</div>`;
};
|
# Load ../.env into the environment (DB_* connection settings), then
# bulk-import the JMdict JSON array into the JMDict collection,
# dropping any existing documents first.
set -o allexport; source ../.env; set +o allexport
echo "${DB_HOST}"
# Expansions are quoted so hosts/passwords containing spaces or glob
# characters survive word splitting.
mongoimport --db "${DB_NAME}" --collection JMDict \
            --host "${DB_HOST}" --port "${DB_PORT}" \
            --username "${DB_USER}" --password "${DB_PASS}" \
            --drop --file "./JMdict Kanjidic files/JMdict/Finalize_JMdict_e.json" --jsonArray
|
<?php
namespace Oro\Bundle\ApiBundle\Processor;
/**
 * The base execution context for processors for actions that execute processors
 * only from one group at the same time.
 */
class ByStepNormalizeResultContext extends NormalizeResultContext
{
    /**
     * the name of the group after that processors from "normalize_result" group are executed
     * if no errors are occurred
     */
    private const SOURCE_GROUP = 'sourceGroup';

    /** the name of the last group that execution was finished with an error or an exception */
    private const FAILED_GROUP = 'failedGroup';

    /**
     * Gets the name of the group after which "normalize_result" processors
     * run when no errors occurred, or null when it is not set.
     */
    public function getSourceGroup(): ?string
    {
        return $this->get(self::SOURCE_GROUP);
    }

    /**
     * Stores the source group name; passing null clears it.
     */
    public function setSourceGroup(?string $groupName): void
    {
        $this->setOrRemove(self::SOURCE_GROUP, $groupName);
    }

    /**
     * Gets the name of the last group whose execution finished with an
     * error or an exception, or null when it is not set.
     */
    public function getFailedGroup(): ?string
    {
        return $this->get(self::FAILED_GROUP);
    }

    /**
     * Stores the failed group name; passing null clears it.
     */
    public function setFailedGroup(?string $groupName): void
    {
        $this->setOrRemove(self::FAILED_GROUP, $groupName);
    }

    /**
     * Sets $value under $key, or removes $key entirely when $value is null,
     * so "not set" and "set to null" remain indistinguishable.
     */
    private function setOrRemove(string $key, ?string $value): void
    {
        if (null === $value) {
            $this->remove($key);
        } else {
            $this->set($key, $value);
        }
    }
}
|
## PubNub C#-based APIs
Learn more at http://www.pubnub.com
## Subdirectory Description
This repo contains the following platform-specific subdirectories:
#### NugetPkg
The latest on Nuget
#### csharp.net
PubNub for MSVS C-Sharp / .net
#### iis
PubNub for the IIS web server
#### mono-for-android
PubNub for Xamarin's Mono for Android
#### monotouch
PubNub for Xamarin's Mono Touch
#### mono-for-linux
PubNub for Mono for Linux
#### GTK+
PubNub for GTK+ (see the demo made under [mono-for-linux](mono-for-linux/Demo/GTK%2B))
#### mono-for-mac
PubNub for Mono for Mac
#### silverlight
PubNub for Silverlight
#### unity
PubNub for Unity 3D
#### windows-phone-7
PubNub for WP7
|
import * as Helpers from '../src/helper';
import moment from 'moment';
const DATE_FORMAT = 'YYYY-MM-DD hh:mm A';

// Formats a Date in the fixed display format used by the assertions.
const formatDateString = (date: Date) => moment(date).format(DATE_FORMAT);
// todo: add a mock Slack application instance for sending packets and listening to responses
beforeAll(() => {
  // Dummy credentials so the Slack SDK can initialize without real secrets.
  process.env.SLACK_SIGNING_SECRET = 'my-test-secret';
  process.env.SLACK_BOT_TOKEN = 'my-test-token';
});

describe('read time from message', () => {
  it('should correctly parse the time in the message', () => {
    const senderTimezoneLabel = 'Asia/Tokyo'; // GMT+9
    // Reference "now" for the parser: 2021-05-02 10:00 AM Tokyo time.
    const today = moment('2021-05-02 10:00 AM', DATE_FORMAT, true).tz(senderTimezoneLabel, true);
    const eventTimestamp = today.clone().utc().unix(); // epoch time in seconds
    const ambiguousMsg = '2 PM';
    const casualMsg = 'from tomorrow after noon until next week';
    const parsedTime = Helpers.parseTimeReference(ambiguousMsg, eventTimestamp, senderTimezoneLabel)[0];
    const casualParse = Helpers.parseTimeReference(casualMsg, eventTimestamp, senderTimezoneLabel);
    // '2 PM' resolves to the same day; the casual phrase yields two
    // ranges starting on the 3rd ("tomorrow") and the 9th ("next week").
    expect(formatDateString(parsedTime.start)).toEqual('2021-05-02 02:00 PM');
    expect(moment(casualParse[0].start).date()).toEqual(3);
    expect(moment(casualParse[1].start).date()).toEqual(9);
  });
});
|
#!/usr/bin/env bash
# Build and run user-service locally against a local DB.
# `env` does not portably pass interpreter arguments on Linux, so tracing
# is enabled with `set -x` instead of `#!/usr/bin/env bash -x`.
set -x
source ~/env.sh
# Abort if the source tree is missing instead of building in the wrong dir.
cd "${AMIGO_SRC}/user-service" || exit 1
mvn clean install -DskipTests
export DB="localhost"
java -jar target/user-service-1.0-SNAPSHOT.jar server config_dev.yml
|
#!/usr/bin/env bash
# Tabulate tweet files for dataset 'A', one invocation per date range.
# The commented-out ranges are disabled — presumably already processed;
# confirm before re-enabling.
./tabulateTweets.py 'A' '09232017-09232017'
./tabulateTweets.py 'A' '09232017-09242017'
#./tabulateTweets.py 'A' '10022017-10082017'
#./tabulateTweets.py 'A' '10102017-10142017'
#./tabulateTweets.py 'A' '10152017-10192017'
|
# Encapsulates some user-oriented business logic
module UsersHelper
  # Builds a Gravatar image URL for +email+ at +size+ pixels.
  # Gravatar specifies hashing the trimmed, lower-cased address, so the
  # email is normalized *before* hashing (the original downcased the hex
  # digest instead, which is already lower-case and thus a no-op).
  # Also serves over https to avoid mixed-content warnings.
  def gravatar_url(email, size)
    gravatar = Digest::MD5.hexdigest(email.strip.downcase)
    "https://gravatar.com/avatar/#{gravatar}.png?s=#{size}"
  end

  # Renders a "re-send invitation" icon link for +user+, or nothing when
  # the user is not re-invitable.
  def reinvite_user_link(user)
    if user.reinvitable?
      link_to tag.i("", class: 'fa fa-envelope', alt: "Re-send invitation", title: "Re-send invitation"), resend_user_invitation_organization_path(user_id: user.id), method: :post
    end
  end
end
|
# frozen_string_literal: true

# dry-system provider that registers the test client in the container.
Test::Container.register_provider(:client) do
  # The class is defined inside the provider block so it only exists once
  # the provider file has been loaded.
  module Test
    class Client
    end
  end

  # Lazily instantiate and register the client when the provider starts.
  start do
    register(:client, Test::Client.new)
  end
end
|
// Configures a Blockly map-programming subtask: available blocks,
// execution limits, end condition, and the France map background.
function initTask(subTask) {
  subTask.gridInfos = {
    hideSaveOrLoad: false,
    actionDelay: 200,
    buttonScaleDrawing: false,
    includeBlocks: {
      groupByCategory: false,
      generatedBlocks: {
        // Map-manipulation blocks exposed to the learner.
        map: [
          'clearMap',
          'addLocation',
          'addRoad',
          'geoDistance',
          'getLatitude',
          'getLongitude',
          'getNeighbors',
          'shortestPath'
        ]
      },
      standardBlocks: {
        includeAll: false
      }
    },
    maxInstructions: 100,
    checkEndEveryTurn: false,
    // Any program that runs to completion succeeds; the framework
    // presumably displays the thrown string as the completion message.
    checkEndCondition: function(context, lastTurn) {
      context.success = true;
      throw(strings.complete);
    },
    // Geo calibration of the background image: longitudes/latitudes of
    // the image edges, used to map pixels to coordinates.
    mapConfig: {
      pin_file: 'img/pin.png',
      map_file: 'img/carteDeFrance.png',
      map_lng_left: -4.85,
      map_lng_right: 9.65,
      map_lat_top: 51.6,
      map_lat_bottom: 41.7
    }
  }
  subTask.data = {
    easy: [{}]
  }
  initBlocklySubTask(subTask)
}

initWrapper(initTask, null, null)
|
import { Column, Entity, JoinColumn, ManyToOne, OneToMany, PrimaryGeneratedColumn, RelationId } from "typeorm";
import { Addresses } from "./Addresses";
import { Interventions } from "./Interventions";
import { Machines } from "./Machines";
import { Technicians } from "./Technicians";
// TypeORM entity for the "Taches" (French: tasks) table. Property names
// are English translations of the French column names given in each
// decorator's `name` option.
@Entity("Taches")
export class Tasks {
  @PrimaryGeneratedColumn({
    name: "id",
    type: "int"
  })
  public id?: number;
  // Assigned technician — column "ZoneResaId"; presumably a reservation
  // -zone based assignment, TODO confirm naming.
  @ManyToOne(() => Technicians, (technician: Technicians) => technician.tasks, {})
  @JoinColumn({ name: "ZoneResaId" })
  public technician?: Technicians | null;
  @RelationId((taches: Tasks) => taches.technician)
  public technicianId?: number;
  // Technician who opened/initiated the task.
  @ManyToOne(() => Technicians, (technician: Technicians) => technician.tasksInitiator, {})
  @JoinColumn({ name: "Initiator" })
  public initiator?: Technicians | null;
  @RelationId((taches: Tasks) => taches.initiator)
  public initiatorId?: number;
  @ManyToOne(() => Machines, (machine: Machines) => machine.tasks, {})
  @JoinColumn({ name: "MachineId" })
  public machine?: Machines | null;
  @RelationId((taches: Tasks) => taches.machine)
  public machineId?: number;
  @ManyToOne(() => Addresses, (address: Addresses) => address.tasks, {})
  @JoinColumn({ name: "ClientsAddressid" })
  public address?: Addresses | null;
  @RelationId((taches: Tasks) => taches.address)
  public addressId?: number;
  // "DateAppel": when the client called the task in.
  @Column("datetimeoffset", {
    name: "DateAppel",
    nullable: false
  })
  public dateCall?: Date;
  @Column("nvarchar", {
    length: 100,
    name: "TypeInter",
    nullable: true
  })
  public taskType?: string | null;
  // "DescriptionPanne": description of the reported breakdown.
  @Column("nvarchar", {
    length: 1500,
    name: "DescriptionPanne",
    nullable: true
  })
  public problem?: string | null;
  @Column("int", {
    default: () => "(0)",
    name: "Priorite_Price",
    nullable: true
  })
  public priority?: number | null;
  // "Date_Depan": when the repair was performed.
  @Column("datetimeoffset", {
    name: "Date_Depan",
    nullable: true
  })
  public dateFix?: Date | null;
  // "MO" (main d'œuvre): labor time spent on the task — TODO confirm unit.
  @Column("float", {
    default: () => "(0)",
    name: "MO",
    nullable: true,
    precision: 53
  })
  public taskTime?: number | null;
  @Column("nvarchar", {
    length: 1500,
    name: "DescriptionDepan",
    nullable: true
  })
  public solution?: string | null;
  // Excluded from default selects (select: false).
  @Column("bit", {
    default: () => "(0)",
    name: "SAVPayant",
    nullable: true,
    select: false
  })
  public SAVPayant?: boolean | null;
  @Column("int", {
    name: "Code",
    nullable: true
  })
  public code?: number | null;
  @Column("int", {
    name: "GPSStatusCode",
    nullable: true
  })
  public gpsStatusCode?: number | null;
  @Column("float", {
    name: "TechRating",
    nullable: true,
    precision: 53
  })
  public ratingTech?: number | null;
  @Column("float", {
    name: "ClientRating",
    nullable: true,
    precision: 53
  })
  public ratingClient?: number | null;
  // Audit columns, defaulted server-side to sysdatetimeoffset().
  @Column("datetimeoffset", {
    default: () => "sysdatetimeoffset()",
    name: "__createdAt",
    nullable: false
  })
  public createdAt?: Date;
  @Column("datetimeoffset", {
    default: () => "sysdatetimeoffset()",
    name: "__updatedAt",
    nullable: false
  })
  public updatedAt?: Date;
  // Soft-delete flag.
  @Column("bit", {
    default: () => "(0)",
    name: "__deleted",
    nullable: false
  })
  public deleted?: boolean;
  @OneToMany(() => Interventions, (interventions: Interventions) => interventions.tasks)
  public interventions?: Array<Interventions>;
  public constructor(init?: Partial<Tasks>) {
    Object.assign(this, init);
  }
}
|
import vapoursynth as vs
import audiocutter

core = vs.core

# D2V index of the MPEG-2 transport-stream capture.
ts = "cap/Senki Zesshou Symphogear XV - 01 (MX).d2v"

src = core.d2v.Source(ts)
# IVTC: field-match (VFM) then drop duplicate frames (VDecimate) to
# recover the film cadence from the telecined broadcast.
src = src.vivtc.VFM(1).vivtc.VDecimate()

ac = audiocutter.AudioCutter()
# Frame ranges of the segments to keep, with chapter names — presumably
# inclusive ranges; confirm against audiocutter's API.
audio = ac.split(src, [(812, 11288, "Intro"), (12966, 23349, "Part A"),
                       (24788, 34763, "Part B"), (34764, 37158, "ED"),
                       (37159, 37974, "Part C")])
ac.ready_qp_and_chapters(audio)
audio.set_output(0)

# Run directly (not via vspipe) to cut the demuxed audio to match.
if __name__ == "__main__":
    ac.cut_audio("mx_audio.m4a", audio_source="audio.aac")
|
/**
 * @jest-environment jsdom
 */
import documentItem from '../../src/source/objects/documentItem'

// documentItem models an HTML document: children[0] is the head element
// and children[1] is the body element.
test('documentItem has valid head', () => {
  expect(documentItem.children[0].tagName).toBe('head')
})

test('documentItem has valid body', () => {
  expect(documentItem.children[1].tagName).toBe('body')
})
|
module Solutions.Day20
( aoc20
) where
import Common.AoCSolutions (AoCSolution (MkAoCSolution),
printSolutions, printTestSolutions)
import Common.Geometry (Grid, Point,
enumerateMultilineStringToVectorMap,
gridNeighbours, neighbours)
import Control.Lens ((&))
import Data.List (maximumBy, minimumBy, nub, sort, sortBy)
import qualified Data.Map as M
import qualified Data.Map.Lazy as M
import qualified Data.Sequence as Seq
import qualified Data.Set as S
import Linear (V2 (V2))
import Text.Trifecta (CharParsing (anyChar, string), Parser,
Parsing (try), newline, some, whiteSpace)
-- | Entry point for day 20: parse the input and print both parts.
aoc20 :: IO ()
aoc20 = do
  printSolutions 20 $ MkAoCSolution parseInput part1
  printSolutions 20 $ MkAoCSolution parseInput part2
-- | Indices of the image-enhancement table whose pixel is lit.
type IEA = S.Set Integer

-- | Build the lookup set from the parsed 512-pixel table.
initIEA :: [Pixel] -> IEA
initIEA pixels =
  S.fromList [ix | (px, ix) <- zip pixels [0 ..], px == LIGHT]
-- | One image pixel. Constructor order matters: the derived Enum gives
-- fromEnum DARK == 0 and fromEnum LIGHT == 1, which toDecimal relies on.
data Pixel
  = DARK
  | LIGHT
  deriving (Enum, Ord, Show, Eq)
-- | Coordinates of lit pixels. NOTE(review): appears unused in this
-- module — confirm before removing.
type LightPixels = S.Set (V2 Int)

-- | Parsed puzzle input: the initial image and the enhancement table.
type Input = (Grid Pixel, IEA)

-- | Image, algorithm and step count; the step count determines the
-- default colour of the infinite background (see stepImageState).
data ImageState =
  MkState
    { _grid :: Grid Pixel
    , _iea :: IEA
    , _iteration :: Integer
    }
  deriving (Eq, Show)
-- | Parse the enhancement table (a leading run of '#'/'.') followed by
-- the image block; any other character is a parse failure.
parseInput :: Parser Input
parseInput = do
  iea <- some $ try parsePixel
  whiteSpace
  rest <- some anyChar
  let grid =
        M.map (either error id . mapPixel) $
        enumerateMultilineStringToVectorMap rest
  pure (grid, initIEA iea)
  where
    parsePixel = do
      p <- mapPixel <$> anyChar
      either fail pure p
    mapPixel p =
      case p of
        '#' -> Right LIGHT
        '.' -> Right DARK
        _ -> Left $ "unexpected character: " ++ [p]
-- | Part 1: number of lit pixels after two enhancement steps.
part1 :: Input -> Int
part1 = M.size . M.filter (== LIGHT) . runInput 2
-- | Part 2: number of lit pixels after fifty enhancement steps.
part2 :: Input -> Int
part2 = M.size . M.filter (== LIGHT) . runInput 50
-- | Enhanced value of one pixel: read the 3x3 neighbourhood in
-- row-major order (via orderPoints), interpret it as a 9-bit binary
-- number, and look that index up in the enhancement set. Cells outside
-- the grid read as default' (the current background colour).
enhance :: Pixel -> Grid Pixel -> IEA -> V2 Int -> Pixel
enhance default' grid iea point =
  if decimal `S.member` iea
    then LIGHT
    else DARK
  where
    pts = sortBy orderPoints $ S.toList $ S.insert point (neighbours point)
    decimal = toDecimal $ map (flip (M.findWithDefault default') grid) pts
-- | Row-major ordering: compare rows (y) first, then columns (x).
orderPoints :: Point -> Point -> Ordering
orderPoints (V2 x1 y1) (V2 x2 y2) = compare y1 y2 <> compare x1 x2
-- | Interpret a list of enum values (most significant digit first) as a
-- binary number, using each element's fromEnum as its bit value.
toDecimal :: (Enum e) => [e] -> Integer
toDecimal = foldl (\acc e -> acc * 2 + toInteger (fromEnum e)) 0
-- | One enhancement step: pad the grid, then enhance every cell.
-- The parity-dependent default' models the infinite background, which
-- flips colour each step when the table maps index 0 to LIGHT (assumed
-- here — matches real puzzle inputs; TODO confirm for index-0-dark
-- tables). ($!) keeps the state strict so lazy grids don't pile up
-- across iterations.
stepImageState :: ImageState -> ImageState
stepImageState (MkState grid iea iteration) =
  ($!) MkState enhanced iea (iteration + 1)
  where
    enhanced = M.mapWithKey enhance' $ expand default' grid
    enhance' point _ = enhance default' grid iea point
    default' =
      if even iteration
        then DARK
        else LIGHT
-- | Run the given number of enhancement steps and return the final grid.
runInput :: Int -> Input -> Grid Pixel
runInput times (grid, iea) =
  _grid (iterate stepImageState (MkState grid iea 0) !! times)
-- | Pad the grid with a 2-cell border of default' on every side, so the
-- next pass can light cells just outside the previous image bounds.
expand :: Pixel -> Grid Pixel -> Grid Pixel
expand default' grid =
  M.fromList $ map (\p -> (p, M.findWithDefault default' p grid)) newRange
  where
    keys = M.keysSet grid
    -- Extreme corners under row-major order give the bounding box.
    (V2 xtl ytl) = minimumBy orderPoints keys
    (V2 xbr ybr) = maximumBy orderPoints keys
    newRange = [V2 x y | x <- [xtl - 2 .. xbr + 2], y <- [ytl - 2 .. ybr + 2]]
-- | Printable form of a grid: '#' for lit pixels, '.' for dark ones.
renderGrid :: Grid Pixel -> Grid Char
renderGrid = M.map render
  where
    render DARK = '.'
    render LIGHT = '#'
|
package scala
package collection
package immutable
import scala.collection.mutable.{Builder, ImmutableBuilder}
/**
 * An immutable multidict: a map from keys to (non-empty) sets of values.
 * @tparam K the type of keys
 * @tparam V the type of values
 */
class MultiDict[K, V] private (elems: Map[K, Set[V]])
  extends collection.MultiDict[K, V]
    with Iterable[(K, V)]
    with collection.MultiDictOps[K, V, MultiDict, MultiDict[K, V]]
    with collection.IterableOps[(K, V), Iterable, MultiDict[K, V]] {

  def sets: Map[K, Set[V]] = elems

  override def multiDictFactory: MapFactory[MultiDict] = MultiDict
  override protected def fromSpecific(coll: IterableOnce[(K, V)]): MultiDict[K, V] = multiDictFactory.from(coll)
  override protected def newSpecificBuilder: mutable.Builder[(K, V), MultiDict[K, V]] = multiDictFactory.newBuilder[K, V]
  override def empty: MultiDict[K, V] = multiDictFactory.empty
  override def withFilter(p: ((K, V)) => Boolean): MultiDictOps.WithFilter[K, V, Iterable, MultiDict] =
    new MultiDictOps.WithFilter(this, p)

  /**
   * @return a new multidict that contains all the entries of this multidict
   *         excepted the entry defined by the given `key` and `value`
   */
  def remove(key: K, value: V): MultiDict[K, V] =
    new MultiDict(elems.updatedWith(key) {
      case Some(vs) =>
        // Drop the key entirely when its last value is removed, so `sets`
        // never holds an empty value-set.
        val updatedVs = vs - value
        if (updatedVs.nonEmpty) Some(updatedVs) else None
      case None => None
    })

  /** Alias for `remove` */
  @`inline` final def - (kv: (K, V)): MultiDict[K, V] = remove(kv._1, kv._2)

  /**
   * @return a new multidict that contains all the entries of this multidict
   *         excepted those associated with the given `key`
   */
  def removeKey(key: K): MultiDict[K, V] = new MultiDict(elems - key)

  /** Alias for `removeKey` */
  @`inline` final def -* (key: K): MultiDict[K, V] = removeKey(key)

  /**
   * @return a new multidict that contains all the entries of this multidict
   *         and the entry defined by the given `key` and `value`
   */
  def add(key: K, value: V): MultiDict[K, V] =
    new MultiDict(elems.updatedWith(key) {
      case None => Some(Set(value))
      case Some(vs) => Some(vs + value)
    })

  /** Alias for `add` */
  @`inline` final def + (kv: (K, V)): MultiDict[K, V] = add(kv._1, kv._2)
}
/** Factory for immutable multidicts. */
object MultiDict extends MapFactory[MultiDict] {

  def empty[K, V]: MultiDict[K, V] = new MultiDict[K, V](Map.empty)

  // Reuses the source as-is when it is already an immutable MultiDict.
  def from[K, V](source: IterableOnce[(K, V)]): MultiDict[K, V] =
    source match {
      case mm: MultiDict[K, V] => mm
      case _ => (newBuilder[K, V] ++= source).result()
    }

  def newBuilder[K, V]: Builder[(K, V), MultiDict[K, V]] =
    new ImmutableBuilder[(K, V), MultiDict[K, V]](empty[K, V]) {
      def addOne(elem: (K, V)): this.type = { elems = elems + elem; this }
    }
}
|
# Copyright 2020 The TensorFlow Ranking Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""An example of using BERT output for finetuning a TF-Ranking model.
Please download `bert_config_file` and `bert_init_ckpt` from tensorflow models
website: https://github.com/tensorflow/models/tree/master/official/nlp/bert.
Note that those checkpoints are TF 2.x compatible, which are different from the
checkpoints downloaded here: https://github.com/google-research/bert. You may
convert a TF 1.x checkpoint to TF 2.x using `tf2_encoder_checkpoint_converter`
under https://github.com/tensorflow/models/tree/master/official/nlp/bert.
The following command downloads an uncased BERT-base model checkpoint for you:
mkdir /tmp/bert && \
wget https://storage.googleapis.com/cloud-tpu-checkpoints/bert/keras_bert/\
uncased_L-12_H-768_A-12.tar.gz -P /tmp/bert && \
tar -xvf /tmp/bert/uncased_L-12_H-768_A-12.tar.gz -C /tmp/bert/ && \
Then, use the following command to run training and evaluation locally with cpu
or gpu. For GPU, please add `CUDA_VISIBLE_DEVICES=0` and `--config=cuda`. The
example toy data contains 3 lists in train and test respectively. Due to the
large number of BERT parameters, if running into the `out-of-memory` issue,
please see: https://github.com/google-research/bert#out-of-memory-issues.
BERT_DIR="/tmp/bert/uncased_L-12_H-768_A-12" && \
OUTPUT_DIR="/tmp/tfr/model/" && \
DATA_DIR="tensorflow_ranking/extension/testdata" && \
rm -rf "${OUTPUT_DIR}" && \
bazel build -c opt \
tensorflow_ranking/extension/examples:tfrbert_example_py_binary && \
./bazel-bin/tensorflow_ranking/extension/examples/tfrbert_example_py_binary \
--train_input_pattern=${DATA_DIR}/tfrbert_elwc_train.tfrecord \
--eval_input_pattern=${DATA_DIR}/tfrbert_elwc_test.tfrecord \
--bert_config_file=${BERT_DIR}/bert_config.json \
--bert_init_ckpt=${BERT_DIR}/bert_model.ckpt \
--bert_max_seq_length=128 \
--model_dir="${OUTPUT_DIR}" \
--list_size=3 \
--loss=softmax_loss \
--train_batch_size=8 \
--eval_batch_size=8 \
--learning_rate=1e-5 \
--num_train_steps=50 \
--num_eval_steps=10 \
--checkpoint_secs=120 \
--num_checkpoints=20
You can use TensorBoard to display the training results stored in $OUTPUT_DIR.
Notes:
* Use --alsologtostderr if the output is not printed into screen.
* The training and evaluation data should be stored in TFRecord format.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
import tensorflow as tf
import tensorflow_ranking as tfr
from tensorflow_ranking.extension import tfrbert
# --- Run-mode and data flags. ---
flags.DEFINE_bool("local_training", True, "If true, run training locally.")
flags.DEFINE_string("train_input_pattern", "",
                    "Input file path pattern used for training.")
flags.DEFINE_string("eval_input_pattern", "",
                    "Input file path pattern used for eval.")
# --- Optimization and checkpointing flags. ---
flags.DEFINE_float("learning_rate", 0.005, "Learning rate for the optimizer.")
flags.DEFINE_integer("train_batch_size", 8,
                     "Number of input records used per batch for training.")
flags.DEFINE_integer("eval_batch_size", 8,
                     "Number of input records used per batch for eval.")
flags.DEFINE_integer("checkpoint_secs", 120,
                     "Saves a model checkpoint every checkpoint_secs seconds.")
flags.DEFINE_integer("num_checkpoints", 1000,
                     "Saves at most num_checkpoints checkpoints in workspace.")
flags.DEFINE_integer(
    "num_train_steps", 200000,
    "Number of training iterations. Default means continuous training.")
flags.DEFINE_integer("num_eval_steps", 100, "Number of evaluation iterations.")
# --- Ranking model flags. ---
flags.DEFINE_string(
    "loss", "softmax_loss",
    "The RankingLossKey deciding the loss function used in training.")
flags.DEFINE_integer("list_size", None, "List size used for training.")
flags.DEFINE_bool("convert_labels_to_binary", False,
                  "If true, relevance labels are set to either 0 or 1.")
flags.DEFINE_string("model_dir", None, "Output directory for models.")
flags.DEFINE_float("dropout_rate", 0.1, "The dropout rate.")
# The following are BERT-related flags.
flags.DEFINE_string(
    "bert_config_file", None,
    "The config json file corresponding to the pre-trained BERT model. This "
    "specifies the model architecture. Please download the model from "
    "the link: https://github.com/google-research/bert")
flags.DEFINE_string(
    "bert_init_ckpt", None,
    "Initial checkpoint from a pre-trained BERT model. Please download from "
    "the link: https://github.com/google-research/bert")
flags.DEFINE_integer(
    "bert_max_seq_length", 512,
    "The maximum input sequence length (#words) after WordPiece tokenization. "
    "Sequences longer than this will be truncated, and sequences shorter "
    "than this will be padded.")
flags.DEFINE_integer(
    "bert_num_warmup_steps", 10000,
    "This is used for adjust learning rate. If global_step < num_warmup_steps, "
    "the learning rate will be `global_step/num_warmup_steps * init_lr`. This "
    "is implemented in the bert/optimization.py file.")
FLAGS = flags.FLAGS
# Name of the per-list size feature fed to the ranking model.
_SIZE = "example_list_size"
# Name used for the scoring network (also the warm-start exclusion scope).
_NETWORK_NAME = "tfrbert"
def context_feature_columns():
  """Returns context feature names mapped to column definitions.

  This example model uses no context features, so the mapping is empty.
  """
  return dict()
def example_feature_columns():
  """Returns example feature names mapped to column definitions.

  `input_ids`, `input_mask` and `segment_ids` are derived from query-document
  pair sequence: [CLS] all query tokens [SEP] all document tokens [SEP]. The
  original tokens are mapped to ids (based on BERT vocabulary) in `input_ids`.
  """
  # All three BERT inputs share the same shape (`bert_max_seq_length`),
  # int64 dtype and zero padding default.
  return {
      name: tf.feature_column.numeric_column(
          name,
          shape=(FLAGS.bert_max_seq_length,),
          default_value=0,
          dtype=tf.int64)
      for name in ("input_ids", "input_mask", "segment_ids")
  }
def get_estimator(hparams):
  """Creates a Keras-based ranking `Estimator` from the given hyperparameters.

  Args:
    hparams: dict of hyperparameters; reads the BERT checkpoint/config keys,
      the optimizer keys, and `model_dir`/`num_checkpoints`/`checkpoint_secs`.

  Returns:
    A `tf.estimator.Estimator` warm-started from the BERT checkpoint, with
    everything outside the `tfrbert` network scope excluded from warm start.
  """
  bert_util = tfrbert.TFRBertUtil(
      bert_config_file=hparams.get("bert_config_file"),
      bert_init_ckpt=hparams.get("bert_init_ckpt"),
      bert_max_seq_length=hparams.get("bert_max_seq_length"))

  # Scoring network: a BERT encoder whose pooled output is fed to a
  # ranking head, with dropout applied to the BERT output.
  ranking_network = tfrbert.TFRBertRankingNetwork(
      context_feature_columns=context_feature_columns(),
      example_feature_columns=example_feature_columns(),
      bert_config_file=hparams.get("bert_config_file"),
      bert_max_seq_length=hparams.get("bert_max_seq_length"),
      bert_output_dropout=hparams.get("dropout_rate"),
      name=_NETWORK_NAME)

  ranker = tfr.keras.model.create_keras_model(
      network=ranking_network,
      loss=tfr.keras.losses.get(
          hparams.get("loss"),
          reduction=tf.compat.v2.losses.Reduction.SUM_OVER_BATCH_SIZE),
      metrics=tfr.keras.metrics.default_keras_metrics(),
      optimizer=bert_util.create_optimizer(
          init_lr=hparams.get("learning_rate"),
          train_steps=hparams.get("num_train_steps"),
          warmup_steps=hparams.get("bert_num_warmup_steps")),
      size_feature_name=_SIZE)

  return tfr.keras.estimator.model_to_estimator(
      model=ranker,
      model_dir=hparams.get("model_dir"),
      config=tf.estimator.RunConfig(
          model_dir=hparams.get("model_dir"),
          keep_checkpoint_max=hparams.get("num_checkpoints"),
          save_checkpoints_secs=hparams.get("checkpoint_secs")),
      warm_start_from=bert_util.get_warm_start_settings(exclude=_NETWORK_NAME))
def train_and_eval():
  """Runs the training and evaluation jobs for a BERT ranking model."""
  # `RankingPipeline` expects this full set of common TF-Ranking
  # hyperparameters; all of them must be present.
  pipeline_hparams = dict(
      train_input_pattern=FLAGS.train_input_pattern,
      eval_input_pattern=FLAGS.eval_input_pattern,
      learning_rate=FLAGS.learning_rate,
      train_batch_size=FLAGS.train_batch_size,
      eval_batch_size=FLAGS.eval_batch_size,
      checkpoint_secs=FLAGS.checkpoint_secs,
      num_checkpoints=FLAGS.num_checkpoints,
      num_train_steps=FLAGS.num_train_steps,
      num_eval_steps=FLAGS.num_eval_steps,
      loss=FLAGS.loss,
      dropout_rate=FLAGS.dropout_rate,
      list_size=FLAGS.list_size,
      listwise_inference=True,  # Only supports `True` in keras Ranking Network.
      convert_labels_to_binary=FLAGS.convert_labels_to_binary,
      model_dir=FLAGS.model_dir,
      bert_config_file=FLAGS.bert_config_file,
      bert_init_ckpt=FLAGS.bert_init_ckpt,
      bert_max_seq_length=FLAGS.bert_max_seq_length,
      bert_num_warmup_steps=FLAGS.bert_num_warmup_steps)

  pipeline = tfr.ext.pipeline.RankingPipeline(
      context_feature_columns=context_feature_columns(),
      example_feature_columns=example_feature_columns(),
      hparams=pipeline_hparams,
      estimator=get_estimator(pipeline_hparams),
      label_feature_name="relevance",
      label_feature_type=tf.int64,
      size_feature_name=_SIZE)
  pipeline.train_and_eval(local_training=FLAGS.local_training)
def main(_):
  """Absl/TF app entry point; the unused argv list is discarded."""
  train_and_eval()
# Parse flags and dispatch to main() via the TF v1 app runner.
if __name__ == "__main__":
  tf.compat.v1.app.run()
|
using System.Reflection;
using System.Threading.Tasks;
using Tharga.Quilt4Net.DataTransfer;
namespace Tharga.Quilt4Net
{
    public static partial class Session
    {
        /// <summary>
        /// Registers the session asynchronously using the provided first assembly.
        /// </summary>
        /// <param name="firstAssembly">Assembly used to identify the application.</param>
        /// <returns>The registration response.</returns>
        public static async Task<SessionResponse> RegisterAsync(Assembly firstAssembly)
        {
            // Task.Run is preferred over Task<T>.Factory.StartNew here:
            // StartNew captures TaskScheduler.Current (not Default), which can
            // run the work on an unexpected scheduler; Task.Run always uses
            // the thread pool with sensible defaults.
            return await Task.Run(() => Register(firstAssembly));
        }

        /// <summary>
        /// Registers the session asynchronously using default assembly resolution.
        /// </summary>
        /// <returns>The registration response.</returns>
        public static async Task<SessionResponse> RegisterAsync()
        {
            return await Task.Run(() => Register());
        }
    }
}
|
const express = require('express')

// Serve the repository's static/ directory (two levels above this module).
const staticDirectory = `${__dirname}/../../static`

module.exports = express.static(staticDirectory)
|
namespace CIOSDigital.FlightPlanner.Model
{
    /// <summary>
    /// An immutable latitude/longitude pair stored as decimal degrees
    /// (truncated to six decimal places) together with a degrees/decimal-minutes
    /// string rendering (e.g. "47°36.00'N 122°19.00'W").
    /// NOTE(review): construction currently forces every coordinate into the
    /// Pacific-Northwest quadrant (latitude made non-negative, longitude made
    /// non-positive) via the hard-coded <c>PNW</c> flag below.
    /// </summary>
    public struct Coordinate
    {
        // Scale factor used to truncate degrees to six decimal places.
        private const int precision = 1000000;

        public decimal Latitude { get; }
        public decimal Longitude { get; }

        // Degrees/decimal-minutes renderings of the coordinate.
        public string dmsLatitude { get; }
        public string dmsLongitude { get; }

        /// <summary>Copy constructor; re-derives the DMS strings.</summary>
        public Coordinate(Coordinate coord)
        {
            this = new Coordinate(coord.Latitude, coord.Longitude);
        }

        /// <summary>
        /// Parses a coordinate from degrees/decimal-minutes strings such as
        /// "47°36.0'N" and "122°19.0'W". Missing pieces are defaulted
        /// ("°0", ".0", and an 'N'/'W' hemisphere suffix respectively).
        /// </summary>
        public Coordinate(string latitude, string longitude)
        {
            // All fields must be definitely assigned before the instance
            // parser methods below may be called; these placeholder values
            // are then replaced wholesale by the delegated constructor.
            this.dmsLatitude = latitude.Replace("\'", "");
            this.dmsLongitude = longitude.Replace("\'", "");
            this.Latitude = 0;
            this.Longitude = 0;
            decimal lat = strtodecLattitude(latitude);
            decimal longi = strtodecLongitude(longitude);
            this = new Coordinate(lat, longi);
        }

        /// <summary>
        /// Builds a coordinate from decimal degrees, truncating to six decimal
        /// places and forcing the PNW quadrant (see struct-level note).
        /// </summary>
        /// <exception cref="System.ArgumentOutOfRangeException">
        /// Thrown when the result falls outside latitude [-90, 80] or
        /// longitude [-180, 180].
        /// </exception>
        public Coordinate(decimal latitude, decimal longitude)
        {
            bool PNW = true;
            this.Latitude = 0;
            this.Longitude = 0;
            if (PNW)
            {
                // Truncate (not round) to `precision` decimal places, then
                // clamp the signs into the north-west quadrant.
                this.Latitude = System.Math.Abs(System.Math.Truncate(latitude * precision) / precision);
                this.Longitude = System.Math.Abs(System.Math.Truncate(longitude * precision) / precision) * -1;
            }
            if (this.Latitude < -90 || this.Latitude > 80 || this.Longitude < -180 || this.Longitude > 180)
                throw new System.ArgumentOutOfRangeException("");
            this.dmsLatitude = "";
            this.dmsLongitude = "";
            this.dmsLatitude = dectostringLatitude(this.Latitude);
            this.dmsLongitude = dectostringLongitude(this.Longitude);
        }

        public override bool Equals(object obj)
        {
            return (obj as Coordinate? != null)
                ? this.Equals((Coordinate)obj)
                : false;
        }

        /// <summary>Value equality on the decimal degree components only.</summary>
        public bool Equals(Coordinate other)
        {
            return this.Latitude == other.Latitude
                && this.Longitude == other.Longitude;
        }

        public override int GetHashCode()
        {
            return this.Latitude.GetHashCode() ^ this.Longitude.GetHashCode();
        }

        public static bool operator ==(Coordinate left, Coordinate right)
        {
            return left.Equals(right);
        }

        public static bool operator !=(Coordinate left, Coordinate right)
        {
            return !left.Equals(right);
        }

        /// <summary>Renders the coordinate in degrees/decimal-minutes form.</summary>
        public override string ToString()
        {
            return dmsLatitude + " " + dmsLongitude;
        }

        // Currently unused helper kept for API symmetry with the parsers.
        private string decimalToDMS(decimal latitude, decimal longitude)
        {
            return dectostringLatitude(latitude) + " " + dectostringLongitude(longitude);
        }

        /// <summary>Formats decimal-degree latitude as e.g. "47°36.00'N".</summary>
        private string dectostringLatitude(decimal lat)
        {
            int d;
            double m;
            string sign;
            // NOTE(review): (int) truncation maps values in (-1, 0) to 0, so
            // small southern latitudes render as "N" — confirm intended.
            d = (int)lat;
            m = (double)(lat - d) * 60;
            sign = (d < 0) ? "S" : "N";
            return string.Format("{0}°{1:0.00}'{2}", System.Math.Abs(d), System.Math.Abs(m), sign);
        }

        /// <summary>Formats decimal-degree longitude as e.g. "122°19.00'W".</summary>
        private string dectostringLongitude(decimal longi)
        {
            int d;
            double m;
            string sign;
            d = (int)longi;
            m = (double)(longi - d) * 60;
            sign = (d < 0) ? "W" : "E";
            return string.Format("{0}°{1:0.00}'{2}", System.Math.Abs(d), System.Math.Abs(m), sign);
        }

        /// <summary>
        /// Parses a DMS latitude string ("47°36.0'N"). Degrees outside
        /// [-90, 80] throw; an 'S' hemisphere suffix negates the result.
        /// </summary>
        private decimal strtodecLattitude(string lat)
        {
            decimal latitude;
            decimal deg;
            decimal min;
            // Normalize so the string always contains '°', '.', and '\''.
            if (!lat.Contains("°"))
                lat += "°0";
            if (!lat.Contains("."))
                lat += ".0";
            if (!lat.Contains("'"))
                lat += "'N";
            string[] seperatedString = lat.Split(new char[] { '°', '\'' });
            System.Decimal.TryParse(seperatedString[0], out deg);
            System.Decimal.TryParse(seperatedString[1], out min);
            if ((deg < -90 || deg > 80))
            {
                throw new System.ArgumentOutOfRangeException("latitude");
            }
            latitude = System.Math.Abs(deg) + min / 60;
            if (seperatedString.Length == 3 && seperatedString[2] == "S")
                latitude *= -1;
            return latitude;
        }

        /// <summary>
        /// Parses a DMS longitude string ("122°19.0'W"). Degrees outside
        /// [-180, 180] throw; a 'W' hemisphere suffix negates the result.
        /// </summary>
        private decimal strtodecLongitude(string longi)
        {
            decimal longitude;
            decimal deg, min;
            // Normalize so the string always contains '°', '.', and '\''.
            if (!longi.Contains("°"))
                longi += "°0";
            if (!longi.Contains("."))
                longi += ".0";
            if (!longi.Contains("'"))
                longi += "'W";
            string[] seperatedString = longi.Split(new char[] { '°', '\'' });
            System.Decimal.TryParse(seperatedString[0], out deg);
            System.Decimal.TryParse(seperatedString[1], out min);
            longitude = System.Math.Abs(deg) + min / 60;
            // Guard the index like the latitude parser does (previously an
            // unguarded seperatedString[2] access). The normalization above
            // guarantees at least 3 parts today, so this is defensive only.
            if (seperatedString.Length >= 3 && seperatedString[2] == "W")
                longitude *= -1;
            if ((deg < -180 || deg > 180))
            {
                throw new System.ArgumentOutOfRangeException("longitude");
            }
            return longitude;
        }
    }
}
|
// Copyright (c) 2020-2021 Yinsen (Tesla) Zhang.
// Use of this source code is governed by the MIT license that can be found in the LICENSE.md file.
package org.aya.tyck.pat;
import kala.collection.SeqView;
import kala.collection.immutable.ImmutableSeq;
import kala.collection.mutable.DynamicSeq;
import org.aya.concrete.Pattern;
import org.aya.core.pat.Pat;
import org.aya.core.term.Term;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import java.util.function.Consumer;
import java.util.function.Function;
/**
* Multi-case trees.
*
* @author ice1000
*/
public sealed interface MCT {
  /** Picks from {@code subPatsSeq} the rows whose indices appear in {@code pats}'s contents. */
  static @NotNull ImmutableSeq<SubPats> extract(PatClass pats, @NotNull ImmutableSeq<SubPats> subPatsSeq) {
    return pats.contents().map(subPatsSeq::get);
  }
  /** Collects every leaf {@link PatClass} of this tree, in traversal order. */
  default @NotNull ImmutableSeq<PatClass> toSeq() {
    var buffer = DynamicSeq.<PatClass>create();
    forEach(buffer::append);
    return buffer.toImmutableSeq();
  }
  /** Applies {@code f} to every leaf {@link PatClass} of this tree. */
  void forEach(@NotNull Consumer<PatClass> f);
  /** Rebuilds the tree with each leaf replaced by {@code f}'s result. */
  @NotNull MCT map(@NotNull Function<PatClass, PatClass> f);
  /** Rebuilds the tree with each leaf replaced by a whole subtree from {@code f}. */
  @NotNull MCT flatMap(@NotNull Function<PatClass, MCT> f);
  /** A leaf of the case tree: a set of clause indices, possibly carrying an error. */
  sealed interface PatClass extends MCT {
    /** Indices of the clauses grouped into this class. */
    @NotNull ImmutableSeq<Integer> contents();
    /** Pushes this class's information (e.g. an error message) into {@code mct}. */
    @NotNull MCT propagate(@NotNull MCT mct);
    @Override default void forEach(@NotNull Consumer<PatClass> f) {
      f.accept(this);
    }
    @Override default @NotNull PatClass map(@NotNull Function<PatClass, PatClass> f) {
      return f.apply(this);
    }
    @Override default @NotNull MCT flatMap(@NotNull Function<PatClass, MCT> f) {
      return f.apply(this);
    }
  }
  /** A plain leaf; {@link #propagate} returns the given tree unchanged. */
  record Leaf(@NotNull ImmutableSeq<Integer> contents) implements PatClass {
    @Override public @NotNull MCT propagate(@NotNull MCT mct) {
      return mct;
    }
  }
  /** A leaf carrying an error; {@link #propagate} stamps the error onto every leaf of the tree. */
  record Error(
    @NotNull ImmutableSeq<Integer> contents,
    @NotNull ImmutableSeq<Pattern> errorMessage
  ) implements PatClass {
    @Override public @NotNull MCT propagate(@NotNull MCT mct) {
      return mct.map(newClz -> new Error(newClz.contents(), errorMessage));
    }
  }
  /** An inner node splitting on {@code type}; operations recurse into each child. */
  record Node(@NotNull Term type, @NotNull ImmutableSeq<MCT> children) implements MCT {
    @Override public void forEach(@NotNull Consumer<PatClass> f) {
      children.forEach(child -> child.forEach(f));
    }
    @Override public @NotNull Node map(@NotNull Function<PatClass, PatClass> f) {
      return new Node(type, children.map(child -> child.map(f)));
    }
    @Override public @NotNull Node flatMap(@NotNull Function<PatClass, MCT> f) {
      return new Node(type, children.map(child -> child.flatMap(f)));
    }
  }
  /** A row of sub-patterns together with its clause index {@code ix}. */
  record SubPats(@NotNull SeqView<Pat> pats, int ix) {
    @Contract(pure = true) public @NotNull Pat head() {
      // This 'inline' is actually a 'dereference'
      return pats.first().inline();
    }
    /** Drops the head pattern, keeping the clause index. */
    @Contract(pure = true) public @NotNull SubPats drop() {
      return new SubPats(pats.drop(1), ix);
    }
  }
}
|
-- Add a `haibao` (poster-template path) column to both classified tables.
-- The COMMENT text states the value is a comma-separated list of paths.
ALTER TABLE `qb_fenlei_module` ADD `haibao` VARCHAR( 255 ) NOT NULL COMMENT '海报模板路径,多个用逗号隔开';
ALTER TABLE `qb_fenlei_sort` ADD `haibao` VARCHAR( 255 ) NOT NULL COMMENT '海报模板路径,多个用逗号隔开';
|
```div-parameter
## Parameter Console
| Parameter | Format | Default | Mandatory | Description |
| --- | --- | :---: | :---: | --- |
| markdown | <dt><Boolean> | true | yes | <dt>true<dd><dt>false<dd> |
```
@@include(../../core/dom/dom_p.md)
|
// Copyright 2019 themis.rs maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[macro_use]
extern crate criterion;
use criterion::{AxisScale, BenchmarkId, Criterion, PlotConfiguration, Throughput};
use soter::rand;
/// Benchmarks `rand::bytes()` over buffer sizes from 0 to 2 MiB,
/// reusing one maximally-sized scratch buffer across all runs.
fn bytes(c: &mut Criterion) {
    const SIZES: &[usize] = &[0, 8, 64, 512, 4096, 32768, 262_144, 2_097_152];
    // One allocation big enough for the largest size; each run fills a prefix.
    let mut scratch = vec![0; *SIZES.iter().max().unwrap()];

    let mut group = c.benchmark_group("rand::bytes()");
    // Sizes span several orders of magnitude, so plot on a log scale.
    group.plot_config(PlotConfiguration::default().summary_scale(AxisScale::Logarithmic));
    for &size in SIZES {
        group.throughput(Throughput::Bytes(size as u64));
        group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
            b.iter(|| rand::bytes(&mut scratch[0..size]));
        });
    }
    group.finish();
}
// Register the `bytes` benchmark and generate the benchmark `main()`.
criterion_group!(soter_rand, bytes);
criterion_main!(soter_rand);
|
require 'mspec/utils/version'
require 'mspec/guards/guard'
class Object
  # Converts its argument(s) to Strings when RUBY_VERSION is less than 1.9
  # and to Symbols otherwise ("st(ring)-a-sy(mbol)").
  #
  # Accepts either a single argument or several. With one argument the
  # converted value is returned directly; with several, an Array of all the
  # converted values is returned.
  #
  # For example, if RUBY_VERSION == 1.8.7
  #
  #   stasy(:some) => "some"
  #   stasy("nom") => "nom"
  #
  # while if RUBY_VERSION == 1.9.0
  #
  #   stasy(:some) => :some
  #   stasy("nom") => :nom
  def stasy(one, *rest)
    pre_19 = SpecVersion.new(SpecGuard.ruby_version) < "1.9"
    conversion = pre_19 ? :to_s : :to_sym
    first = one.send(conversion)
    return first if rest.empty?
    [first, *rest.map { |x| x.send(conversion) }]
  end
end
|
# Moose data class for a single MTurk Review Policy result entry.
# Attribute semantics are documented in the POD below.
package Paws::MTurk::ReviewResultDetail;
  use Moose;
  # All attributes are read-only strings populated from the MTurk API.
  has ActionId => (is => 'ro', isa => 'Str');
  has Key => (is => 'ro', isa => 'Str');
  has QuestionId => (is => 'ro', isa => 'Str');
  has SubjectId => (is => 'ro', isa => 'Str');
  has SubjectType => (is => 'ro', isa => 'Str');
  has Value => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::MTurk::ReviewResultDetail
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::MTurk::ReviewResultDetail object:
$service_obj->Method(Att1 => { ActionId => $value, ..., Value => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::MTurk::ReviewResultDetail object:
$result = $service_obj->Method(...);
$result->Att1->ActionId
=head1 DESCRIPTION
This data structure is returned multiple times for each result
specified in the Review Policy.
=head1 ATTRIBUTES
=head2 ActionId => Str
A unique identifier of the Review action result.
=head2 Key => Str
Key identifies the particular piece of reviewed information.
=head2 QuestionId => Str
Specifies the QuestionId the result is describing. Depending on whether
the TargetType is a HIT or Assignment this results could specify
multiple values. If TargetType is HIT and QuestionId is absent, then
the result describes results of the HIT, including the HIT agreement
score. If ObjectType is Assignment and QuestionId is absent, then the
result describes the Worker's performance on the HIT.
=head2 SubjectId => Str
The HITID or AssignmentId about which this result was taken. Note that
HIT-level Review Policies will often emit results about both the HIT
itself and its Assignments, while Assignment-level review policies
generally only emit results about the Assignment itself.
=head2 SubjectType => Str
The type of the object from the SubjectId field.
=head2 Value => Str
The values of Key provided by the review policies you have selected.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::MTurk>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.