text
stringlengths 27
775k
|
---|
/* Demonstrates reading through a pointer to an array of int pointers. */
typedef int *(*intpa3p_t)[3]; /* pointer to array[3] of int*            */
typedef int *intpa3_t[3];     /* array[3] of int*                      */

int main() {
    int x = 3;
    intpa3_t f = {&x, &x, &x}; /* three pointers, all aimed at x       */
    intpa3p_t g = &f;          /* address of the whole array           */
    int y = *(*g)[1];          /* deref middle element: y == 3         */
    (void)y;                   /* value intentionally unused           */
    return 0;
}
|
import React from "react";
function Footer() {
return (
<footer className="footer d-flex align-items-end">
<div className="d-flex flex-column" style={{ height: 70 }}>
<div>
</div>
</div>
<div className="d-flex align-items-start">
<div>© {new Date().getFullYear()} Tokenaro.com</div>
</div>
</footer>
);
}
export { Footer };
|
import Component from 'ember-component';
import layout from '../templates/components/tta-if-resolved';
import { task } from 'ember-concurrency';
import get from 'ember-metal/get';
// Tagless component that exposes an ember-concurrency task resolving the
// promise passed as the first positional param (see the template for how
// resolveTask's state/value is consumed).
export default Component.extend({
layout,
// No wrapping DOM element is rendered.
tagName: '',
// Resolves the `promise` attribute. `.keepLatest()` drops re-triggers while
// a run is in flight; `.on('didReceiveAttrs')` re-runs the task whenever a
// new promise is passed in. NOTE(review): behavior when the promise rejects
// is delegated to the task's error state — confirm the template handles it.
resolveTask: task(function *() {
return yield get(this, 'promise');
}).keepLatest().on('didReceiveAttrs')
}).reopenClass({
// Allows {{tta-if-resolved somePromise}} instead of promise=somePromise.
positionalParams: ['promise']
});
|
// Copyright 2020 Contributors to the Parsec project.
// SPDX-License-Identifier: Apache-2.0
use std::convert::TryFrom;
use tss_esapi::constants::*;
use tss_esapi::tss2_esys::TPM2_ALG_ID;
use tss_esapi::utils::algorithm_specifiers::*;
// Round-trip conversion tests between ObjectType and raw TPM2_ALG_ID values.
// NOTE(review): "alogithm" in the test names is a typo for "algorithm",
// repeated in every module of this file.
mod test_object_type {
use super::*;
// Each ObjectType variant maps to its TPM2_ALG_* constant.
#[test]
fn test_into_alogithm_id() {
assert_eq!(Into::<TPM2_ALG_ID>::into(ObjectType::Null), TPM2_ALG_NULL);
assert_eq!(Into::<TPM2_ALG_ID>::into(ObjectType::Rsa), TPM2_ALG_RSA);
assert_eq!(Into::<TPM2_ALG_ID>::into(ObjectType::Ecc), TPM2_ALG_ECC);
assert_eq!(
Into::<TPM2_ALG_ID>::into(ObjectType::KeyedHash),
TPM2_ALG_KEYEDHASH
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(ObjectType::SymCipher),
TPM2_ALG_SYMCIPHER
);
}
// Raw constants convert back; TPM2_ALG_ERROR must be rejected.
#[test]
fn test_try_from_alogithm_id() {
assert_eq!(
ObjectType::try_from(TPM2_ALG_NULL).unwrap(),
ObjectType::Null
);
assert_eq!(ObjectType::try_from(TPM2_ALG_RSA).unwrap(), ObjectType::Rsa);
assert_eq!(ObjectType::try_from(TPM2_ALG_ECC).unwrap(), ObjectType::Ecc);
assert_eq!(
ObjectType::try_from(TPM2_ALG_KEYEDHASH).unwrap(),
ObjectType::KeyedHash
);
assert_eq!(
ObjectType::try_from(TPM2_ALG_SYMCIPHER).unwrap(),
ObjectType::SymCipher
);
assert!(
ObjectType::try_from(TPM2_ALG_ERROR).is_err(),
"Error should not exist in ObjectType"
);
}
}
// Round-trip conversion tests for AsymmetricAlgorithm <-> TPM2_ALG_ID.
mod test_asymmetric_algorithm {
use super::*;
#[test]
fn test_into_alogithm_id() {
assert_eq!(
Into::<TPM2_ALG_ID>::into(AsymmetricAlgorithm::Rsa),
TPM2_ALG_RSA
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(AsymmetricAlgorithm::Ecc),
TPM2_ALG_ECC
);
}
// Reverse direction, plus rejection of TPM2_ALG_ERROR.
#[test]
fn test_try_from_alogithm_id() {
assert_eq!(
AsymmetricAlgorithm::try_from(TPM2_ALG_RSA).unwrap(),
AsymmetricAlgorithm::Rsa
);
assert_eq!(
AsymmetricAlgorithm::try_from(TPM2_ALG_ECC).unwrap(),
AsymmetricAlgorithm::Ecc
);
assert!(
AsymmetricAlgorithm::try_from(TPM2_ALG_ERROR).is_err(),
"Error should not exist in AsymmetricAlgorithm"
);
}
}
// Round-trip conversion tests for KeyedHash <-> TPM2_ALG_ID.
mod test_keyed_hash {
use super::*;
#[test]
fn test_into_alogithm_id() {
assert_eq!(Into::<TPM2_ALG_ID>::into(KeyedHash::Hmac), TPM2_ALG_HMAC);
assert_eq!(Into::<TPM2_ALG_ID>::into(KeyedHash::Xor), TPM2_ALG_XOR);
}
// Reverse direction, plus rejection of TPM2_ALG_ERROR.
#[test]
fn test_try_from_alogithm_id() {
assert_eq!(KeyedHash::try_from(TPM2_ALG_HMAC).unwrap(), KeyedHash::Hmac);
assert_eq!(KeyedHash::try_from(TPM2_ALG_XOR).unwrap(), KeyedHash::Xor);
assert!(
KeyedHash::try_from(TPM2_ALG_ERROR).is_err(),
"Error should not exist in KeyedHash"
);
}
}
// Round-trip conversion tests for SymmetricAlgorithm <-> TPM2_ALG_ID.
mod test_symmetric_algorithm {
use super::*;
#[test]
fn test_into_alogithm_id() {
assert_eq!(
Into::<TPM2_ALG_ID>::into(SymmetricAlgorithm::Aes),
TPM2_ALG_AES
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(SymmetricAlgorithm::Camellia),
TPM2_ALG_CAMELLIA
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(SymmetricAlgorithm::Sm4),
TPM2_ALG_SM4
);
}
// Reverse direction, plus rejection of TPM2_ALG_ERROR.
#[test]
fn test_try_from_alogithm_id() {
assert_eq!(
SymmetricAlgorithm::try_from(TPM2_ALG_AES).unwrap(),
SymmetricAlgorithm::Aes
);
assert_eq!(
SymmetricAlgorithm::try_from(TPM2_ALG_CAMELLIA).unwrap(),
SymmetricAlgorithm::Camellia
);
assert_eq!(
SymmetricAlgorithm::try_from(TPM2_ALG_SM4).unwrap(),
SymmetricAlgorithm::Sm4
);
assert!(
SymmetricAlgorithm::try_from(TPM2_ALG_ERROR).is_err(),
"Error should not exist in SymmetricAlgorithm"
);
}
}
// Round-trip conversion tests for HashingAlgorithm <-> TPM2_ALG_ID,
// covering SHA-1/2/3 and SM3 digest families.
mod test_hashing_algorithm {
use super::*;
#[test]
fn test_into_alogithm_id() {
assert_eq!(
Into::<TPM2_ALG_ID>::into(HashingAlgorithm::Sha1),
TPM2_ALG_SHA1
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(HashingAlgorithm::Sha256),
TPM2_ALG_SHA256
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(HashingAlgorithm::Sha384),
TPM2_ALG_SHA384
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(HashingAlgorithm::Sha512),
TPM2_ALG_SHA512
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(HashingAlgorithm::Sm3_256),
TPM2_ALG_SM3_256
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(HashingAlgorithm::Sha3_256),
TPM2_ALG_SHA3_256
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(HashingAlgorithm::Sha3_384),
TPM2_ALG_SHA3_384
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(HashingAlgorithm::Sha3_512),
TPM2_ALG_SHA3_512
);
}
// Reverse direction, plus rejection of TPM2_ALG_ERROR.
#[test]
fn test_try_from_alogithm_id() {
assert_eq!(
HashingAlgorithm::try_from(TPM2_ALG_SHA1).unwrap(),
HashingAlgorithm::Sha1
);
assert_eq!(
HashingAlgorithm::try_from(TPM2_ALG_SHA256).unwrap(),
HashingAlgorithm::Sha256
);
assert_eq!(
HashingAlgorithm::try_from(TPM2_ALG_SHA384).unwrap(),
HashingAlgorithm::Sha384
);
assert_eq!(
HashingAlgorithm::try_from(TPM2_ALG_SHA512).unwrap(),
HashingAlgorithm::Sha512
);
assert_eq!(
HashingAlgorithm::try_from(TPM2_ALG_SM3_256).unwrap(),
HashingAlgorithm::Sm3_256
);
assert_eq!(
HashingAlgorithm::try_from(TPM2_ALG_SHA3_256).unwrap(),
HashingAlgorithm::Sha3_256
);
assert_eq!(
HashingAlgorithm::try_from(TPM2_ALG_SHA3_384).unwrap(),
HashingAlgorithm::Sha3_384
);
assert_eq!(
HashingAlgorithm::try_from(TPM2_ALG_SHA3_512).unwrap(),
HashingAlgorithm::Sha3_512
);
assert!(
HashingAlgorithm::try_from(TPM2_ALG_ERROR).is_err(),
"Error should not exist in HashingAlgorithm"
);
}
}
// Round-trip conversion tests for SignatureScheme <-> TPM2_ALG_ID.
mod test_signature_scheme {
use super::*;
#[test]
fn test_into_alogithm_id() {
assert_eq!(
Into::<TPM2_ALG_ID>::into(SignatureScheme::RsaSsa),
TPM2_ALG_RSASSA
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(SignatureScheme::RsaPss),
TPM2_ALG_RSAPSS
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(SignatureScheme::EcDsa),
TPM2_ALG_ECDSA
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(SignatureScheme::EcDaa),
TPM2_ALG_ECDAA
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(SignatureScheme::EcSchnorr),
TPM2_ALG_ECSCHNORR
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(SignatureScheme::Sm2),
TPM2_ALG_SM2
);
}
// Reverse direction, plus rejection of TPM2_ALG_ERROR.
#[test]
fn test_try_from_alogithm_id() {
assert_eq!(
SignatureScheme::try_from(TPM2_ALG_RSASSA).unwrap(),
SignatureScheme::RsaSsa
);
assert_eq!(
SignatureScheme::try_from(TPM2_ALG_RSAPSS).unwrap(),
SignatureScheme::RsaPss
);
assert_eq!(
SignatureScheme::try_from(TPM2_ALG_ECDSA).unwrap(),
SignatureScheme::EcDsa
);
assert_eq!(
SignatureScheme::try_from(TPM2_ALG_ECDAA).unwrap(),
SignatureScheme::EcDaa
);
assert_eq!(
SignatureScheme::try_from(TPM2_ALG_ECSCHNORR).unwrap(),
SignatureScheme::EcSchnorr
);
assert_eq!(
SignatureScheme::try_from(TPM2_ALG_SM2).unwrap(),
SignatureScheme::Sm2
);
assert!(
SignatureScheme::try_from(TPM2_ALG_ERROR).is_err(),
"Error should not exist in SignatureScheme"
);
}
}
// Round-trip conversion tests for RsaSignatureScheme <-> TPM2_ALG_ID.
mod test_rsa_signature_scheme {
use super::*;
#[test]
fn test_into_alogithm_id() {
assert_eq!(
Into::<TPM2_ALG_ID>::into(RsaSignatureScheme::RsaPss),
TPM2_ALG_RSAPSS
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(RsaSignatureScheme::RsaSsa),
TPM2_ALG_RSASSA
);
}
// Reverse direction, plus rejection of TPM2_ALG_ERROR.
#[test]
fn test_try_from_alogithm_id() {
assert_eq!(
RsaSignatureScheme::try_from(TPM2_ALG_RSASSA).unwrap(),
RsaSignatureScheme::RsaSsa
);
assert_eq!(
RsaSignatureScheme::try_from(TPM2_ALG_RSAPSS).unwrap(),
RsaSignatureScheme::RsaPss
);
assert!(
RsaSignatureScheme::try_from(TPM2_ALG_ERROR).is_err(),
"Error should not exist in RsaSignatureScheme"
);
}
}
// Round-trip conversion tests for EccSignatureScheme <-> TPM2_ALG_ID.
mod test_ecc_signature_scheme {
use super::*;
#[test]
fn test_into_alogithm_id() {
assert_eq!(
Into::<TPM2_ALG_ID>::into(EccSignatureScheme::EcDsa),
TPM2_ALG_ECDSA
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(EccSignatureScheme::EcDaa),
TPM2_ALG_ECDAA
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(EccSignatureScheme::EcSchnorr),
TPM2_ALG_ECSCHNORR
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(EccSignatureScheme::Sm2),
TPM2_ALG_SM2
);
}
// Reverse direction, plus rejection of TPM2_ALG_ERROR.
#[test]
fn test_try_from_alogithm_id() {
assert_eq!(
EccSignatureScheme::try_from(TPM2_ALG_ECDSA).unwrap(),
EccSignatureScheme::EcDsa
);
assert_eq!(
EccSignatureScheme::try_from(TPM2_ALG_ECDAA).unwrap(),
EccSignatureScheme::EcDaa
);
assert_eq!(
EccSignatureScheme::try_from(TPM2_ALG_ECSCHNORR).unwrap(),
EccSignatureScheme::EcSchnorr
);
assert_eq!(
EccSignatureScheme::try_from(TPM2_ALG_SM2).unwrap(),
EccSignatureScheme::Sm2
);
assert!(
EccSignatureScheme::try_from(TPM2_ALG_ERROR).is_err(),
"Error should not exist in EccSignatureScheme"
);
}
}
// Round-trip conversion tests for AsymmetricEncryptionScheme <-> TPM2_ALG_ID.
// NOTE(review): the module name misspells "encryption" as "encrytion".
mod test_asymmetric_encrytion_scheme {
use super::*;
#[test]
fn test_into_alogithm_id() {
assert_eq!(
Into::<TPM2_ALG_ID>::into(AsymmetricEncryptionScheme::Oaep),
TPM2_ALG_OAEP
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(AsymmetricEncryptionScheme::RsaEs),
TPM2_ALG_RSAES
);
assert_eq!(
Into::<TPM2_ALG_ID>::into(AsymmetricEncryptionScheme::EcDh),
TPM2_ALG_ECDH
);
}
// Reverse direction, plus rejection of TPM2_ALG_ERROR.
#[test]
fn test_try_from_alogithm_id() {
assert_eq!(
AsymmetricEncryptionScheme::try_from(TPM2_ALG_OAEP).unwrap(),
AsymmetricEncryptionScheme::Oaep
);
assert_eq!(
AsymmetricEncryptionScheme::try_from(TPM2_ALG_RSAES).unwrap(),
AsymmetricEncryptionScheme::RsaEs
);
assert_eq!(
AsymmetricEncryptionScheme::try_from(TPM2_ALG_ECDH).unwrap(),
AsymmetricEncryptionScheme::EcDh
);
assert!(
AsymmetricEncryptionScheme::try_from(TPM2_ALG_ERROR).is_err(),
"Error should not exist in AsymmetricEncryptionScheme"
);
}
}
// Round-trip conversion tests for EncryptionMode <-> TPM2_ALG_ID
// (block-cipher modes of operation).
mod test_encryption_mode {
use super::*;
#[test]
fn test_into_alogithm_id() {
assert_eq!(Into::<TPM2_ALG_ID>::into(EncryptionMode::Ctr), TPM2_ALG_CTR);
assert_eq!(Into::<TPM2_ALG_ID>::into(EncryptionMode::Ofb), TPM2_ALG_OFB);
assert_eq!(Into::<TPM2_ALG_ID>::into(EncryptionMode::Cbc), TPM2_ALG_CBC);
assert_eq!(Into::<TPM2_ALG_ID>::into(EncryptionMode::Cfb), TPM2_ALG_CFB);
assert_eq!(Into::<TPM2_ALG_ID>::into(EncryptionMode::Ecb), TPM2_ALG_ECB);
}
// Reverse direction, plus rejection of TPM2_ALG_ERROR.
#[test]
fn test_try_from_alogithm_id() {
assert_eq!(
EncryptionMode::try_from(TPM2_ALG_CTR).unwrap(),
EncryptionMode::Ctr
);
assert_eq!(
EncryptionMode::try_from(TPM2_ALG_OFB).unwrap(),
EncryptionMode::Ofb
);
assert_eq!(
EncryptionMode::try_from(TPM2_ALG_CBC).unwrap(),
EncryptionMode::Cbc
);
assert_eq!(
EncryptionMode::try_from(TPM2_ALG_CFB).unwrap(),
EncryptionMode::Cfb
);
assert_eq!(
EncryptionMode::try_from(TPM2_ALG_ECB).unwrap(),
EncryptionMode::Ecb
);
assert!(
EncryptionMode::try_from(TPM2_ALG_ERROR).is_err(),
"Error should not exist in EncryptionMode"
);
}
}
// Round-trip conversion tests for MaskGenerationFunction <-> TPM2_ALG_ID.
mod test_mask_generation_function {
use super::*;
#[test]
fn test_into_alogithm_id() {
assert_eq!(
Into::<TPM2_ALG_ID>::into(MaskGenerationFunction::Mgf1),
TPM2_ALG_MGF1
);
}
// Reverse direction, plus rejection of TPM2_ALG_ERROR.
#[test]
fn test_try_from_alogithm_id() {
assert_eq!(
MaskGenerationFunction::try_from(TPM2_ALG_MGF1).unwrap(),
MaskGenerationFunction::Mgf1
);
assert!(
MaskGenerationFunction::try_from(TPM2_ALG_ERROR).is_err(),
"Error should not exist in MaskGenerationFunction"
);
}
}
// Round-trip conversion tests for KeyDerivationFunction <-> TPM2_ALG_ID.
mod test_key_derivation_function {
    use super::*;

    // Each KeyDerivationFunction variant maps to its TPM2_ALG_* constant.
    #[test]
    fn test_into_alogithm_id() {
        assert_eq!(
            Into::<TPM2_ALG_ID>::into(KeyDerivationFunction::Kdf1Sp800_56a),
            TPM2_ALG_KDF1_SP800_56A
        );
        assert_eq!(
            Into::<TPM2_ALG_ID>::into(KeyDerivationFunction::Kdf2),
            TPM2_ALG_KDF2
        );
        assert_eq!(
            Into::<TPM2_ALG_ID>::into(KeyDerivationFunction::Kdf1Sp800_108),
            TPM2_ALG_KDF1_SP800_108
        );
        assert_eq!(
            Into::<TPM2_ALG_ID>::into(KeyDerivationFunction::EcMqv),
            TPM2_ALG_ECMQV
        );
    }

    // Reverse direction, plus rejection of TPM2_ALG_ERROR.
    #[test]
    fn test_try_from_alogithm_id() {
        assert_eq!(
            KeyDerivationFunction::try_from(TPM2_ALG_KDF1_SP800_56A).unwrap(),
            KeyDerivationFunction::Kdf1Sp800_56a
        );
        assert_eq!(
            KeyDerivationFunction::try_from(TPM2_ALG_KDF2).unwrap(),
            KeyDerivationFunction::Kdf2
        );
        assert_eq!(
            KeyDerivationFunction::try_from(TPM2_ALG_KDF1_SP800_108).unwrap(),
            KeyDerivationFunction::Kdf1Sp800_108
        );
        assert_eq!(
            KeyDerivationFunction::try_from(TPM2_ALG_ECMQV).unwrap(),
            KeyDerivationFunction::EcMqv
        );
        // BUG FIX: this assertion previously called EncryptionMode::try_from
        // (copy-paste from the test_encryption_mode module), so
        // KeyDerivationFunction's rejection of TPM2_ALG_ERROR was never
        // actually tested.
        assert!(
            KeyDerivationFunction::try_from(TPM2_ALG_ERROR).is_err(),
            "Error should not exist in KeyDerivationFunction"
        );
    }
}
// Conversion tests for the AlgorithmicError sentinel, which is the one type
// that maps to (and accepts) TPM2_ALG_ERROR.
mod test_algorithmic_error {
use super::*;
#[test]
fn test_into_alogithm_id() {
assert_eq!(
Into::<TPM2_ALG_ID>::into(AlgorithmicError::Error),
TPM2_ALG_ERROR
);
}
#[test]
fn test_try_from_alogithm_id() {
assert_eq!(
AlgorithmicError::try_from(TPM2_ALG_ERROR).unwrap(),
AlgorithmicError::Error
);
}
}
|
package name.alatushkin.api.vk.generated.widgets
import name.alatushkin.api.vk.api.VkDate
import name.alatushkin.api.vk.generated.users.UserFull
/**
 * A single reply item in a VK widgets comment thread.
 *
 * All fields are nullable because the VK API may omit any of them.
 * NOTE(review): field meanings below are inferred from VK API naming
 * conventions — confirm against the widgets API schema.
 *
 * @param cid presumably the comment/reply id
 * @param uid presumably the author's user id
 * @param date creation date of the reply
 * @param text reply body text
 * @param likes like counters attached to the reply
 * @param user expanded profile of the reply author
 */
open class CommentRepliesItem(
val cid: Long? = null,
val uid: Long? = null,
val date: VkDate? = null,
val text: String? = null,
val likes: WidgetLikes? = null,
val user: UserFull? = null
)
|
# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: AGPL-3.0
# Backs the one-user-per-identity-URL invariant with a unique database
# index on users.identity_url (reversible: Rails infers remove_index).
class EnforceUniqueIdentityUrl < ActiveRecord::Migration[5.0]
  def change
    add_index :users, [:identity_url], unique: true
  end
end
|
# Ansible variables to set up
* `wsid_install_dir` (default: /opt/wsid-server)
* `wsid_identity_url` -- (required!) the HTTPS URL that the application will use
* `wsid_port` -- local port to bind to
* `wsid_allowed_users` -- system users exclusively allowed to connect to port (nginx must be included)
|
c     Read box definitions from boxes.dat and register each one via add_box.
c     File layout: total box count, grid size, then one line per box with
c     the box's lower corner, size (ngrid) and circle ratio r1.
subroutine setup_map()
implicit none
integer smallend(2),bigend(2)
integer mgrid, ngrid, ntotal
double precision r1
integer n, m
c # Number of boxes
call create_boxes()
open(10,file='boxes.dat')
read(10,*) ntotal
read(10,*) mgrid
c read(10,*) ngrid
c read(10,*) r1
do n = 1,ntotal
read(10,*) smallend(1),smallend(2), ngrid, r1
c     Upper corner is lower corner offset by the box size in each dimension.
do m = 1,2
bigend(m) = smallend(m) + ngrid
enddo
call add_box(Mgrid,smallend,bigend,r1,ngrid,n)
enddo
close(10)
end
c     Reset the global box counter (common /combox0/) to zero.
subroutine create_boxes()
implicit none
integer n_box_com
common /combox0/ n_box_com
n_box_com = 0
end
c     Register box i in common /combox1/, converting its grid-index corners
c     to physical coordinates on [-1,1] with spacing h = 2/mgrid.
c     boxes_com(:,i) stores xlow, ylow, xhi, yhi, r1, ngrid.
c     NOTE(review): local array boxes(5) is declared but never used.
subroutine add_box(mgrid,smallend,bigend,r1,ngrid,i)
implicit none
integer Mgrid, smallend(2), bigend(2), ngrid,i
double precision boxes(5), r1
double precision xlow,ylow,xhi,yhi,h
double precision boxes_com(6,100)
integer n_box_com
common /combox0/ n_box_com
common /combox1/ boxes_com
n_box_com = n_box_com + 1
h = 2.d0/mgrid
xlow = -1 + (smallend(1)-1)*h
xhi = -1 + (bigend(1)-1)*h
ylow = -1 + (smallend(2)-1)*h
yhi = -1 + (bigend(2)-1)*h
boxes_com(1,i) = xlow
boxes_com(2,i) = ylow
boxes_com(3,i) = xhi
boxes_com(4,i) = yhi
boxes_com(5,i) = r1
boxes_com(6,i) = ngrid
end
c     Check that every registered box, and the circle inscribed in it
c     (radius ratio r1), lands exactly on the grid spacing (dx,dy).
c     Prints a report per box and stops with a warning on misalignment.
subroutine check_boxes(dx,dy)
implicit none
double precision dx,dy
integer n, m
logical check(4)
double precision xlow,ylow,xhi,yhi,r1,ts
double precision ndx,ndy, rx,ry
double precision ndx1,ndy1, rx1, ry1
double precision boxes_com(6,100)
integer n_box_com
common /combox0/ n_box_com
common /combox1/ boxes_com
c # Only print out information for one box, since we assume
c # all boxes are the same size now.
c     Scale used to snap ratios to 5 decimal places before comparison.
c     BUG FIX: ts was previously assigned *after* its first use in the
c     ndx1/ndy1 lines, so those values were computed from an
c     uninitialized variable on the first loop iteration.
ts = 1.d5
do n = 1,n_box_com
xlow = boxes_com(1,n)
ylow = boxes_com(2,n)
xhi = boxes_com(3,n)
yhi = boxes_com(4,n)
r1 = boxes_com(5,n)
c ngrid = boxes_com(6,n)
c     Box extent and circle diameter measured in grid cells.
ndx = (xhi - xlow)/dx
ndy = (yhi - ylow)/dy
ndx1 = nint(ndx*ts)/ts
ndy1 = nint(ndy*ts)/ts
rx = r1*ndx
ry = r1*ndy
rx1 = nint(rx*ts)/ts
ry1 = nint(ry*ts)/ts
c     Alignment holds when each rounded value matches the raw one.
check(1) = abs(ndx - ndx1) .lt. 1d-8
check(2) = abs(ndy - ndy1) .lt. 1d-8
check(3) = abs(rx - rx1) .lt. 1d-8
check(4) = abs(ry - ry1) .lt. 1d-8
write(6,*) ' '
write(6,95) '------------'
write(6,90) 'Box ', n
write(6,95) '------------'
90 format(A,I2)
95 format(A)
if (check(1)) then
write(6,100) 'Square : x direction',nint(ndx)
else
write(6,110) 'Square : x direction',ndx
endif
if (check(2)) then
write(6,100) 'Square : y direction',nint(ndy)
else
write(6,110) 'Square : y direction',ndy
endif
if (check(3)) then
write(6,100) 'Circle : x direction',nint(rx)
else
write(6,110) 'Circle : x direction',rx
endif
if (check(4)) then
write(6,100) 'Circle : y direction',nint(ry)
else
write(6,110) 'Circle : y direction',ry
endif
c     Abort on any misalignment so downstream code never sees bad boxes.
do m = 1,4
if (.not. check(m)) then
write(6,*) '*** WARNING : Boxes or circles do not ',
& ' align with grid.'
stop
endif
enddo
enddo
write(6,*) ' '
100 format(A20,I10)
110 format(A20,F10.5)
end
c     Return .true. if point (xp,yp) lies within 10% (relative) of any
c     registered box's inscribed-circle radius, i.e. near a circle boundary.
c     NOTE(review): argument dr and locals xc1, yc1, zp are never used.
logical function is_in_circle(xp,yp,dr)
implicit none
double precision xp,yp, xc1, yc1, zp
double precision boxes_com(6,100)
integer n_box_com
common /combox0/ n_box_com
common /combox1/ boxes_com
integer i
double precision r, xc, yc, r2, dr
is_in_circle = .false.
do i = 1,n_box_com
c     Circle center is the box center; radius is r1 times the half-width.
xc = (boxes_com(1,i) + boxes_com(3,i))/2
yc = (boxes_com(2,i) + boxes_com(4,i))/2
r = boxes_com(5,i)*(boxes_com(3,i) - boxes_com(1,i))/2
r2 = sqrt((xp-xc)**2 + (yp - yc)**2)
if (abs(r2 - r)/r .le. 0.1d0) then
is_in_circle = .true.
return
endif
enddo
end
|
# Make the MIVisionX and RPP shared libraries visible to the loader.
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/rocm/mivisionx/lib:/opt/rocm/rpp/lib
# Start with a clean output directory for the generated test images.
rm -rf RALI-GPU-RESULTS
mkdir RALI-GPU-RESULTS
# Run one augmentation case per line.
# NOTE(review): trailing numeric args appear to be <width> <height>
# <test-case id> <gpu flag> <batch/device count> — confirm against the
# rali_unittests usage message.
../../../utilities/rali/rali_unittests/build/rali_unittests image_224x224 RALI-GPU-RESULTS/1-RALI-GPU-Rotate.png 224 224 2 1 1
../../../utilities/rali/rali_unittests/build/rali_unittests image_224x224 RALI-GPU-RESULTS/2-RALI-GPU-Brightness.png 224 224 3 1 1
../../../utilities/rali/rali_unittests/build/rali_unittests image_224x224 RALI-GPU-RESULTS/3-RALI-GPU-Flip.png 224 224 6 1 1
../../../utilities/rali/rali_unittests/build/rali_unittests image_224x224 RALI-GPU-RESULTS/4-RALI-GPU-Blur.png 224 224 7 1 1
../../../utilities/rali/rali_unittests/build/rali_unittests image_224x224 RALI-GPU-RESULTS/5-RALI-GPU-SnPNoise.png 224 224 13 1 1
../../../utilities/rali/rali_unittests/build/rali_unittests image_224x224 RALI-GPU-RESULTS/6-RALI-GPU-Snow.png 224 224 14 1 1
../../../utilities/rali/rali_unittests/build/rali_unittests image_224x224 RALI-GPU-RESULTS/7-RALI-GPU-Pixelate.png 224 224 19 1 1
|
module Auth
# Admin CRUD for the user<->tag join records of a given UserTag.
# All actions are nested under a user_tag (params[:user_tag_id]).
class Admin::UserTaggedsController < Admin::BaseController
before_action :set_user_tag
before_action :set_user_tagged, only: [:show, :edit, :update]
# Paginated list of this tag's join records.
def index
@user_taggeds = @user_tag.user_taggeds.page(params[:page])
end
def new
@user_tagged = @user_tag.user_taggeds.build
end
# Tags the user given by params[:user_id]; re-renders the form with 422
# on validation failure, otherwise falls through to the default render.
def create
@user_tagged = @user_tag.user_taggeds.build(user_id: params[:user_id])
unless @user_tagged.save
render :new, locals: { model: @user_tagged }, status: :unprocessable_entity
end
end
# Looks up users by account identity; @select_ids marks those already
# tagged so the view can distinguish them.
def search
@select_ids = @user_tag.users.default_where('accounts.identity': params[:identity]).pluck(:id)
@users = User.default_where('accounts.identity': params[:identity])
end
# Removes a join record addressed either by its own id or by user_id.
# Silently does nothing if neither lookup finds a record.
def destroy
if params[:id]
@user_tagged = @user_tag.user_taggeds.find params[:id]
elsif params[:user_id]
@user_tagged = @user_tag.user_taggeds.find_by(user_id: params[:user_id])
end
@user_tagged.destroy if @user_tagged
end
private
# Loads the parent tag from the nested route; raises RecordNotFound if absent.
def set_user_tag
@user_tag = UserTag.find params[:user_tag_id]
end
def set_user_tagged
@user_tagged = @user_tag.user_taggeds.find params[:id]
end
end
end
|
// Mock ApiService object: every method simulates a successful API call by
// resolving to { status: 'success' }.
// FIX: removed the explicit-Promise-construction anti-pattern — an async
// function already wraps its return value in a resolved Promise, so
// `new Promise((resolve) => resolve(...))` was redundant.
const ApiService = {
  async getRestaurantData() {
    return { status: 'success' };
  },
  async getMenuData() {
    return { status: 'success' };
  },
  async getAnalytics() {
    return { status: 'success' };
  },
  async updateRestaurant() {
    return { status: 'success' };
  },
  async removeOldMenu() {
    return { status: 'success' };
  },
  async addNewMenuSection() {
    return { status: 'success' };
  },
  async addNewMenuItem() {
    return { status: 'success' };
  },
  async getOpenOrdersForRestaurant() {
    return { status: 'success' };
  },
  async completeOpenOrder() {
    return { status: 'success' };
  },
  async submitOrder() {
    return { status: 'success' };
  },
  async findRestaurants() {
    return { status: 'success' };
  },
  async getCustomerProfile() {
    return { status: 'success' };
  },
  async updateCustomerProfile() {
    return { status: 'success' };
  },
  async retrieveOrders() {
    return { status: 'success' };
  },
  async addPaymentMethod() {
    return { status: 'success' };
  },
};
export default ApiService;
|
<?php
namespace Adldap\Laravel\Tests;
use Adldap\Connections\Ldap;
use Adldap\Laravel\Auth\DatabaseUserProvider;
use Adldap\Laravel\Tests\Models\User;
use Adldap\Schemas\ActiveDirectory;
use Illuminate\Support\Facades\Schema;
/**
 * Base test case for Adldap database-auth tests: provides an in-memory
 * sqlite users table and an Adldap auth configuration backed by
 * DatabaseUserProvider.
 */
class DatabaseTestCase extends TestCase
{
public function setUp()
{
parent::setUp();
// Create the users table for testing
Schema::create('users', function ($table) {
$table->increments('id');
$table->string('name');
$table->string('email')->unique();
$table->string('password', 60);
$table->rememberToken();
$table->timestamps();
$table->softDeletes();
});
}
/**
 * Define the environment setup.
 *
 * Configures: an in-memory sqlite connection, a non-auto-connecting
 * Adldap LDAP connection against ActiveDirectory, and an 'adldap'
 * auth provider wired to the web guard.
 *
 * @param \Illuminate\Foundation\Application $app
 */
protected function getEnvironmentSetup($app)
{
// Laravel database setup.
$app['config']->set('database.default', 'testbench');
$app['config']->set('database.connections.testbench', [
'driver' => 'sqlite',
'database' => ':memory:',
'prefix' => '',
]);
// Adldap connection setup.
$app['config']->set('adldap.connections.default.auto_connect', false);
$app['config']->set('adldap.connections.default.connection', Ldap::class);
$app['config']->set('adldap.connections.default.schema', ActiveDirectory::class);
$app['config']->set('adldap.connections.default.connection_settings', [
'admin_username' => '[email protected]',
'admin_password' => 'password',
]);
// Adldap auth setup.
$app['config']->set('adldap_auth.provider', DatabaseUserProvider::class);
// Laravel auth setup.
$app['config']->set('auth.guards.web.provider', 'adldap');
$app['config']->set('auth.providers', [
'adldap' => [
'driver' => 'adldap',
'model' => User::class,
],
'users' => [
'driver' => 'eloquent',
'model' => User::class,
],
]);
}
}
|
using FreeRedis.Internal;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace FreeRedis
{
partial class RedisClient
{
/// <summary>
/// Adapter that pins all commands to one existing socket (e.g. a borrowed
/// connection) and runs a caller-supplied cleanup action on dispose.
/// </summary>
class SingleTempAdapter : BaseAdapter
{
readonly RedisClient _cli;
readonly IRedisSocket _redisSocket;
// Invoked on Dispose; owner decides how the socket is released.
readonly Action _dispose;
public SingleTempAdapter(RedisClient cli, IRedisSocket redisSocket, Action dispose)
{
UseType = UseType.SingleInside;
_cli = cli;
_redisSocket = redisSocket;
_dispose = dispose;
}
public override void Dispose()
{
_dispose?.Invoke();
}
// Hands out a temporary proxy so callers cannot dispose the underlying
// socket directly.
public override IRedisSocket GetRedisSocket(CommandPacket cmd)
{
return DefaultRedisSocket.CreateTempProxy(_redisSocket, null);
}
// Writes the command on the pinned socket, reads the reply, and applies
// the client's simple-error policy before parsing; wrapped in LogCall
// for the client's command logging/notification hooks.
public override T2 AdapaterCall<T1, T2>(CommandPacket cmd, Func<RedisResult<T1>, T2> parse)
{
return _cli.LogCall(cmd, () =>
{
_redisSocket.Write(cmd);
var rt = cmd.Read<T1>();
rt.IsErrorThrow = _cli._isThrowRedisSimpleError;
return parse(rt);
});
}
}
}
}
|
<?php
namespace App\Http\Controllers;
use App\Services\DateCreatorService;
/**
 * Serves the weather index page with the preset filter start dates
 * (today, Monday of this week, first of the month) used by the buttons.
 */
class WeatherController extends Controller
{
    //Display the index page and add start dates to buttons
    public function getIndex()
    {
        //Get Mondays date for the week filter
        $monday = DateCreatorService::getMonday();
        return \View::make('index', array(
            'today' => date('Y-m-d'),
            'monday' => $monday,
            // FIX: was date('Y-m-'.'01') — a pointless concatenation of two
            // constant strings; a single literal produces the same format.
            'first' => date('Y-m-01')
        ));
    }
}
|
##########################################################################
# Copyright 2007 Applied Research in Patacriticism and the University of Virginia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
Collex::Application.routes.draw do
# The priority is based upon order of creation:
# first created -> highest priority.
# Sample of regular route:
# match 'products/:id' => 'catalog#view'
# Keep in mind you can assign values other than :controller and :action
# Sample of named route:
# match 'products/:id/purchase' => 'catalog#purchase', :as => :purchase
# This route can be invoked with purchase_url(:id => product.id)
# Sample resource route (maps HTTP verbs to controller actions automatically):
# resources :products
# Sample resource route with options:
# resources :products do
# member do
# get 'short'
# post 'toggle'
# end
#
# collection do
# get 'sold'
# end
# end
# Sample resource route with sub-resources:
# resources :products do
# resources :comments, :sales
# resource :seller
# end
# Sample resource route with more complex sub-resources
# resources :products do
# resources :comments
# resources :sales do
# get 'recent', :on => :collection
# end
# end
# Sample resource route within a namespace:
# namespace :admin do
# # Directs /admin/products/* to Admin::ProductsController
# # (app/controllers/admin/products_controller.rb)
# resources :products
# end
# You can have the root of your site routed with "root"
# just remember to delete public/index.html.
# root :to => 'welcome#index'
# See how all your routes lay out with "rake routes"
# This is a legacy wild controller route that's not recommended for RESTful applications.
# Note: This route will make all actions in every controller accessible via GET requests.
# match ':controller(/:action(/:id(.:format)))'
# for transmitting the theme to wordpress
get "/wrapper" => "home#wrapper"
get "/login_slider" => "home#login_slider"
get "test_js/general_dialog"
# get '/search/add_tw_constraint' => 'search#add_tw_constraint'
match '/search/list_name_facet_all' => 'search#list_name_facet_all'
#post '/search/remove_constraint' => 'search#remove_constraint'
#post '/search/add_federation_constraint' => 'search#add_federation_constraint'
#post '/search/constrain_resource' => 'search#constrain_resource'
#post '/search/add_facet' => 'search#add_facet'
#post '/search/constrain_freeculture' => 'search#constrain_freeculture'
#post '/search/constrain_fulltext' => 'search#constrain_fulltext'
#post '/search/constrain_typewright' => 'search#constrain_typewright'
#post '/search/invert_constraint' => 'search#invert_constraint'
#post '/search/sort_by' => 'search#sort_by'
#post '/search/remove_genre' => 'search#remove_genre'
#post '/search/remove_discipline' => 'search#remove_discipline'
#post '/search/remove_format' => 'search#remove_format'
post '/search/remember_resource_toggle' => 'search#remember_resource_toggle'
match '/search/saved' => 'search#saved'
match '/search/save_search' => 'search#save_search'
post '/search/remove_saved_search' => 'search#remove_saved_search'
match '/search/browse' => 'search#browse'
match 'browse/saved/:username/:name' => 'search#saved_permalink'
#match '/search' => 'search#browse'
get '/search' => 'search#index'
post '/search/auto_complete_for_q' => 'search#auto_complete_for_q'
post '/search/auto_complete_for_search_university' => 'search#auto_complete_for_search_university'
post '/results/add_object_to_exhibit' => 'results#add_object_to_exhibit'
post '/results/add_tag' => 'results#add_tag'
post '/results/bulk_add_tag' => 'results#bulk_add_tag'
post '/results/bulk_collect' => 'results#bulk_collect'
post '/results/bulk_uncollect' => 'results#bulk_uncollect'
post '/results/collect' => 'results#collect'
post '/results/edit_tag' => 'results#edit_tag'
match '/results/redraw_result_row_for_popup_buttons' => 'results#redraw_result_row_for_popup_buttons'
post '/results/remove_all_tags' => 'results#remove_all_tags'
post '/results/remove_tag' => 'results#remove_tag'
post '/results/set_annotation' => 'results#set_annotation'
post '/results/uncollect' => 'results#uncollect'
post '/results/resend_exhibited_objects' => 'results#resend_exhibited_objects'
match '/help/resources' => 'help#resources'
match '/help/sites' => 'help#sites'
post '/communities/view_by' => 'communities#view_by'
post '/communities/sort_by' => 'communities#sort_by'
post '/communities/search' => 'communities#search'
post "/communities/page" => "communities#page"
post '/builder/add_additional_author' => 'builder#add_additional_author'
post '/builder/change_exhibits_group' => 'builder#change_exhibits_group'
post '/builder/change_exhibits_cluster' => 'builder#change_exhibits_cluster'
post '/builder/change_sharing' => 'builder#change_sharing'
post '/builder/change_img_width' => 'builder#change_img_width'
post '/builder/change_element_type' => 'builder#change_element_type'
get '/builder/change_page' => 'builder#change_page'
post '/builder/edit_exhibit_overview' => 'builder#edit_exhibit_overview'
post '/builder/edit_text' => 'builder#edit_text'
post '/builder/edit_header' => 'builder#edit_header'
post '/builder/edit_element' => 'builder#edit_element'
post '/builder/edit_illustration' => 'builder#edit_illustration'
post '/builder/find_page_containing_element' => 'builder#find_page_containing_element'
post '/builder/get_all_collected_objects' => 'builder#get_all_collected_objects'
post '/builder/get_alias_users' => 'builder#get_alias_users'
post '/builder/modify_outline' => 'builder#modify_outline'
post '/builder/modify_outline_page' => 'builder#modify_outline_page'
post '/builder/modify_border' => 'builder#modify_border'
post '/builder/remove_additional_author' => 'builder#remove_additional_author'
post '/builder/remove_exhibited_object' => 'builder#remove_exhibited_object'
post '/builder/redraw_exhibit_page' => 'builder#redraw_exhibit_page'
post '/builder/refresh_outline' => 'builder#refresh_outline'
post '/builder/reset_exhibit_page_from_outline' => 'builder#reset_exhibit_page_from_outline'
post '/builder/set_exhibit_author_alias' => 'builder#set_exhibit_author_alias'
post '/builder/publish_exhibit' => 'builder#publish_exhibit'
post '/builder/update_fonts' => 'builder#update_fonts'
post '/builder/update_objects_in_exhibits' => 'builder#update_objects_in_exhibits'
post '/builder/update_title' => 'builder#update_title'
post '/builder/verify_title' => 'builder#verify_title'
post '/builder/change_illustration_justification'
post '/builder/insert_illustration'
post '/builder/edit_row_of_illustrations'
post '/builder/import_exhibit'
post '/builder/modify_outline_add_first_element'
# This gets called before environment.rb, so the constant we need isn't loaded yet. We'll load it here.
config_file = File.join(Rails.root, "config", "site.yml")
my_collex = 'my_collex'
plugins = {}
if File.exists?(config_file)
site_specific = YAML.load_file(config_file)
my_collex = site_specific['my_collex_url']
plugins = site_specific['plugins'] || {}
plugins.delete_if { |key, value| value != true }
end
get "/#{my_collex}" => 'my_collex#index'
get "/#{my_collex}/results" => 'my_collex#results'
get "/#{my_collex}/get_typewright_documents" => 'my_collex#get_typewright_documents'
post "/#{my_collex}/results" => 'my_collex#results'
post '/my_collex/remove_profile_picture' => 'my_collex#remove_profile_picture'
post '/my_collex/show_profile' => 'my_collex#show_profile'
post '/my_collex/update_profile_upload' => 'my_collex#update_profile_upload'
post '/my_collex/update_profile' => 'my_collex#update_profile'
# TypeWright routes are only mounted when the plugin is enabled in site.yml.
if plugins['typewright']
  get "typewright/edit"
  post "typewright/remove_doc"
  namespace :typewright do
    get 'documents/not_available'
    get 'documents/not_signed_in'
    post 'documents/instructions'
    # Per-document actions, addressed by document id.
    post 'documents/:id/complete' => 'documents#page_complete'
    post 'documents/:id/status' => 'documents#update_status'
    post 'documents/:id/report' => 'documents#report'
    put 'documents/:id/delete_edits' => 'documents#delete_edits'
    resources :admin do
      collection do
        get 'stats'
      end
    end
    resources :documents
    resources :lines
    resources :document_users
    # Overviews are read-only; retrieve_doc is a collection-level lookup.
    resources :overviews, only: [ :index, :show ] do
      collection do
        get 'retrieve_doc'
      end
    end
  end
  # Bare /typewright shows the document index.
  get "typewright" => 'typewright/documents#index'
end
# NOTE(review): the `match` routes below have no :via restriction; that form
# was removed in Rails 4 (match requires `via:`) — confirm the target Rails
# version before upgrading.

# Login / account management.
get "/login/logout" => "login#logout"
post "/login/verify_login" => "login#verify_login"
post "/login/submit_signup" => "login#submit_signup"
post "/login/recover_username" => "login#recover_username"
post "/login/reset_password" => "login#reset_password"
get "/login/login_controls" => "login#login_controls"

# Forum browsing and posting.
match "/forum/view_thread" => "forum#view_thread"
match "/forum/view_topic" => "forum#view_topic"
match "/forum/get_nines_obj_list_with_image" => "forum#get_nines_obj_list_with_image"
match "/forum/get_exhibit_list" => "forum#get_exhibit_list"
match "/forum/get_nines_obj_list" => "forum#get_nines_obj_list"
post "/forum/post_comment_to_existing_thread" => "forum#post_comment_to_existing_thread"
match "/forum/result_count" => "forum#result_count"
post "/forum/edit_existing_comment" => "forum#edit_existing_comment"
post "/forum/delete_comment" => "forum#delete_comment"
post "/forum/post_comment_to_new_thread" => "forum#post_comment_to_new_thread"
match "/forum/get_all_topics" => "forum#get_all_topics"
post "/forum/post_object_to_new_thread" => "forum#post_object_to_new_thread"
post "/forum/report_comment" => "forum#report_comment"
post '/forum/get_object_details' => 'forum#get_object_details'

# Classroom search / filtering.
post "/classroom/facet_on_group" => "classroom#facet_on_group"
post "/classroom/search" => "classroom#search"
post "/classroom/sort_by" => "classroom#sort_by"
post "/classroom/view_by" => "classroom#view_by"
post "/classroom/page" => "classroom#page"

# match '/vic_conference/create' => 'vic_conference#create', :as => :vic_conference
# match '/vic_conference/auth' => 'vic_conference#auth', :as => :vic_conference_auth

# Group membership, moderation, and display.
post "/groups/remove_profile_picture/:id" => "groups#remove_profile_picture"
get '/groups/stale_request' => 'groups#stale_request', :as => :stale_request
get '/groups/accept_request' => 'groups#accept_request', :as => :accept_request
get '/groups/decline_request' => 'groups#decline_request', :as => :decline_request
match '/groups/decline_invitation' => 'groups#decline_invitation', :as => :decline_invitation
match '/groups/accept_invitation' => 'groups#accept_invitation', :as => :accept_invitation
get '/groups/acknowledge_notification' => 'groups#acknowledge_notification', :as => :acknowledge_notification
match '/groups/create_login' => 'groups#create_login', :as => :create_login
match 'groups/create_login_create' => 'groups#create_login_create'
# Cluster display inside a group; must come after the named group routes above.
get '/groups/:group/:cluster' => 'clusters#show'
post "/groups/limit_exhibit" => "groups#limit_exhibit"
post "/groups/unlimit_exhibit" => "groups#unlimit_exhibit"
post "/groups/sort_exhibits" => "groups#sort_exhibits"
post "/groups/unpublish_exhibit" => "groups#unpublish_exhibit"
post "/groups/group_exhibits_list" => "groups#group_exhibits_list"
post "/groups/notifications" => "groups#notifications"
post "/groups/edit_membership" => "groups#edit_membership"
post "/groups/render_license" => "groups#render_license"
post "/groups/check_url" => "groups#check_url"
post "/groups/edit_thumbnail" => "groups#edit_thumbnail"
post "/groups/sort_cluster" => "groups#sort_cluster"
post "/groups/leave_group" => "groups#leave_group"
post "/groups/request_join" => "groups#request_join"
post "/groups/verify_group_title" => "groups#verify_group_title"
post '/groups/get_all_groups' => 'groups#get_all_groups'
post '/groups/accept_as_peer_reviewed' => 'groups#accept_as_peer_reviewed'
post '/groups/reject_as_peer_reviewed' => 'groups#reject_as_peer_reviewed'
post '/groups/pending_requests' => 'groups#pending_requests'

# Cluster management.
post "/clusters/remove_profile_picture" => "clusters#remove_profile_picture"
post "/clusters/move_exhibit" => "clusters#move_exhibit"
post "/clusters/edit_thumbnail" => "clusters#edit_thumbnail"
post "/clusters/check_url" => "clusters#check_url"

# Admin pages and actions (namespaced controllers under admin/).
post "/admin/default/refresh_cached_objects" => "admin/default#refresh_cached_objects"
get "/admin/facet_tree/index" => "admin/facet_tree#index"
get "/admin/features/index" => "admin/features#index"
get "/admin/user_roles/index" => "admin/user_roles#index"
get "/admin/discussion_topics/index" => "admin/discussion_topics#index"
get "/admin/default/forum_pending_reports" => "admin/default#forum_pending_reports"
get "/admin/default/stats" => "admin/default#stats"
get "/admin/default/groups" => "admin/default#groups"
get "/admin/default/user_content" => "admin/default#user_content"
# get "/admin/default/vic_conference" => "admin/default#vic_conference"
get "/admin/default/use_test_index" => "admin/default#use_test_index"
get "/admin/default/reload_facet_tree" => "admin/default#reload_facet_tree"
get "/admin/default/stats_show_all" => "admin/default#stats_show_all"
post "/admin/default/change_group_type" => "admin/default#change_group_type"
post "/admin/default/add_badge" => "admin/default#add_badge"
post "/admin/default/add_publication_image" => "admin/default#add_publication_image"
post "/admin/default/delete_comment" => "admin/default#delete_comment"
post "/admin/default/remove_abuse_report" => "admin/default#remove_abuse_report"
post "/admin/facet_tree/remove_site" => "admin/facet_tree#remove_site"
post "/admin/facet_tree/get_categories" => "admin/facet_tree#get_categories"
post "/admin/facet_tree/add_category" => "admin/facet_tree#add_category"
post "/admin/facet_tree/get_categories_and_details" => "admin/facet_tree#get_categories_and_details"
post "/admin/facet_tree/edit_facet" => "admin/facet_tree#edit_facet"
post "/admin/facet_tree/edit_facet_upload" => "admin/facet_tree#edit_facet_upload"
post "/admin/facet_tree/delete_facet" => "admin/facet_tree#delete_facet"
post "/admin/facet_tree/add_site" => "admin/facet_tree#add_site"
post "/admin/discussion_topics/move_down" => "admin/discussion_topics#move_down"
post "/admin/discussion_topics/move_up" => "admin/discussion_topics#move_up"
post "/admin/discussion_topics/:id" => "admin/discussion_topics#update"
post "/admin/impersonate_user" => "admin/default#impersonate_user"
post "/admin/get_user_list" => "admin/default#get_user_list"

# Miscellaneous AJAX endpoints.
post '/exhibits/get_licenses' => 'exhibits#get_licenses'
get '/home/get_footer_data' => 'home#get_footer_data'
post '/tag/set_zoom' => 'tag#set_zoom'

# RESTful resources.
resources :builder
resources :clusters
resources :groups
resources :publications
resources :communities
resources :classroom
namespace :admin do
  resources :features
  resources :user_roles
  resources :discussion_topics
  resources :setups, :only => [ :index, :update ]
end
match '/forum/rss/:thread.xml' => 'forum#rss', :as => :discussion_thread_rss
resources :exhibit_illustrations
resources :exhibit_elements
resources :exhibit_pages
# Named exhibit display route; declared before `resources :exhibits` so it wins.
match '/exhibits/:id' => 'exhibits#view', :as => :exhibits_display
resources :exhibits
resources :tagassigns
resources :collected_items

# Legacy URLs from older versions of the site, redirected to current pages.
#match 'atom/:type/:value/:user' => 'home#atom', :as => :atom_feed, :user => , :value => /[^\/]+/
match 'collex' => 'home#redirect_to_index'
match 'sidebar/list/:type/:value/:user' => 'home#redirect_to_index', :as => :sidebar_list
match 'sidebar/cloud/:type/:user' => 'home#redirect_to_index', :as => :sidebar_cloud
match 'permalink/list/:type/:value/:user' => 'home#redirect_to_index', :as => :permalink_list
match 'permalink/cloud/:type/:user' => 'home#redirect_to_tag_cloud_update', :as => :permalink_cloud
match 'permalink/detail' => 'home#redirect_to_index', :as => :permalink_detail
match 'redirect/ravon-nowviskie1.html' => 'search#saved', :user => 'nowviskie', :name => 'ravon-article'
match 'redirect/ravon-nowviskie2.html' => 'tag#results', :view => 'tag', :tag => 'collex'
match 'permalink/cloud/:type' => 'home#redirect_to_tag_cloud_update', :as => :cloud1
match 'collex/:action' => 'search#index'
match 'admin' => 'admin/default#index'

# Tag browsing and feeds.
match '/tags.xml' => 'tag#list', :as => :tag_xml, :format => 'xml'
match '/tags/rss/:tag.xml' => 'tag#rss'
match '/tags/object' => 'tag#object'
match '/tags/results' => 'tag#results'
match '/tag/results' => 'tag#results'
match '/tag/testtags' => 'tag#testtags'
match '/tag/tag_name_autocomplete' => 'tag#tag_name_autocomplete'
get '/news' => 'home#news'
get '/home/news' => 'home#news'
match '/tags' => 'tag#list'
match '/tag/update_tag_cloud' => 'tag#update_tag_cloud'
match '/forum' => 'forum#index'
match '/print_exhibit/:id' => 'exhibits#print_exhibit'
match '/exhibit_list' => 'communities#index'
match '/exhibits/view/(:id)' => 'exhibits#view'

# Diagnostics.
match '/test_exception_notifier' => 'application#test_exception_notifier'
post '/test_error_response' => 'application#test_error_response'

root :to => "home#index"
end
|
package com.thedancercodes.android.creatures.ui.test.mapper
import com.thedancercodes.android.creatures.ui.mapper.CreatureMapper
import com.thedancercodes.android.creatures.ui.test.factory.CreatureFactory
import org.junit.Before
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import kotlin.test.assertEquals
@RunWith(JUnit4::class)
class CreatureMapperTest {

    // System under test: maps CreatureView objects to view-model objects.
    private lateinit var creatureMapper: CreatureMapper

    @Before
    fun setUp() {
        creatureMapper = CreatureMapper()
    }

    /**
     * Verifies that every field of a factory-built CreatureView is copied
     * unchanged onto the resulting view model.
     */
    @Test
    fun mapToViewMapsData() {
        val creatureView = CreatureFactory.makeCreatureView()
        val creatureViewModel = creatureMapper.mapToViewModel(creatureView)
        assertEquals(creatureView.firstName, creatureViewModel.firstName)
        assertEquals(creatureView.lastName, creatureViewModel.lastName)
        assertEquals(creatureView.nickname, creatureViewModel.nickname)
        assertEquals(creatureView.image, creatureViewModel.image)
        assertEquals(creatureView.planet, creatureViewModel.planet)
    }
}
|
require 'guard/jobs/base'
module Guard
module Jobs
# Interactive Pry console job for Guard, with a custom prompt and the
# standard Guard commands pre-imported. Handles both pre- and post-0.13
# Pry configuration APIs.
class PryWrapper < Base
  # Configure Pry for use as the Guard console: skip rc files, point
  # history at the configured file, install hooks, and import commands.
  def _setup(options)
    Pry.config.should_load_rc = false
    Pry.config.should_load_local_rc = false
    history_file_path = options[:history_file] || HISTORY_FILE
    # Pry moved history config from `history.file` to `history_file` in 0.13.
    if legacy_pry?
      Pry.config.history.file = File.expand_path(history_file_path)
    else
      Pry.config.history_file = File.expand_path(history_file_path)
    end
    _add_hooks(options)

    ::Guard::Commands::All.import
    ::Guard::Commands::Change.import
    ::Guard::Commands::Notification.import
    ::Guard::Commands::Pause.import
    ::Guard::Commands::Reload.import
    ::Guard::Commands::Show.import
    ::Guard::Commands::Scope.import

    _setup_commands
    _configure_prompt
  end

  private

  attr_reader :thread

  # True when the installed Pry predates the 0.13 config/prompt API changes.
  def legacy_pry?
    Gem::Version.new(Pry::VERSION) < Gem::Version.new('0.13')
  end

  # Colorizes message using Thor Color Util
  #
  def _colorize(text, color)
    Marv.colorize(text, color)
  end

  # Configures the pry prompt to see `guard` instead of
  # `pry`. Legacy Pry takes a raw proc pair; newer Pry requires a
  # registered Pry::Prompt object.
  #
  def _configure_prompt
    prompt_procs = [
      _prompt(_colorize("\u00BB", :green)),  # main prompt
      _prompt(_colorize("*", :yellow))       # continuation prompt
    ]

    if legacy_pry?
      Pry.config.prompt = prompt_procs
    else
      prompt_args = [:marv, 'Marv prompt for guard watcher', prompt_procs]
      Pry::Prompt.add(*prompt_args) do |context, nesting, pry_instance, sep|
        sep.call(context, nesting, pry_instance)
      end
      Pry.config.prompt = Pry::Prompt[:marv]
    end
  end

  # Returns a proc that will return itself a string ending with the given
  # `ending_char` when called. The prompt shows: history count, scope,
  # pause state, cwd basename, clipped target, and nesting level.
  # NOTE(review): `_scope_for_prompt` and `_clip_name` are defined
  # elsewhere in this class hierarchy — confirm they exist before refactor.
  #
  def _prompt(ending_char)
    proc do |target_self, nest_level, pry|
      history = pry.input_ring.size
      # Show "pause" in yellow while the Guard listener is paused.
      process = ::Guard.listener.paused? ? _colorize("pause", :yellow) : _colorize("marv", :green)
      level = ":#{nest_level}" unless nest_level.zero?

      hist_text = _colorize("[#{history}]", :yellow)
      clip_text = _colorize("(#{_clip_name(target_self)})", :cyan)
      level_text = _colorize("#{level}", :cyan)
      path_text = _colorize(File.basename(Dir.pwd), :magenta)

      "#{hist_text} #{_scope_for_prompt}#{process} #{path_text} #{clip_text}#{level_text} #{ending_char} "
    end
  end
end
end
end
|
using System;
using System.Configuration;
namespace XSockets.Geo.WebTestClient
{
/// <summary>
/// Code-behind for the web test client page. Exposes the XSockets server
/// address and the Bing Maps key (both read from appSettings) to the markup.
/// </summary>
public partial class _default : System.Web.UI.Page
{
    /// <summary>XSockets server address, from the "XSocketServerAdress" appSetting.</summary>
    public string XSocketsUrl { get; set; }

    /// <summary>Bing Maps key, from the "BingKey" appSetting.</summary>
    public string BingKey { get; set; }

    protected void Page_Load(object sender, EventArgs e)
    {
        // Only populate on the initial GET request, exactly as before;
        // postbacks leave the properties untouched.
        if (IsPostBack)
        {
            return;
        }

        XSocketsUrl = ConfigurationManager.AppSettings["XSocketServerAdress"];
        BingKey = ConfigurationManager.AppSettings["BingKey"];
    }
}
|
import { MutableRefObject } from 'react'
import { delay } from '../../events/delay'
/**
 * Check whether a promise has been resolved.
 *
 * With a mutable ref `flag` supplied, nothing is returned and `flag.current`
 * is flipped to `true` as soon as the promise resolves. Without a flag, a
 * `Promise<boolean>` is returned that reports whether the input promise had
 * resolved by the next tick.
 * @param promise - The promise to inspect.
 * @param flag - Optional mutable ref that receives the resolved state.
 * @returns A `Promise<boolean>` when no flag is given; otherwise nothing.
 * @example
 * // Asynchronous usage
 * async function exampleCallback(): Promise<void> {
 *   // ...
 * }
 * const examplePromise = exampleCallback()
 * const isPromiseResolved = await isResolved(examplePromise)
 * console.log(isPromiseResolved)
 * @example
 * // Synchronous usage
 * async function exampleCallback(): Promise<void> {
 *   // ...
 * }
 * const examplePromise = exampleCallback()
 * const isPromiseResolved = { current: false }
 * isResolved(examplePromise, isPromiseResolved)
 * console.log(isPromiseResolved.current)
 * @public
 */
export function isResolved(
  promise: Promise<unknown>,
  flag?: MutableRefObject<boolean>
): Promise<boolean> | void {
  if (!flag) {
    let settled = false
    promise.then((): void => {
      settled = true
    })
    return new Promise((resolve): void => {
      // delay(0) yields control so the `.then` callback above can run first.
      delay(0).then((): void => {
        resolve(settled)
      })
    })
  }
  promise.then((): void => {
    flag.current = true
  })
}
/**
 * Flip `flag.current` to `true` once the given promise resolves.
 * @param promise - The promise to observe.
 * @param flag - Mutable ref that receives the resolved state.
 */
export function isResolved2(
  promise: Promise<unknown>,
  flag: MutableRefObject<boolean>
): void {
  const markResolved = (): void => {
    flag.current = true
  }
  promise.then(markResolved)
}
|
# Stock research from TWSE
## crawler-service
##### Spring boot + Web + jsoup + mongoDb
It is a crawler service for TWSE (Taiwan Stock Exchange) that retrieves stock information.
http://mis.twse.com.tw/stock/fibest.jsp
|
#!/bin/bash
# Fail fast: abort on any error, unset variable, or failed pipeline stage.
set -euo pipefail

# Register a local test account (user "testpilot" on host "localhost",
# password "asdf") against the local config, then start Prosody in the
# foreground using the same config.
prosodyctl --config ./prosody.cfg.lua register testpilot localhost asdf
prosody --config ./prosody.cfg.lua
|
module API
module V3
# Serves provider-name suggestions (autocomplete) scoped to the current
# recruitment cycle.
class ProviderSuggestionsController < API::V3::ApplicationController
  before_action :build_recruitment_cycle

  # GET index?query=...
  #
  # Responds 400 Bad Request unless the query is at least 3 characters.
  # Otherwise returns up to 10 providers with findable courses matching
  # the query, serialized as JSON:API.
  def index
    return render(status: :bad_request) if params[:query].nil? || params[:query].length < 3

    found_providers = @recruitment_cycle.providers
      .with_findable_courses
      .search(params[:query])
      .limit(10)

    render(
      jsonapi: found_providers,
      class: { Provider: SerializableProvider },
    )
  end

private

  # True when the string starts with a letter or digit.
  # NOTE(review): not referenced by this controller's actions — possibly
  # dead code or used by a subclass; confirm before removing.
  def begins_with_alphanumeric(string)
    string.match?(/^[[:alnum:]].*$/)
  end
end
end
|
package socket
import (
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestDecodePackage round-trips packages through MarshalBinary/decodePackage
// and messages through decodeMessage, covering plain string payloads, data
// containing a trailing newline, raw byte payloads, and packet-type bytes.
func TestDecodePackage(t *testing.T) {
	convey.Convey("test decode packet", t, func() {
		msgByte := message{Data: []byte("hello"), EventName: "test"}.MarshalBinary()
		p := sockPackage{PT: PackTypeEvent, Payload: msgByte}.MarshalBinary()
		// Remove \n char. In real code this will remove by method ReadBytes("\n")!
		comlPack := p[:len(p)-1]
		pack := decodePackage(comlPack)
		convey.So(pack.PT, convey.ShouldEqual, PackTypeEvent)
		convey.So(pack.Payload, convey.ShouldHaveLength, len(msgByte))
		msg := decodeMessage(pack.Payload)
		convey.So(msg.EventName, convey.ShouldEqual, "test")
		convey.So(string(msg.Data), convey.ShouldEqual, "hello")
	})
	// Renamed from the duplicate "test decode packet" so failures are
	// unambiguous: this case carries a trailing '\n' inside the message data,
	// which is stripped together with the frame delimiter.
	convey.Convey("test decode packet with newline in data", t, func() {
		msgByte := message{Data: append([]byte("hello"), []byte("\n")...), EventName: "test"}.MarshalBinary()
		p := sockPackage{PT: PackTypeEvent, Payload: msgByte}.MarshalBinary()
		// Remove \n char. In real code this will remove by method ReadBytes("\n")!
		comlPack := p[:len(p)-1]
		pack := decodePackage(comlPack)
		convey.So(pack.PT, convey.ShouldEqual, PackTypeEvent)
		convey.So(pack.Payload, convey.ShouldHaveLength, len(msgByte)-1)
		msg := decodeMessage(pack.Payload)
		convey.So(msg.EventName, convey.ShouldEqual, "test")
		convey.So(string(msg.Data), convey.ShouldEqual, "hello")
	})
	convey.Convey("test decode packet with bytes message data", t, func() {
		msgByte := message{
			EventName: "test",
			Data:      []byte{0x1, 0x2, 0x03},
		}.MarshalBinary()
		p := sockPackage{PT: PackTypeEvent, Payload: msgByte}.MarshalBinary()
		// Remove \n char. In real code this will remove by method ReadBytes("\n")!
		comlPack := p[:len(p)-1]
		pack := decodePackage(comlPack)
		convey.So(pack.PT, convey.ShouldEqual, PackTypeEvent)
		convey.So(pack.Payload, convey.ShouldHaveLength, len(msgByte))
		msg := decodeMessage(pack.Payload)
		convey.So(msg.EventName, convey.ShouldEqual, "test")
		convey.So(msg.Data, convey.ShouldHaveLength, 3)
		convey.So(msg.Data[0], convey.ShouldEqual, byte(0x1))
		convey.So(msg.Data[1], convey.ShouldEqual, byte(0x2))
		convey.So(msg.Data[2], convey.ShouldEqual, byte(0x03))
	})
	convey.Convey("test get packet byte", t, func() {
		p1 := PackTypeEvent
		convey.So(p1.Byte(), convey.ShouldEqual, 0x02)
		p2 := PackTypeConnect
		convey.So(p2.Byte(), convey.ShouldEqual, 0x00)
		p3 := PackTypeDisconnect
		convey.So(p3.Byte(), convey.ShouldEqual, 0x01)
	})
}
|
# -*-coding: utf-8 -*-
from django.db import models
class DBLogEntry(models.Model):
    """A single application log record persisted to the database.

    Columns:
        time: timestamp set automatically when the row is created
            (``auto_now_add``) and never updated afterwards.
        level: log level name (up to 10 characters).
        message: the full log message text.
    """

    time = models.DateTimeField(auto_now_add=True)
    level = models.CharField(max_length=10)
    message = models.TextField()

    def __str__(self):
        # e.g. "05.March.2021 14:30 INFO" — the redundant str() wrapping of
        # the original was dropped; f-string interpolation stringifies both
        # parts identically.
        return f'{self.time.strftime("%d.%B.%Y %H:%M")} {self.level}'
|
//This file is part of Photon (http://photon.sourceforge.net)
//Copyright (C) 2004-2005 James Turk
//
// Author:
// James Turk ([email protected])
//
// Version:
// $Id: RandGen.hpp,v 1.6 2005/10/30 21:08:57 cozman Exp $
#ifndef PHOTON_UTIL_RANDGEN_HPP
#define PHOTON_UTIL_RANDGEN_HPP
namespace photon
{
namespace util
{
// Class: RandGen
// Pseudorandom number generator class which uses Mersenne Twister.
// MT19937 is described at
// <http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/emt.html>.
class RandGen
{
// Group: (Con/De)structors
public:
    // Function: RandGen
    // Constructor for random generator, using time as seed.
    RandGen();

    // Function: RandGen
    // Constructor for random generator, allowing specific seed.
    //
    // Parameters:
    //  seedVal - Seed for random generator, a given seed will always turn out
    //              the same string of random numbers.
    //
    // See Also:
    //  <seed>
    RandGen(unsigned long seedVal);

// Group: General
public:
    // Function: seed
    // Reseed random generator, a given seed will always turn out same string
    // of random numbers.
    //
    // Parameters:
    //  seed - Seed for random generator.
    void seed(unsigned long seedVal);

    // Function: genRand
    // Obtain random number in range [0,max).
    //
    // Parameters:
    //  max - Boundary for random number.
    //
    // Returns:
    //  Random number from 0 to max-1.
    unsigned long genRand(unsigned long max);

    // Function: genRand
    // Obtain random integer in range [min,max].
    //
    // Parameters:
    //  min - Minimum boundary for random number.
    //  max - Maximum boundary for random number.
    //
    // Returns:
    //  Random number from min to max.
    int genRand(int min, int max);

    // Function: genRand
    // Obtain random double in range [min,max].
    //
    // Parameters:
    //  min - Minimum boundary for random number.
    //  max - Maximum boundary for random number.
    //
    // Returns:
    //  Random number from min to max.
    double genRand(double min, double max);

    // Function: genRandSign
    // Obtain random sign, positive or negative.
    //
    // Returns:
    //  Either -1.0 or +1.0
    double genRandSign();

    // Function: genRand01
    // Obtain random double in range [0,1).
    //
    // Returns:
    //  Random number from 0 to under 1.
    double genRand01();

private:    // utilities
    unsigned long genrand_int32();  //base of all generation

private:    // constants
    // Standard MT19937 parameters (degree of recurrence, middle word,
    // twist matrix coefficient, and the word-splitting bit masks).
    static const unsigned long N = 624;
    static const unsigned long M = 397;
    static const unsigned long MATRIX_A = 0x9908b0dfUL;
    static const unsigned long UPPER_MASK = 0x80000000UL;
    static const unsigned long LOWER_MASK = 0x7fffffffUL;

private:    //state data
    unsigned long stateVector_[N];      // the generator's internal state
    unsigned long stateVectorIndex_;    // next index to read from the state
};
}
}
#endif //PHOTON_UTIL_RANDGEN_HPP
|
<?php
/**
* Created by PhpStorm.
* User: haohui
* Date: 2016/6/30
* Time: 10:13
*/
namespace App\Service;
use App\Models\Setting;
/**
 * Boot-time helper for loading site-wide configuration from the database.
 */
class BootService
{
    /**
     * Load every row of the settings table into a flat key => value map.
     *
     * Each Setting row stores its name in `skey` and its value in `svalue`.
     *
     * @return array map of setting key to setting value
     */
    public function domainSetting()
    {
        $data = Setting::all()->toArray();

        $setting = [];
        // The array index of each row is irrelevant, so it is not bound
        // (the original bound an unused $key variable).
        foreach ($data as $item) {
            $setting[$item['skey']] = $item['svalue'];
        }

        return $setting;
    }
}
|
---
title: Docker 镜像保存和加载
date: 2020-01-17 10:00:00
tags: 'Docker'
categories:
- ['部署', '容器化']
permalink: docker-save-load
photo:
---
## 简介
> 这篇文章主要介绍了一种较为便捷的镜像交付方式
一般项目的交付都需要一个甲乙双方共用的注册中心,然后乙方推送后自动在甲方那边集成部署。当然对于很多项目,大部分流程都是乙方打包,然后通过邮件或者其他形式直接发送给甲方处理。
docker 也提供类似的功能,通过 `docker save`, `docker load` 来保存和导入镜像,即可以实现简单的镜像交付
<!-- more -->
## docker save
`docker save`: 将指定镜像保存成 tar 归档文件
```sh
docker save [OPTIONS] IMAGE [IMAGE...]
OPTIONS说明
-o: 输出的文件, tar 格式
```
## docker load
`docker load`: 导入使用 `docker save` 命令导出的镜像
```sh
docker load [OPTIONS]
OPTIONS说明
--input , -i: 指定导入的文件,代替 STDIN
--quiet , -q: 精简输出信息
```
|
namespace cbb.core
{
    using System.ComponentModel;

    /// <summary>
    /// Common base class for view models, providing property change
    /// notification via <see cref="INotifyPropertyChanged"/>.
    /// </summary>
    public class BaseViewModel : INotifyPropertyChanged
    {
        #region events

        /// <summary>
        /// Raised whenever a property value changes. Initialized to an empty
        /// delegate so it can always be invoked without a null check.
        /// </summary>
        public event PropertyChangedEventHandler PropertyChanged = (sender, e) => { };

        #endregion

        #region public methods

        /// <summary>
        /// Raises the <see cref="PropertyChanged"/> event for the given property.
        /// </summary>
        /// <param name="name">Name of the property that changed.</param>
        public void OnPropertyChanged(string name)
        {
            var args = new PropertyChangedEventArgs(name);
            PropertyChanged(this, args);
        }

        #endregion
    }
}
|
using UnityEngine;
using System.Collections;
// Handles the command menu shown for a selected army on the strategy map.
// Drives a small two-state machine: state 0 = menu visible and accepting
// input, state 1 = menu animating out before the chosen command executes.
public class ArmyCommandsController : MonoBehaviour {

	public StrategyController strCtrl;            // main strategy-mode controller
	public MyPathfinding path;                    // pathfinding helper; used to find a nearby city
	public SelectTargetCity selectTargetCity;     // UI flow for choosing a movement target
	public MenuDisplayAnim armyCommands;          // slide-in/out animation for the command menu
	public ACInformationController infoCtrl;      // panel showing the selected army's details
	public CCGeneralsInfoController generalsInfo; // panel listing generals / prisoners
	public Button[] commands;                     // one button per command, indexed by command id

	private int state = 0;           // 0 = normal input, 1 = transitioning out
	private ArmyInfo armyInfo;       // currently selected army
	// Chosen command: -1 back, 0 generals, 1 move, 2 garrison, 3 prisoners, 4 enter city.
	private int commandIdx = -1;
	private int cityCanIntoIdx = -1; // index of a city close enough to enter, or -1
	private float timeTick;          // elapsed time while waiting for the out-animation

	// Use this for initialization
	void Start () {
		// Wire every command button to the shared click handler, passing its
		// own index as the button data.
		for (int i=0; i<commands.Length; i++) {
			commands[i].SetButtonData(i);
			commands[i].SetButtonClickHandler(OnCommandsButtonClickHandler);
		}
	}

	void OnEnable() {
		// Reset the pending command and play the slide-in animations.
		commandIdx = -1;
		armyCommands.SetAnim(MenuDisplayAnim.AnimType.InsertFromLeft);
		infoCtrl.GetComponent<MenuDisplayAnim>().SetAnim(MenuDisplayAnim.AnimType.InsertFromRight);
	}

	// Update is called once per frame
	void Update () {
		switch (state) {
		case 0:
			OnNormalModeHandler();
			break;
		case 1:
			OnChangingToCommandModeHandler();
			break;
		}
	}

	// State 0: watch for the back input; on back, start the out-animation
	// with no command selected (commandIdx stays -1).
	void OnNormalModeHandler() {
		if (Misc.GetBack()) {
			state = 1;
			commandIdx = -1;
			armyCommands.SetAnim(MenuDisplayAnim.AnimType.OutToLeft);
			infoCtrl.GetComponent<MenuDisplayAnim>().SetAnim(MenuDisplayAnim.AnimType.OutToRight);
		}
	}

	// State 1: wait 0.2s for the out-animation, then hide this menu and
	// dispatch the selected command.
	void OnChangingToCommandModeHandler() {
		timeTick += Time.deltaTime;
		if (timeTick >= 0.2f) {
			timeTick = 0;
			state = 0;
			gameObject.SetActive(false);
			switch (commandIdx) {
			case -1: // back: return to the main strategy mode
				strCtrl.ReturnMainMode();
				break;
			case 0: // show the army's generals
				generalsInfo.AddArmyGeneralsList(armyInfo);
				break;
			case 1: // choose a city to move toward
				selectTargetCity.SetArmy(armyInfo);
				break;
			case 2: // garrison in place
				armyInfo.armyCtrl.SetArmyGarrison();
				strCtrl.ReturnMainMode();
				break;
			case 3: // show the army's prisoners
				generalsInfo.AddArmyPrisonsList(armyInfo);
				break;
			case 4: // enter the nearby friendly city
				OnIntoCity();
				break;
			}
		}
	}

	// Record the clicked command and start the out-animations; the command
	// itself runs once the animation delay elapses (see state 1 handler).
	void OnCommandsButtonClickHandler(object data) {
		commandIdx = (int)data;
		state = 1;
		armyCommands.SetAnim(MenuDisplayAnim.AnimType.OutToLeft);
		infoCtrl.GetComponent<MenuDisplayAnim>().SetAnim(MenuDisplayAnim.AnimType.OutToRight);
	}

	// Move the army into the previously located nearby city.
	void OnIntoCity() {
		armyInfo.armyCtrl.IntoTheCity(cityCanIntoIdx);
		strCtrl.ReturnMainMode();
	}

	// Entry point: select an army, show this menu, and enable/disable the
	// command buttons according to ownership and context.
	public void SetArmyInfo(ArmyInfo a) {
		armyInfo = a;
		gameObject.SetActive(true);
		infoCtrl.SetArmy(armyInfo);
		if (armyInfo.king == Controller.kingIndex) {
			// Player-owned army: enable everything, then restrict by context.
			for (int i=0; i<commands.Length; i++) {
				commands[i].SetButtonEnable(true);
			}
			// "Prisoners" (button 3) only when the army actually holds prisoners.
			if (armyInfo.prisons.Count > 0) {
				commands[3].SetButtonEnable(true);
			} else {
				commands[3].SetButtonEnable(false);
			}
			// "Enter city" (button 4) only when a city is within range
			// (30 — presumably a distance threshold; confirm units) and
			// belongs to the same king.
			Vector3 pos = armyInfo.armyCtrl.transform.position;
			cityCanIntoIdx = path.GetCityIndex(pos, 30);
			if (cityCanIntoIdx != -1) {
				if (Informations.Instance.GetCityInfo(cityCanIntoIdx).king == armyInfo.king) {
					commands[4].SetButtonEnable(true);
				} else {
					commands[4].SetButtonEnable(false);
				}
			} else {
				commands[4].SetButtonEnable(false);
			}
		} else {
			// Foreign army: view-only, all commands disabled.
			for (int i=0; i<commands.Length; i++) {
				commands[i].SetButtonEnable(false);
			}
		}
	}
}
|
## 题3: 无重复字符的最长子串
### 描述
```
给定一个字符串,请你找出其中不含有重复字符的 最长子串 的长度。
示例 1:
输入: "abcabcbb"
输出: 3
解释: 因为无重复字符的最长子串是 "abc",所以其长度为 3。
示例 2:
输入: "bbbbb"
输出: 1
解释: 因为无重复字符的最长子串是 "b",所以其长度为 1。
示例 3:
输入: "pwwkew"
输出: 3
解释: 因为无重复字符的最长子串是 "wke",所以其长度为 3。
请注意,你的答案必须是 子串 的长度,"pwke" 是一个子序列,不是子串。
```
### 思路
标签: 滑动窗口
步骤:
暴力解法时间复杂度较高,会达到 O(n^2),故而采取滑动窗口的方法降低时间复杂度
定义一个 map 数据结构存储 (k, v),其中 key 值为字符,value 值为字符位置 +1,
加 1 表示从字符位置后一个才开始不重复
我们定义不重复子串的开始位置为 start,结束位置为 end
随着 end 不断遍历向后,会遇到与 [start, end] 区间内字符相同的情况,此时将字符作为 key 值,获取其 value 值,
并更新 start,此时 [start, end] 区间内不存在重复字符
无论是否更新 start,都会更新其 map 数据结构和结果 ans。
时间复杂度:O(n)
不合理的解法: 暴力法
简单说,就是遍历
相似的题: topK问题
```java
```
|
---
layout: page
title: You want to know me?
tags: [about, know, new, here]
modified: 2015-05-14T20:53:07.573882-04:00
image:
feature: sample-image-2.jpg
credit: Raspberrypi
creditlink: https://www.raspberrypi.org/
---
### On this blog, I will talk about numerous things, like:

* Crazy ideas;
* Plugins and applications made by me and developed through my ideas;
* Passions taken off paper and made real;
* Things developed at my work that need to be shared;
* And of course, some nonsense too... geek stuff, news and other uncategorized information.
<a markdown="0" href="https://icarobichir.com" class="btn">Welcome</a>
|
using Otter.Utility.MonoGame;
using System;
using WormGame.Entities;
using WormGame.Static;
namespace WormGame.Core
{
    /// @author Antti Harju
    /// @version v0.5
    /// <summary>
    /// Collision system. Maintains a 2D object grid mirroring entity
    /// positions and translates between entity (pixel) and grid coordinates.
    /// </summary>
    public class Collision
    {
        /// Collision types. Use these instead of raw ints.
        public readonly int invalid = 0;
        public readonly int worm = 1;
        public readonly int block = 2;
        public readonly int fruit = 3;
        public readonly int empty = 4;

        private readonly object[,] grid;
        private readonly int leftBorder; // entity x of grid column 0
        private readonly int topBorder;  // entity y of grid row 0
        private readonly int size;       // entity-space size of one cell
        private readonly int width;      // grid width in cells
        private readonly int height;     // grid height in cells


        /// <summary>
        /// Constructor, initializes grid.
        /// </summary>
        /// <param name="settings">Settings</param>
        public Collision(Settings settings)
        {
            width = settings.width;
            height = settings.height;
            size = settings.size;
            grid = new object[width, height];
            leftBorder = settings.leftBorder;
            topBorder = settings.topBorder;
        }


        /// <summary>
        /// Get object from grid.
        /// </summary>
        /// <param name="x">Horizontal grid position</param>
        /// <param name="y">Vertical grid position</param>
        /// <returns>Object</returns>
        public ref object Get(int x, int y)
        {
            return ref grid[x, y];
        }


        /// <summary>
        /// Get object from grid.
        /// </summary>
        /// <param name="position">Entity position</param>
        /// <returns>Object</returns>
        public ref object Get(Vector2 position)
        {
            return ref Get(X(position.X), Y(position.Y));
        }


        /// <summary>
        /// Get object type from grid. Out-of-bounds coordinates yield
        /// <see cref="invalid"/>; unknown objects throw.
        /// </summary>
        /// <param name="x">Horizontal grid position</param>
        /// <param name="y">Vertical grid position</param>
        /// <param name="consume">Consume fruit</param>
        /// <returns>Object type</returns>
        public int GetType(int x, int y, bool consume = false)
        {
            if (x < 0 ||
                y < 0 ||
                x >= width ||
                y >= height)
                return invalid;
            object current = grid[x, y];
            if (current == null)
                return empty;
            if (current is BlockModule)
                return block;
            if (current is Worm)
                return worm;
            if (current is Fruits currentFruit)
            {
                if (consume)
                {
                    currentFruit.Disable(x, y);
                    currentFruit.Spawn(); // The number of fruits stays constant
                }
                return fruit;
            }
            throw new CollisionException();
        }


        /// <summary>
        /// Get object type from grid.
        /// </summary>
        /// <param name="position">Entity position</param>
        /// <param name="consume">Consume fruit</param>
        /// <returns>Object type</returns>
        public int GetType(Vector2 position, bool consume = false)
        {
            return GetType(X(position.X), Y(position.Y), consume);
        }


        /// <summary>
        /// Set object to grid.
        /// </summary>
        /// <param name="obj">Object</param>
        /// <param name="x">Horizontal grid position</param>
        /// <param name="y">Vertical grid position</param>
        public void Set(object obj, int x, int y)
        {
            grid[x, y] = obj;
        }


        /// <summary>
        /// Set object to grid.
        /// </summary>
        /// <param name="obj">Object</param>
        /// <param name="position">Entity position</param>
        public void Set(object obj, Vector2 position)
        {
            Set(obj, X(position.X), Y(position.Y));
        }


        /// <summary>
        /// Set entity to grid.
        /// </summary>
        /// <param name="obj">Object</param>
        /// <param name="x">Horizontal entity position</param>
        /// <param name="y">Vertical entity position</param>
        public void Set(object obj, float x, float y)
        {
            Set(obj, X(x), Y(y));
        }


        /// <summary>
        /// Set block module to grid, filling the module's whole rectangle.
        /// </summary>
        /// <param name="module">Block module or null</param>
        /// <param name="startX">module.X</param>
        /// <param name="startY">module.Y</param>
        /// <param name="width">module.Width</param>
        /// <param name="height">module.Height</param>
        public void Set(object module, int startX, int startY, int width, int height)
        {
            for (int x = startX; x < startX + width; x++)
                for (int y = startY; y < startY + height; y++)
                    Set(module, x, y);
        }


        /// <summary>
        /// Translate horizontal entity position to a grid one.
        /// </summary>
        /// <param name="x">Horizontal entity position</param>
        /// <returns>Horizontal grid position</returns>
        public int X(float x)
        {
            return (SimpleMath.Round(x) - leftBorder) / size;
        }


        /// <summary>
        /// Translate vertical entity position to a grid one.
        /// </summary>
        /// <param name="y">Vertical entity position</param>
        /// <returns>Vertical grid position</returns>
        public int Y(float y)
        {
            return (SimpleMath.Round(y) - topBorder) / size;
        }


        /// <summary>
        /// Translate horizontal grid position to an entity one.
        /// </summary>
        /// <param name="x">Horizontal grid position</param>
        /// <returns>Horizontal entity position</returns>
        public int EntityX(int x)
        {
            return leftBorder + size * x;
        }


        /// <summary>
        /// Translates vertical grid position to an entity one.
        /// </summary>
        /// <param name="y">Vertical grid position</param>
        /// <returns>Vertical entity position</returns>
        public int EntityY(int y)
        {
            return topBorder + size * y;
        }


        /// <summary>
        /// Clear grid (all cells set to null).
        /// </summary>
        public void Reset()
        {
            for (int x = 0; x < width; x++)
                for (int y = 0; y < height; y++)
                {
                    grid[x, y] = null;
                }
        }


#if DEBUG
        /// <summary>
        /// Visualize collision grid in debug console as ASCII:
        /// '.' empty, 'x' block, 'o' worm, 'f' fruit.
        /// </summary>
        public void Visualize()
        {
            System.Text.StringBuilder visualization = new System.Text.StringBuilder((width + 1) * height);
            for (int y = 0; y < height; y++)
            {
                visualization.Append("\n");
                for (int x = 0; x < width; x++)
                {
                    object current = grid[x, y];
                    if (current == null)
                    {
                        visualization.Append('.');
                        continue;
                    }
                    if (current is BlockModule)
                    {
                        visualization.Append('x');
                        continue;
                    }
                    if (current is Worm)
                    {
                        visualization.Append('o');
                        continue;
                    }
                    if (current is Fruits)
                    {
                        visualization.Append('f');
                        continue;
                    }
                    throw new CollisionException();
                }
            }
            Console.CursorTop = 0;
            Console.WriteLine(visualization.ToString());
        }
#endif
    }
    /// @author Antti Harju
    /// @version v0.5
    /// <summary>
    /// Exception for collision: thrown when the grid contains an object of
    /// an unrecognized type.
    /// </summary>
    public class CollisionException : Exception
    {
        /// <summary>
        /// Add custom exception message.
        /// </summary>
        public CollisionException() : base("Unknown collision object.") { }
    }
}
|
# NodeumApi.TaskExecution
## Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**id** | **String** | | [optional]
**taskId** | **Number** | | [optional]
**name** | **String** | | [optional]
**workflowType** | **String** | | [optional]
**workflowAction** | **String** | | [optional]
**sourceType** | **String** | | [optional]
**destinationType** | **String** | | [optional]
**status** | **String** | | [optional]
**logTime** | **String** | | [optional]
**jobStarted** | **String** | | [optional]
**jobFinished** | **String** | | [optional]
**toProcessSize** | **Number** | | [optional]
**processedSize** | **Number** | | [optional]
**toProcessFiles** | **Number** | | [optional]
**processedFiles** | **Number** | | [optional]
**finalizedFiles** | **Number** | | [optional]
**estimationTime** | **Number** | | [optional]
**bandwidth** | **Number** | | [optional]
## Enum: WorkflowTypeEnum
* `active_archive` (value: `"active_archive"`)
* `offline_archive` (value: `"offline_archive"`)
* `data_exchange` (value: `"data_exchange"`)
* `data_migration` (value: `"data_migration"`)
* `maintenance` (value: `"maintenance"`)
* `data_enrichment` (value: `"data_enrichment"`)
## Enum: WorkflowActionEnum
* `copy` (value: `"copy"`)
* `move` (value: `"move"`)
* `erase` (value: `"erase"`)
* `scan` (value: `"scan"`)
* `rehydratation` (value: `"rehydratation"`)
* `format` (value: `"format"`)
* `check_consistency` (value: `"check_consistency"`)
* `duplication` (value: `"duplication"`)
* `cache_cleaning` (value: `"cache_cleaning"`)
* `ejection` (value: `"ejection"`)
* `get_index` (value: `"get_index"`)
* `full_backup` (value: `"full_backup"`)
* `incremental_backup` (value: `"incremental_backup"`)
## Enum: SourceTypeEnum
* `container` (value: `"container"`)
* `primary_nas` (value: `"primary_nas"`)
* `secondary_nas` (value: `"secondary_nas"`)
* `primary_cloud` (value: `"primary_cloud"`)
* `secondary_cloud` (value: `"secondary_cloud"`)
* `secondary_tape` (value: `"secondary_tape"`)
## Enum: DestinationTypeEnum
* `container` (value: `"container"`)
* `primary_nas` (value: `"primary_nas"`)
* `secondary_nas` (value: `"secondary_nas"`)
* `primary_cloud` (value: `"primary_cloud"`)
* `secondary_cloud` (value: `"secondary_cloud"`)
* `secondary_tape` (value: `"secondary_tape"`)
## Enum: StatusEnum
* `not_activated` (value: `"not_activated"`)
* `done` (value: `"done"`)
* `in_progress` (value: `"in_progress"`)
* `error` (value: `"error"`)
* `paused` (value: `"paused"`)
* `stopped_by_system` (value: `"stopped_by_system"`)
* `in_queue` (value: `"in_queue"`)
* `finished_with_warnings` (value: `"finished_with_warnings"`)
* `calculating` (value: `"calculating"`)
* `stopped_by_user` (value: `"stopped_by_user"`)
|
#!/bin/sh
# Precompile assets for production.
# Abort immediately if any step fails so that a broken compile is not
# masked by the success messages below (previously the script carried on
# and printed "Assets Pre-compiled!" even after a failing rake task).
set -e

bundle exec rake assets:precompile
echo "Assets Pre-compiled!"

bundle exec rake webpacker:compile
echo "Ran Webpacker Compile!"
|
<?php declare(strict_types = 1);
namespace Contributte\GopayInline\Api\Lists;
/**
 * Enumeration of GoPay payment lifecycle states, as returned by the
 * GoPay REST API's `state` field.
 */
class PaymentState
{
	// Payment created, awaiting the payer's action
	public const CREATED = 'CREATED';

	// Payer has chosen a payment method but not yet paid
	public const PAYMENT_METHOD_CHOSEN = 'PAYMENT_METHOD_CHOSEN';

	// Payment completed successfully
	public const PAID = 'PAID';

	// Payment pre-authorized (funds reserved, not yet captured)
	public const AUTHORIZED = 'AUTHORIZED';

	// Payment canceled
	public const CANCELED = 'CANCELED';

	// Payment expired before completion
	public const TIMEOUTED = 'TIMEOUTED';

	// Payment fully refunded
	public const REFUNDED = 'REFUNDED';

	// Payment partially refunded
	public const PARTIALLY_REFUNDED = 'PARTIALLY_REFUNDED';
}
|
import { Logger } from '@hmcts/nodejs-logging';
import autobind from 'autobind-decorator';
import axios, { AxiosResponse } from 'axios';
import config from 'config';
import { Response } from 'express';
import { v4 as uuid } from 'uuid';
import { CITIZEN_UPDATE } from '../../../app/case/definition';
import { AppRequest } from '../../../app/controller/AppRequest';
import { CHECK_ANSWERS_URL } from '../../urls';
import { createToken } from './createToken';
const logger = Logger.getLogger('PCQGetController');
@autobind
export default class PCQGetController {
  /**
   * Redirect the applicant to the PCQ (equality & diversity) service the
   * first time they reach this page, minting and persisting a PCQ ID.
   * Falls back to the check-your-answers page when PCQ is unreachable,
   * reports unhealthy, or the user already has a PCQ ID.
   */
  public async get(req: AppRequest, res: Response): Promise<void> {
    if (!req.session.userCase.applicant1PcqId) {
      const url = config.get('services.equalityAndDiversity.url');
      const path: string = config.get('services.equalityAndDiversity.path');
      const health = `${url}/health`;

      // Only hand off to PCQ when its health endpoint reports 'UP'.
      try {
        const response: AxiosResponse<StatusResponse> = await axios.get(health);
        if (response.data.status && response.data.status === 'UP') {
          req.session.userCase.applicant1PcqId = uuid();
        } else {
          return res.redirect(CHECK_ANSWERS_URL);
        }
      } catch (err) {
        logger.error('Could not connect to PCQ: ', err.message);
        return res.redirect(CHECK_ANSWERS_URL);
      }

      const protocol = req.app.locals.developmentMode ? 'http://' : '';
      const port = req.app.locals.developmentMode ? `:${config.get('port')}` : '';
      const params = {
        serviceId: 'NEW_DIVORCE_LAW',
        actor: 'APPLICANT1',
        pcqId: req.session.userCase.applicant1PcqId,
        partyId: req.session.user.email,
        returnUrl: `${protocol}${res.locals.host}${port}${CHECK_ANSWERS_URL}`,
        language: req.session.lang || 'en',
      };
      // The token is created over the raw params; partyId is URL-encoded
      // only afterwards so the token covers the original value.
      params['token'] = createToken(params);
      params.partyId = encodeURIComponent(params.partyId);

      try {
        req.session.userCase = await req.locals.api.triggerEvent(
          req.session.userCase.id,
          { applicant1PcqId: req.session.userCase.applicant1PcqId },
          CITIZEN_UPDATE
        );
      } catch (err) {
        req.locals.logger.error('Error updating PCQ ID for Applicant 1', err);
        // Bug fix: without this `return`, execution fell through to
        // req.session.save() below and attempted a second redirect after
        // this one, triggering a "headers already sent" error.
        return res.redirect(CHECK_ANSWERS_URL);
      }

      const qs = Object.keys(params)
        .map(key => `${key}=${params[key]}`)
        .join('&');
      // Persist the session before leaving the service, then redirect to PCQ.
      req.session.save(err => {
        if (err) {
          throw err;
        }
        res.redirect(`${url}${path}?${qs}`);
      });
    } else {
      // Already completed (or started) PCQ — skip straight to check-your-answers.
      res.redirect(CHECK_ANSWERS_URL);
    }
  }
}
/** Shape of the PCQ health endpoint's JSON body. */
export interface StatusResponse {
  status: 'UP' | 'DOWN' | undefined;
}
|
package com.github.mdr.mash.evaluator
import com.github.mdr.mash.functions.MashCallable
import com.github.mdr.mash.parser.Provenance
import com.github.mdr.mash.utils.{ LineInfo, Point, PointedRegion }
case class SourceLocation(provenance: Provenance, pointedRegion: PointedRegion) {

  /** The raw source text covered by this location. */
  def source = pointedRegion.of(provenance.source)

  /**
   * The source text with the location's leading indentation stripped from
   * every line — but only when nothing except spaces precedes the location
   * on its first line; otherwise the text is returned unchanged.
   */
  def reindentedSource: String = {
    val lineInfo = new LineInfo(provenance.source)
    val Point(lineIndex, column) = lineInfo.lineAndColumn(pointedRegion.region.offset)
    val onlySpacesBefore = lineInfo.line(lineIndex).take(column).forall(_ == ' ')
    if (onlySpacesBefore) reindent(source, column) else source
  }

  // Drop `indent` leading spaces from every line after the first,
  // leaving lines alone when they are not indented that far with spaces.
  private def reindent(text: String, indent: Int): String = {
    val allLines = new LineInfo(text).lines
    val stripLine = (line: String) =>
      if (line.take(indent).forall(_ == ' ')) line.drop(indent) else line
    (allLines.take(1) ++ allLines.drop(1).map(stripLine)).mkString("\n")
  }
}
/** One frame of an evaluator stack trace: an optional source location plus the callable involved, if known. */
case class StackTraceItem(locationOpt: Option[SourceLocation], functionOpt: Option[MashCallable] = None)
object EvaluatorException {

  /** Convenience factory: build an exception whose stack holds a single frame for `locationOpt`. */
  def apply(message: String, locationOpt: Option[SourceLocation]): EvaluatorException =
    EvaluatorException(message, List(StackTraceItem(locationOpt)))
}
/**
 * Runtime failure during evaluation, carrying a stack of trace frames that
 * is grown as the error propagates outwards.
 */
case class EvaluatorException(
  message: String,
  stack: List[StackTraceItem] = Nil,
  cause: Throwable = null)
  extends RuntimeException(message, cause) {

  /** The underlying cause, if one was supplied. */
  def causeOpt: Option[Throwable] = Option(cause)

  /** Attach the given callable to the most recent stack frame, when there is one. */
  def lastWasFunction(functionOpt: Option[MashCallable]): EvaluatorException = {
    val updatedStack = stack.headOption.fold(List.empty[StackTraceItem]) { top =>
      top.copy(functionOpt = functionOpt) :: stack.tail
    }
    copy(stack = updatedStack)
  }

  /** Prepend a new stack frame for the given location. */
  def push(locationOpt: Option[SourceLocation]): EvaluatorException =
    copy(stack = StackTraceItem(locationOpt) :: stack)
}
|
---
id: js-quickstart
title: Quickstart for Sauce Labs with Cypress, Playwright, and TestCafe
sidebar_label: Getting Started with JavaScript Testing
description: Basic steps for getting going quickly with JavaScript based frameworks using saucectl
---
<p><span className="sauceRed">PAGE DEPRECATED</span></p>
Please refer to the dedicated documentation for supported JavaScript frameworks for information about using `saucectl` to run your web app automation tests.
* [Cypress](/web-apps/automated-testing/cypress)
* [Playwright](/web-apps/automated-testing/playwright)
* [TestCafe](/web-apps/automated-testing/testcafe)
* [Puppeteer](/web-apps/automated-testing/puppeteer)
|
#! /bin/bash
# Build helper for Travis CI: compiles the native addon inside an ARM node
# Docker image. The image can be overridden via the first positional argument.
echo "Docker build assistant for travis..."
set -ex

# Default build image; callers may pass an alternative as $1.
DOCKER_NODE="arm32v7/node:10-buster"
[ -n "$1" ] && DOCKER_NODE="$1"
echo "Building with docker for node $DOCKER_NODE"

#docker run --rm --privileged multiarch/qemu-user-static:register --reset -p yes
#docker run -t --rm -v $(pwd):/root/node-nrf24 \
#  --workdir /root/node-nrf24 \
#  $DOCKER_NODE \
#  /bin/bash -c "npm install node-gyp -g && ./build_rf24libs.sh && node-gyp rebuild"
#  /bin/bash -c "node --version"

# Bug fix: this previously referenced the never-set variable $NODE_VERSION, so
# the image argument expanded to nothing and docker treated the -c script as
# the image name. Use the $DOCKER_NODE selected above instead.
# (Also fixed the "assitant" typo in the banner message.)
docker run -t --rm --entrypoint="/bin/sh" -v$(pwd):/root/app --workdir /root/app $DOCKER_NODE \
 -c "npm --version && npm install && npm install node-gyp -g && ./build_rf24libs.sh && node-gyp rebuild"
echo "Finished with: $?"
echo "done!"
|
# encoding: UTF-8
module Rivet
  # Gem version string (semantic versioning: MAJOR.MINOR.PATCH).
  VERSION = '3.2.0'
end
|
/*
* Copyright 2013 Chiwan Park
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.chiwanpark.push.database
import com.chiwanpark.push.util.PushJsonProtocol._
import slick.driver.PostgresDriver.api._
import slick.lifted.Tag
import spray.json._
import scala.language.implicitConversions
/** Generic stored push certificate; `value` carries a type-specific JSON payload keyed by `certType`. */
case class Certificate(id: Option[Int] = None, certType: String, name: String, value: String)
/** APNS view of a [[Certificate]]; `mode`, `certificate` and `password` come from the JSON payload. */
case class APNSCertificate(id: Option[Int] = None, name: String, mode: String, certificate: String, password: String)
/** Implicit conversions between the generic row type and its APNS-specific view. */
object CertificateConversion {

  /**
   * Decode the JSON payload of an "apns" certificate into an [[APNSCertificate]].
   * Throws IllegalArgumentException for any other certificate type.
   */
  implicit def CertificateToAPNSCertificate(certificate: Certificate): APNSCertificate = {
    if (certificate.certType != "apns") {
      throw new IllegalArgumentException("Cannot convert the given certificate to APNSCertificate!")
    }
    val fields = certificate.value.parseJson.convertTo[Map[String, String]]
    APNSCertificate(certificate.id, certificate.name, fields("mode"), fields("certificate"), fields("password"))
  }

  /** Serialize an [[APNSCertificate]] back into a generic row with a compact JSON payload. */
  implicit def APNSCertificateToCertificate(certificate: APNSCertificate): Certificate = {
    val fields = Map(
      "mode" -> certificate.mode,
      "certificate" -> certificate.certificate,
      "password" -> certificate.password)
    Certificate(certificate.id, "apns", certificate.name, fields.toJson.compactPrint)
  }
}
/** Slick table mapping for "push_certificates". */
class CertificateTable(tag: Tag) extends Table[Certificate](tag, "push_certificates") {
  def id = column[Int]("CERTIFICATE_ID", O.PrimaryKey, O.AutoInc)
  def certType = column[String]("CERTIFICATE_TYPE")
  def name = column[String]("NAME")
  def value = column[String]("VALUE")

  // Bidirectional projection between a table row and the Certificate case class.
  override def * = (id.?, certType, name, value) <>(Certificate.tupled, Certificate.unapply)
}
/** Pre-built queries over the certificates table. */
object CertificateQuery extends TableQuery(new CertificateTable(_)) {

  // Insert a certificate and return its generated primary key.
  // NOTE(review): the parameter name "certificiate" is a typo; left as-is
  // because Scala callers may be using it as a named argument.
  def insert(certificiate: Certificate) = (this returning this.map(_.id)) += certificiate

  // Fetch the certificate with the given primary key.
  def selectById(id: Int) = this.filter(_.id === id).result
}
|
-- Largest Square Inside A Circle
-- https://www.codewars.com/kata/5887a6fe0cfe64850800161c
module Kata (areaLargestSquare) where
-- | Area of the largest square that fits inside a circle of radius @r@:
-- the square's diagonal equals the diameter 2r, so its area is (2r)^2 / 2 = 2r^2.
areaLargestSquare :: Double -> Double
areaLargestSquare r = 2 * r * r
|
## intent:greet
- hey
- hello
- hi
- good morning
- good evening
- hey there
## intent:goodbye
- bye
- goodbye
- see you around
- see you later
## intent:mood_affirm
- yes
- indeed
- of course
- that sounds good
- correct
## intent:mood_deny
- no
- never
- I don't think so
- don't like that
- no way
- not really
## intent:mood_great
- perfect
- very good
- great
- amazing
- wonderful
- I am feeling very good
- I am great
- I'm good
## intent:mood_unhappy
- sad
- very sad
- unhappy
- bad
- very bad
- awful
- terrible
- not very good
- extremly sad
- so sad
## intent:inbox
- show me my inbox items
- show me [high](priority) priority items in my inbox
- what is there in my inbox
- whats up
- what should i do today
## intent:dashboard
- plot dashboard for my projects
- plot organization dashboard
- show me my dashboard
- what is the status of the project
## intent:group-by-priority
- Group by Priority
## intent:group-by-item-type
- Group by Item-type
## lookup:priority
- High
- Critical
- low
|
package com.hariofspades.blockchain.inject
import com.hariofspades.blockchain.BuildConfig
import com.hariofspades.domain.repository.TransactionRemote
import com.hariofspades.remote.TransactionRemoteImpl
import com.hariofspades.remote.mapper.TransactionHistoryMapper
import com.hariofspades.remote.mapper.TransactionItemMapper
import com.hariofspades.remote.service.TransactionHistoryService
import com.hariofspades.remote.service.TransactionHistoryServiceFactory
import org.kodein.di.Kodein
import org.kodein.di.generic.bind
import org.kodein.di.generic.instance
import org.kodein.di.generic.provider
import org.kodein.di.generic.singleton
// Kodein bindings for the remote (network) data layer.
val remoteModule = Kodein.Module("Remote Module") {
    // Single shared service instance; BuildConfig.DEBUG is forwarded to the factory.
    bind<TransactionHistoryService>() with singleton {
        TransactionHistoryServiceFactory.makeTransactionHistoryService(BuildConfig.DEBUG)
    }
    // Mappers are stateless, so one shared instance each is enough.
    bind<TransactionItemMapper>() with singleton { TransactionItemMapper() }
    bind<TransactionHistoryMapper>() with singleton { TransactionHistoryMapper(instance()) }
    // Provider binding: a fresh TransactionRemoteImpl per injection.
    bind<TransactionRemote>() with provider { TransactionRemoteImpl(instance(), instance()) }
}
|
# setup-yq-action
GitHub Action to set up the `jq` and `yq` commands for YAML and JSON parsing.
Example of use:
```yaml
name: Release
on: [ "push" ]
jobs:
parse_yaml:
name: Parse Yaml
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Install yq
id: setup-yq
uses: shiipou/[email protected]
- name: get version
run: |
VERSION=$(${{ steps.setup-yq.outputs.yq-binary }} '.dependency.my_component.git.ref' pubspec.yaml)
```
|
/*******************************************************************************
Copyright(c) 2015-2021 Parker Hannifin Corp. All rights reserved.
MIT Licensed. See the included LICENSE.txt for a copy of the full MIT License.
*******************************************************************************/
#include "mscl/MicroStrain/Wireless/Packets/WirelessPacket.h"
#include "mscl/MicroStrain/Wireless/Packets/WirelessPacketUtils.h"
#include "mscl/MicroStrain/ByteStream.h"
#include "mscl/MicroStrain/DataBuffer.h"
#include "mscl/MicroStrain/Wireless/Packets/WirelessPacketCollector.h"
#include "mscl/MicroStrain/Wireless/WirelessParser.h"
#include "mscl/MicroStrain/ResponseCollector.h"
#include <boost/test/unit_test.hpp>
#include <turtle/mock.hpp>
using namespace mscl;
BOOST_AUTO_TEST_SUITE(WirelessPacketUtils_Test)
BOOST_AUTO_TEST_CASE(WirelessPacketUtils_IntegrityCheckFalse)
{
    // A packet tagged with the unknown type must never pass the integrity check.
    WirelessPacket unknownPacket;
    unknownPacket.type(WirelessPacket::packetType_unknown);

    const bool passed = WirelessPacketUtils::packetIntegrityCheck(unknownPacket);
    BOOST_CHECK_EQUAL(passed, false);
}
// Parse a well-formed ASPP LDC packet and verify it passes the integrity check.
BOOST_AUTO_TEST_CASE(WirelessPacketUtils_IntegrityCheckTrue_LDC)
{
    WirelessPacket packet;
    DataBuffer b(1024);
    BufferWriter bw = b.getBufferWriter();
    //a valid ASPP packet that has an LDC packet type
    bw.buffer()[0] = 0xAA;   //ASPP start-of-packet byte
    bw.buffer()[1] = 0x07;
    bw.buffer()[2] = 0x04;   //LDC Packet Type
    bw.buffer()[3] = 0x00;   //node address (2 bytes)
    bw.buffer()[4] = 0x01;
    bw.buffer()[5] = 0x08;   //payload length
    bw.buffer()[6] = 0x02;   //first byte in payload of 0x02 signifies sync sampling packet type
    bw.buffer()[7] = 0x01;   //channel mask
    bw.buffer()[8] = 0x00;
    bw.buffer()[9] = 0x03;   //data type
    bw.buffer()[10] = 0x00;
    bw.buffer()[11] = 0x00;
    bw.buffer()[12] = 0x00;
    bw.buffer()[13] = 0x00;
    bw.buffer()[14] = 0x00;
    bw.buffer()[15] = 0x00;
    bw.buffer()[16] = 0x00;
    bw.buffer()[17] = 0x1A;  //checksum byte matching the bytes above
    bw.commit(18);
    std::shared_ptr<ResponseCollector> rc(new ResponseCollector);
    WirelessPacketCollector packetCollector;
    RawBytePacketCollector rawBytePacketCollector;
    WirelessParser parser(packetCollector, rc, rawBytePacketCollector);
    //build the packet by calling parseAsPacket
    BOOST_CHECK_EQUAL(parser.parseAsPacket(b, packet, WirelessTypes::freq_11), WirelessParser::parsePacketResult_completePacket);
    BOOST_CHECK_EQUAL(WirelessPacketUtils::packetIntegrityCheck(packet), true);
}
// Parse a well-formed Buffered LDC packet and verify the integrity check passes.
BOOST_AUTO_TEST_CASE(WirelessPacketUtils_IntegrityCheckTrue_BufferedLdc)
{
    WirelessPacket packet;
    DataBuffer b(1024);
    BufferWriter bw = b.getBufferWriter();
    bw.buffer()[0] = 0xAA;   //ASPP start-of-packet byte
    bw.buffer()[1] = 0x07;
    bw.buffer()[2] = 0x0D;   //Buffered LDC Packet Type
    bw.buffer()[3] = 0x00;   //node address (2 bytes)
    bw.buffer()[4] = 0x01;
    bw.buffer()[5] = 0x08;   //payload length
    bw.buffer()[6] = 0x02;   //app id
    bw.buffer()[7] = 0x01;   //channel mask
    bw.buffer()[8] = 0x70;   //sample rate
    bw.buffer()[9] = 0x03;   //data type
    bw.buffer()[10] = 0x00;
    bw.buffer()[11] = 0x00;
    bw.buffer()[12] = 0x00;
    bw.buffer()[13] = 0x00;
    bw.buffer()[14] = 0x00;
    bw.buffer()[15] = 0x00;
    bw.buffer()[16] = 0x00;
    bw.buffer()[17] = 0x93;  //checksum byte matching the bytes above
    bw.commit(18);
    std::shared_ptr<ResponseCollector> rc(new ResponseCollector);
    WirelessPacketCollector packetCollector;
    RawBytePacketCollector rawBytePacketCollector;
    WirelessParser parser(packetCollector, rc, rawBytePacketCollector);
    //build the packet by calling parseAsPacket
    BOOST_CHECK_EQUAL(parser.parseAsPacket(b, packet, WirelessTypes::freq_15), WirelessParser::parsePacketResult_completePacket);
    BOOST_CHECK_EQUAL(WirelessPacketUtils::packetIntegrityCheck(packet), true);
}
// Parse a well-formed 16-channel LDC packet and verify the integrity check passes.
BOOST_AUTO_TEST_CASE(WirelessPacketUtils_IntegrityCheckTrue_Ldc16ch)
{
    WirelessPacket packet;
    DataBuffer b(1024);
    BufferWriter bw = b.getBufferWriter();
    //a valid ASPP packet that has an LDC packet type
    bw.buffer()[0] = 0xAA;   //ASPP start-of-packet byte
    bw.buffer()[1] = 0x07;
    bw.buffer()[2] = WirelessPacket::packetType_LDC_16ch;
    bw.buffer()[3] = 0x00;   //node address (2 bytes)
    bw.buffer()[4] = 0x01;
    bw.buffer()[5] = 0x08;   //payload length
    bw.buffer()[6] = 0x00;   //channel mask
    bw.buffer()[7] = 0x01;   //channel mask
    bw.buffer()[8] = 0x70;   //sample rate
    bw.buffer()[9] = 0x23;   //app id / data type
    bw.buffer()[10] = 0x00;
    bw.buffer()[11] = 0x00;
    bw.buffer()[12] = 0x00;
    bw.buffer()[13] = 0x00;
    bw.buffer()[14] = 0x00;
    bw.buffer()[15] = 0x00;
    bw.buffer()[16] = 0x00;
    bw.buffer()[17] = 0xB8;  //checksum byte matching the bytes above
    bw.commit(18);
    std::shared_ptr<ResponseCollector> rc(new ResponseCollector);
    WirelessPacketCollector packetCollector;
    RawBytePacketCollector rawBytePacketCollector;
    WirelessParser parser(packetCollector, rc, rawBytePacketCollector);
    //build the packet by calling parseAsPacket
    BOOST_CHECK_EQUAL(parser.parseAsPacket(b, packet, WirelessTypes::freq_15), WirelessParser::parsePacketResult_completePacket);
    BOOST_CHECK_EQUAL(WirelessPacketUtils::packetIntegrityCheck(packet), true);
}
// A Sync Sampling 16-channel packet whose payload carries no channel data
// must fail the integrity check.
// NOTE(review): unlike the sibling tests, the buffer built here is never fed
// through parser.parseAsPacket(), so `packet` is still default-constructed
// (unknown type) when checked — which makes the check trivially false. The
// test name says "True" but asserts false; confirm whether a parse call was
// intended before the assertion.
BOOST_AUTO_TEST_CASE(WirelessPacketUtils_IntegrityCheckTrue_SyncSampling16ch_noChannelsInPayload)
{
    WirelessPacket packet;
    DataBuffer b(1024);
    BufferWriter bw = b.getBufferWriter();
    bw.buffer()[0] = 0xAA;   //ASPP start-of-packet byte
    bw.buffer()[1] = 0x07;
    bw.buffer()[2] = 0x1A;   //Sync Sampling 16ch Packet Type
    bw.buffer()[3] = 0x00;   //node address (2 bytes)
    bw.buffer()[4] = 0x01;
    bw.buffer()[5] = 0x0E;   //payload length
    bw.buffer()[6] = 0x00;   //channel mask
    bw.buffer()[7] = 0x01;   //channel mask
    bw.buffer()[8] = 0x70;   //sample rate
    bw.buffer()[9] = 0x23;   //app id / data type
    bw.buffer()[10] = 0x00;
    bw.buffer()[11] = 0x00;
    bw.buffer()[12] = 0x00;
    bw.buffer()[13] = 0x00;
    bw.buffer()[14] = 0x00;
    bw.buffer()[15] = 0x00;
    bw.buffer()[16] = 0x00;
    bw.buffer()[17] = 0x00;
    bw.buffer()[18] = 0x00;
    bw.buffer()[19] = 0x00;
    bw.buffer()[20] = 0x00;
    bw.buffer()[21] = 0x00;
    bw.buffer()[22] = 0x00;
    bw.buffer()[23] = 0xC4;  //checksum byte
    bw.commit(24);
    std::shared_ptr<ResponseCollector> rc(new ResponseCollector);
    WirelessPacketCollector packetCollector;
    RawBytePacketCollector rawBytePacketCollector;
    WirelessParser parser(packetCollector, rc, rawBytePacketCollector);
    BOOST_CHECK_EQUAL(WirelessPacketUtils::packetIntegrityCheck(packet), false);
}
// Parse a well-formed Sync Sampling 16-channel packet (with channel data
// present) and verify the integrity check passes.
BOOST_AUTO_TEST_CASE(WirelessPacketUtils_IntegrityCheckTrue_SyncSampling16ch)
{
    WirelessPacket packet;
    DataBuffer b(1024);
    BufferWriter bw = b.getBufferWriter();
    bw.buffer()[0] = 0xAA;   //ASPP start-of-packet byte
    bw.buffer()[1] = 0x07;
    bw.buffer()[2] = 0x1A;   //Sync Sampling 16ch Packet Type
    bw.buffer()[3] = 0x00;   //node address (2 bytes)
    bw.buffer()[4] = 0x01;
    bw.buffer()[5] = 0x10;   //payload length
    bw.buffer()[6] = 0x00;   //channel mask
    bw.buffer()[7] = 0x01;   //channel mask
    bw.buffer()[8] = 0x70;   //sample rate
    bw.buffer()[9] = 0x23;   //app id / data type
    bw.buffer()[10] = 0x00;
    bw.buffer()[11] = 0x00;
    bw.buffer()[12] = 0x00;
    bw.buffer()[13] = 0x00;
    bw.buffer()[14] = 0x00;
    bw.buffer()[15] = 0x00;
    bw.buffer()[16] = 0x00;
    bw.buffer()[17] = 0x00;
    bw.buffer()[18] = 0x00;
    bw.buffer()[19] = 0x00;
    bw.buffer()[20] = 0x00;
    bw.buffer()[21] = 0x00;
    bw.buffer()[22] = 0x00;
    bw.buffer()[23] = 0x00;
    bw.buffer()[24] = 0x00;
    bw.buffer()[25] = 0xC6;  //checksum byte matching the bytes above
    bw.commit(26);
    std::shared_ptr<ResponseCollector> rc(new ResponseCollector);
    WirelessPacketCollector packetCollector;
    RawBytePacketCollector rawBytePacketCollector;
    WirelessParser parser(packetCollector, rc, rawBytePacketCollector);
    //build the packet by calling parseAsPacket
    BOOST_CHECK_EQUAL(parser.parseAsPacket(b, packet, WirelessTypes::freq_15), WirelessParser::parsePacketResult_completePacket);
    BOOST_CHECK_EQUAL(WirelessPacketUtils::packetIntegrityCheck(packet), true);
}
// Parse a well-formed Buffered LDC 16-channel packet and verify the
// integrity check passes.
BOOST_AUTO_TEST_CASE(WirelessPacketUtils_IntegrityCheckTrue_BufferedLdc16ch)
{
    WirelessPacket packet;
    DataBuffer b(1024);
    BufferWriter bw = b.getBufferWriter();
    bw.buffer()[0] = 0xAA;   //ASPP start-of-packet byte
    bw.buffer()[1] = 0x07;
    bw.buffer()[2] = 0x1D;   //Buffered LDC 16ch Packet Type
    bw.buffer()[3] = 0x00;   //node address (2 bytes)
    bw.buffer()[4] = 0x01;
    bw.buffer()[5] = 0x08;   //payload length
    bw.buffer()[6] = 0x00;   //channel mask
    bw.buffer()[7] = 0x01;   //channel mask
    bw.buffer()[8] = 0x70;   //sample rate
    bw.buffer()[9] = 0x23;   //app id / data type
    bw.buffer()[10] = 0x00;
    bw.buffer()[11] = 0x00;
    bw.buffer()[12] = 0x00;
    bw.buffer()[13] = 0x00;
    bw.buffer()[14] = 0x00;
    bw.buffer()[15] = 0x00;
    bw.buffer()[16] = 0x00;
    bw.buffer()[17] = 0xC1;  //checksum byte matching the bytes above
    bw.commit(18);
    std::shared_ptr<ResponseCollector> rc(new ResponseCollector);
    WirelessPacketCollector packetCollector;
    RawBytePacketCollector rawBytePacketCollector;
    WirelessParser parser(packetCollector, rc, rawBytePacketCollector);
    //build the packet by calling parseAsPacket
    BOOST_CHECK_EQUAL(parser.parseAsPacket(b, packet, WirelessTypes::freq_15), WirelessParser::parsePacketResult_completePacket);
    BOOST_CHECK_EQUAL(WirelessPacketUtils::packetIntegrityCheck(packet), true);
}
// Parse a well-formed Async Digital packet and verify the integrity check passes.
BOOST_AUTO_TEST_CASE(WirelessPacketUtils_IntegrityCheckTrue_AsyncDigital)
{
    WirelessPacket packet;
    DataBuffer b(1024);
    BufferWriter bw = b.getBufferWriter();
    bw.buffer()[0] = 0xAA;   //ASPP start-of-packet byte
    bw.buffer()[1] = 0x07;
    bw.buffer()[2] = 0x0E;   //Async Digital Packet
    bw.buffer()[3] = 0x00;   //node address (2 bytes)
    bw.buffer()[4] = 0x01;
    bw.buffer()[5] = 0x10;   //payload length
    bw.buffer()[6] = 0x00;   //channel mask
    bw.buffer()[7] = 0x01;   //channel mask
    bw.buffer()[8] = 0x00;
    bw.buffer()[9] = 0x00;
    bw.buffer()[10] = 0x00;
    bw.buffer()[11] = 0x00;
    bw.buffer()[12] = 0x00;
    bw.buffer()[13] = 0x00;
    bw.buffer()[14] = 0x00;
    bw.buffer()[15] = 0x00;
    bw.buffer()[16] = 0x00;
    bw.buffer()[17] = 0x00;
    bw.buffer()[18] = 0x00;
    bw.buffer()[19] = 0x00;
    bw.buffer()[20] = 0x00;
    bw.buffer()[21] = 0x01;
    bw.buffer()[22] = 0x00;
    bw.buffer()[23] = 0x00;
    bw.buffer()[24] = 0x00;
    bw.buffer()[25] = 0x28;  //checksum byte matching the bytes above
    bw.commit(26);
    std::shared_ptr<ResponseCollector> rc(new ResponseCollector);
    WirelessPacketCollector packetCollector;
    RawBytePacketCollector rawBytePacketCollector;
    WirelessParser parser(packetCollector, rc, rawBytePacketCollector);
    //build the packet by calling parseAsPacket
    BOOST_CHECK_EQUAL(parser.parseAsPacket(b, packet, WirelessTypes::freq_15), WirelessParser::parsePacketResult_completePacket);
    BOOST_CHECK_EQUAL(WirelessPacketUtils::packetIntegrityCheck(packet), true);
}
// Parse a well-formed Async Digital/Analog packet and verify the integrity
// check passes.
BOOST_AUTO_TEST_CASE(WirelessPacketUtils_IntegrityCheckTrue_AsyncDigitalAnalog)
{
    WirelessPacket packet;
    DataBuffer b(1024);
    BufferWriter bw = b.getBufferWriter();
    bw.buffer()[0] = 0xAA;   //ASPP start-of-packet byte
    bw.buffer()[1] = 0x07;
    bw.buffer()[2] = 0x0F;   //Async Digital/Analog Packet
    bw.buffer()[3] = 0x00;   //node address (2 bytes)
    bw.buffer()[4] = 0x01;
    bw.buffer()[5] = 0x13;   //payload length
    bw.buffer()[6] = 0x00;   //channel mask
    bw.buffer()[7] = 0x01;   //channel mask
    bw.buffer()[8] = 0x03;   //data type
    bw.buffer()[9] = 0x00;
    bw.buffer()[10] = 0x00;
    bw.buffer()[11] = 0x00;
    bw.buffer()[12] = 0x00;
    bw.buffer()[13] = 0x00;
    bw.buffer()[14] = 0x00;
    bw.buffer()[15] = 0x00;
    bw.buffer()[16] = 0x00;
    bw.buffer()[17] = 0x00;
    bw.buffer()[18] = 0x00;
    bw.buffer()[19] = 0x00;
    bw.buffer()[20] = 0x00;
    bw.buffer()[21] = 0x00;
    bw.buffer()[22] = 0x01;
    bw.buffer()[23] = 0x00;
    bw.buffer()[24] = 0x00;
    bw.buffer()[25] = 0x00;
    bw.buffer()[26] = 0x00;
    bw.buffer()[27] = 0x00;
    bw.buffer()[28] = 0x2F;  //checksum byte matching the bytes above
    bw.commit(29);
    std::shared_ptr<ResponseCollector> rc(new ResponseCollector);
    WirelessPacketCollector packetCollector;
    RawBytePacketCollector rawBytePacketCollector;
    WirelessParser parser(packetCollector, rc, rawBytePacketCollector);
    //build the packet by calling parseAsPacket
    BOOST_CHECK_EQUAL(parser.parseAsPacket(b, packet, WirelessTypes::freq_15), WirelessParser::parsePacketResult_completePacket);
    BOOST_CHECK_EQUAL(WirelessPacketUtils::packetIntegrityCheck(packet), true);
}
// A packet whose header says TC-LinkLDC but whose payload marks it as sync
// sampling should have its type corrected during parsing.
BOOST_AUTO_TEST_CASE(WirelessPacketUtils_CorrectPacketType_TCLink)
{
    WirelessPacket packet;
    DataBuffer b(1024);
    BufferWriter bw = b.getBufferWriter();
    //a valid ASPP packet that has a TC-LinkLDC type, but actually is a sync sampling packet
    bw.buffer()[0] = 0xAA;   //ASPP start-of-packet byte
    bw.buffer()[1] = 0x07;
    bw.buffer()[2] = 0x09;   //TC-LinkLDC type
    bw.buffer()[3] = 0x00;   //node address (2 bytes)
    bw.buffer()[4] = 0x01;
    bw.buffer()[5] = 0x10;   //payload length
    bw.buffer()[6] = 0x02;   //first byte in payload of 0x02 signifies sync sampling packet type
    bw.buffer()[7] = 0x01;   //channel mask
    bw.buffer()[8] = 0x00;
    bw.buffer()[9] = 0x03;   //data type
    bw.buffer()[10] = 0x00;
    bw.buffer()[11] = 0x00;
    bw.buffer()[12] = 0x00;
    bw.buffer()[13] = 0x00;
    bw.buffer()[14] = 0x00;
    bw.buffer()[15] = 0x00;
    bw.buffer()[16] = 0x00;
    bw.buffer()[17] = 0x00;
    bw.buffer()[18] = 0x00;
    bw.buffer()[19] = 0x00;
    bw.buffer()[20] = 0x00;
    bw.buffer()[21] = 0x00;
    bw.buffer()[22] = 0x00;
    bw.buffer()[23] = 0x00;
    bw.buffer()[24] = 0x00;
    bw.buffer()[25] = 0x27;  //checksum byte matching the bytes above
    bw.commit(26);
    std::shared_ptr<ResponseCollector> rc(new ResponseCollector);
    WirelessPacketCollector packetCollector;
    RawBytePacketCollector rawBytePacketCollector;
    WirelessParser parser(packetCollector, rc, rawBytePacketCollector);
    //build the packet by calling parseAsPacket
    BOOST_CHECK_EQUAL(parser.parseAsPacket(b, packet, WirelessTypes::freq_15), WirelessParser::parsePacketResult_completePacket);
    //check that the packet's type has been changed to a sync sampling type
    BOOST_CHECK_EQUAL(packet.type(), WirelessPacket::packetType_SyncSampling);
}
BOOST_AUTO_TEST_CASE(WirelessPacketUtils_CorrectPacketType_SHMLink)
{
    // Build an LDC-typed packet whose payload's first byte marks it as SHM.
    WirelessPacket mislabeled;
    mislabeled.type(WirelessPacket::packetType_LDC);

    Bytes payload;
    payload.push_back(WirelessPacket::packetType_SHM);
    mislabeled.payload(payload);

    // correctPacketType should rewrite the header type from the payload hint.
    WirelessPacketUtils::correctPacketType(mislabeled);
    BOOST_CHECK_EQUAL(mislabeled.type(), WirelessPacket::packetType_SHM);
}
BOOST_AUTO_TEST_SUITE_END()
|
import { WalletProviderState } from 'app/containers/WalletProvider/types';
import { TradingPageState } from 'app/containers/TradingPage/types';
import { FastBtcFormState } from 'app/containers/FastBtcForm/types';
import { LendBorrowSovrynState } from 'app/containers/LendBorrowSovryn/types';
import { EventsStoreState } from '../store/global/events-store/types';
import { TransactionsStoreState } from '../store/global/transactions-store/types';
import { TutorialDialogModalState } from 'app/containers/TutorialDialogModal/types';
// [IMPORT NEW CONTAINERSTATE ABOVE] < Needed for generating containers seamlessly
/*
Because the redux-injectors injects your reducers asynchronously somewhere in your code
You have to declare them here manually
*/
/**
 * Shape of the root redux store. Every slice is optional because
 * redux-injectors registers reducers lazily at runtime, so a slice may not
 * exist until its container has mounted.
 */
export interface RootState {
  walletProvider?: WalletProviderState;
  tradingPage?: TradingPageState;
  fastBtcForm?: FastBtcFormState;
  lendBorrowSovryn?: LendBorrowSovrynState;
  eventsState?: EventsStoreState;
  transactionsState?: TransactionsStoreState;
  tutorialDialogModal?: TutorialDialogModalState;
  // [INSERT NEW REDUCER KEY ABOVE] < Needed for generating containers seamlessly
}
|
<?php
namespace Modules\GameServer\Entities;
use App\Model\Model;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Collection;
use Modules\Event\Entities\Event;
use Modules\Game\Entities\Game;
use Modules\Match\Entities\MatchModel;
/**
* Class GameServer
* @package Modules\GameServer\Entities
*
* Contains:
* @property int $id
* @property string $name
* @property string $ip
* @property int $port
* @property string $url
* @property string $password
* @property bool $pending
*
* Has:
* @property Game $game
* @property GameServerStatus $status
*
* Belongs to:
* @property MatchModel[]|Collection $matches
* @property Event $event
*/
class GameServer extends Model
{
    /** Database table backing this model. */
    const TABLE_NAME = "game_servers";

    /** Name of the table this model maps to. */
    public function getTableName(): string
    {
        return self::TABLE_NAME;
    }

    /**
     * Define the initial schema for the game_servers table.
     * Connection details (ip/port/password) are nullable because a server
     * may be registered before it is provisioned (`pending` defaults true).
     */
    public function generateInitialBlueprint(Blueprint $table)
    {
        $table->id();
        $table->timestamps();
        $table->string("name");
        $table->string("ip")->nullable();
        $table->integer("port")->nullable();
        $table->string("url");
        $table->string("password")->nullable();
        $table->integer("game_id")->nullable()->index();
        $table->integer("event_id")->nullable()->index();
        $table->boolean("pending")->default(true);
        // TODO: status
        // TODO: game_server_match pivot
    }

    /** Attributes that are mass-assignable. */
    public $fillable = [
        "name",
        "ip",
        "port",
        "url",
        "password",
        "game_id",
        "event_id",
        "pending"
    ];

    /** Matches hosted on this server (many-to-many via game_server_match). */
    public function matches()
    {
        return $this->belongsToMany(MatchModel::class, 'game_server_match', 'game_server_id', 'match_id');
    }

    /** Event this server belongs to, if any. */
    public function event()
    {
        return $this->belongsTo(Event::class);
    }
}
|
/*
* Copyright (c) 2017, the Dart project authors. Please see the AUTHORS
* file for details. All rights reserved. Use of this source code is governed
* by a BSD-style license that can be found in the LICENSE file.
*/
/**
* @assertion Capability pauseCapability
* read-only
* Capability granting the ability to pause the isolate.
* This capability is used by pause. If the capability is not the correct
* pause capability of the isolate, including if the capability is null,
* then calls to pause will have no effect.
*
* @description Check that pauseCapability is not null, if isolate is started
* by method spawn() without paused state
*
* @author [email protected]
*/
import "../../../Utils/expect.dart";
import "IsolateUtil.dart";
// Spawn an isolate (not paused) via ErrorServer and verify that its
// pauseCapability is populated, then shut the server down.
test() async {
  ErrorServer server = await ErrorServer.spawn();
  Expect.isNotNull(server.isolate.pauseCapability);
  // clean up
  await server.stop();
  asyncEnd();
}
// Entry point: open the async test bracket; test() calls asyncEnd() when done.
main() {
  asyncStart();
  test();
}
|
/*
*
* Copyright 2020 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.graphql.dgs.codegen.generators.java
import com.netflix.graphql.dgs.client.codegen.BaseProjectionNode
import com.netflix.graphql.dgs.client.codegen.BaseSubProjectionNode
import com.netflix.graphql.dgs.client.codegen.GraphQLQuery
import com.netflix.graphql.dgs.codegen.*
import com.netflix.graphql.dgs.codegen.generators.shared.ClassnameShortener
import com.squareup.javapoet.*
import graphql.introspection.Introspection.TypeNameMetaFieldDef
import graphql.language.*
import javax.lang.model.element.Modifier
class ClientApiGenerator(private val config: CodeGenConfig, private val document: Document) {
    // Class names already emitted; guards against duplicate files and against
    // infinite recursion when the schema graph contains cycles.
    private val generatedClasses = mutableSetOf<String>()
    private val typeUtils = TypeUtils(getDatatypesPackageName(), config, document)
    /**
     * For each included, non-skipped field of the given operation type, generates a
     * GraphQLQuery subclass and — when the field's return type resolves in the
     * document — a matching projection-root class.
     */
    fun generate(definition: ObjectTypeDefinition): CodeGenResult {
        return definition.fieldDefinitions.filterIncludedInConfig(definition.name, config).filterSkipped().map {
            val javaFile = createQueryClass(it, definition.name)
            val rootProjection = it.type.findTypeDefinition(document, true)?.let { typeDefinition -> createRootProjection(typeDefinition, it.name.capitalize()) }
                ?: CodeGenResult()
            CodeGenResult(javaQueryTypes = listOf(javaFile)).merge(rootProjection)
        }.fold(CodeGenResult()) { total, current -> total.merge(current) }
    }
    /**
     * Generates the federation entities root projection for all types that carry a
     * @key directive. No-op when entity queries are disabled in the config.
     */
    fun generateEntities(definitions: List<ObjectTypeDefinition>): CodeGenResult {
        if (config.skipEntityQueries) {
            return CodeGenResult()
        }
        var entitiesRootProjection = CodeGenResult()
        // generate for federation types, if present
        val federatedTypes = definitions.filter { it.hasDirective("key") }
        if (federatedTypes.isNotEmpty()) {
            // create entities root projection
            entitiesRootProjection = createEntitiesRootProjection(federatedTypes)
        }
        return CodeGenResult().merge(entitiesRootProjection)
    }
    /**
     * Builds the <FieldName>GraphQLQuery class for one operation field:
     * getOperationName(), a static Builder, an all-args constructor, and (when the
     * field takes arguments) a no-arg constructor. "fieldsSet" records which
     * arguments were explicitly set so an explicit null can still be sent.
     */
    private fun createQueryClass(it: FieldDefinition, operation: String): JavaFile {
        val javaType = TypeSpec.classBuilder("${it.name.capitalize()}GraphQLQuery")
            .addModifiers(Modifier.PUBLIC).superclass(ClassName.get(GraphQLQuery::class.java))
        if (it.description != null) {
            javaType.addJavadoc(it.description.content.lines().joinToString("\n"))
        }
        javaType.addMethod(
            MethodSpec.methodBuilder("getOperationName")
                .addModifiers(Modifier.PUBLIC)
                .returns(String::class.java)
                .addAnnotation(Override::class.java)
                .addCode(
                    """
                    | return "${it.name}";
                    |
                    """.trimMargin()
                ).build()
        )
        val setType = ClassName.get(Set::class.java)
        val setOfStringType = ParameterizedTypeName.get(setType, ClassName.get(String::class.java))
        val builderClass = TypeSpec.classBuilder("Builder").addModifiers(Modifier.STATIC, Modifier.PUBLIC)
            .addMethod(
                MethodSpec.methodBuilder("build")
                    .addModifiers(Modifier.PUBLIC)
                    .returns(ClassName.get("", "${it.name.capitalize()}GraphQLQuery"))
                    .addCode(
                        if (it.inputValueDefinitions.isNotEmpty())
                            """
                            |return new ${it.name.capitalize()}GraphQLQuery(${it.inputValueDefinitions.joinToString(", ") { ReservedKeywordSanitizer.sanitize(it.name) }}, fieldsSet);
                            |
                            """.trimMargin() else
                            """
                            |return new ${it.name.capitalize()}GraphQLQuery();
                            """.trimMargin()
                    )
                    .build()
            ).addField(FieldSpec.builder(setOfStringType, "fieldsSet", Modifier.PRIVATE).initializer("new \$T<>()", ClassName.get(HashSet::class.java)).build())
        val constructorBuilder = MethodSpec.constructorBuilder()
            .addModifiers(Modifier.PUBLIC)
        constructorBuilder.addCode(
            """
            |super("${operation.toLowerCase()}");
            |
            """.trimMargin()
        )
        // One Builder setter, one Builder field, and one constructor parameter per
        // input value; non-primitive args are only sent when non-null or explicitly set.
        it.inputValueDefinitions.forEach { inputValue ->
            val findReturnType = TypeUtils(getDatatypesPackageName(), config, document).findReturnType(inputValue.type)
            val methodBuilder = MethodSpec.methodBuilder(ReservedKeywordSanitizer.sanitize(inputValue.name))
                .addParameter(findReturnType, ReservedKeywordSanitizer.sanitize(inputValue.name))
                .returns(ClassName.get("", "Builder"))
                .addModifiers(Modifier.PUBLIC)
                .addCode(
                    """
                    |this.${ReservedKeywordSanitizer.sanitize(inputValue.name)} = ${ReservedKeywordSanitizer.sanitize(inputValue.name)};
                    |this.fieldsSet.add("${inputValue.name}");
                    |return this;
                    """.trimMargin()
                )
            if (inputValue.description != null) {
                methodBuilder.addJavadoc(inputValue.description.content.lines().joinToString("\n"))
            }
            builderClass.addMethod(methodBuilder.build())
                .addField(findReturnType, ReservedKeywordSanitizer.sanitize(inputValue.name), Modifier.PRIVATE)
            constructorBuilder.addParameter(findReturnType, ReservedKeywordSanitizer.sanitize(inputValue.name))
            if (findReturnType.isPrimitive) {
                constructorBuilder.addCode(
                    """
                    |getInput().put("${inputValue.name}", ${ReservedKeywordSanitizer.sanitize(inputValue.name)});
                    """.trimMargin()
                )
            } else {
                constructorBuilder.addCode(
                    """
                    |if (${inputValue.name} != null || fieldsSet.contains("${inputValue.name}")) {
                    | getInput().put("${inputValue.name}", ${ReservedKeywordSanitizer.sanitize(inputValue.name)});
                    |}
                    """.trimMargin()
                )
            }
        }
        if (it.inputValueDefinitions.size > 0) {
            constructorBuilder.addParameter(setOfStringType, "fieldsSet")
        }
        javaType.addMethod(constructorBuilder.build())
        // No-arg constructor
        if (it.inputValueDefinitions.size > 0) {
            javaType.addMethod(
                MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC)
                    .addStatement("super(\"${operation.toLowerCase()}\")")
                    .build()
            )
        }
        javaType.addMethod(
            MethodSpec.methodBuilder("newRequest")
                .addModifiers(Modifier.STATIC, Modifier.PUBLIC)
                .returns(ClassName.get("", "Builder"))
                .addCode("return new Builder();\n")
                .build()
        )
        javaType.addType(builderClass.build())
        return JavaFile.builder(getPackageName(), javaType.build()).build()
    }
    /**
     * Builds the <Prefix>ProjectionRoot class for a query field's return type: a
     * sub-projection method per complex field, a pass-through method per scalar
     * field, and fragment methods for interface/union members.
     */
    private fun createRootProjection(type: TypeDefinition<*>, prefix: String): CodeGenResult {
        val clazzName = "${prefix}ProjectionRoot"
        val javaType = TypeSpec.classBuilder(clazzName)
            .addModifiers(Modifier.PUBLIC).superclass(ClassName.get(BaseProjectionNode::class.java))
        if (generatedClasses.contains(clazzName)) return CodeGenResult() else generatedClasses.add(clazzName)
        // Also pick up fields contributed by "extend type" definitions.
        val fieldDefinitions = type.fieldDefinitions() + document.definitions.filterIsInstance<ObjectTypeExtensionDefinition>().filter { it.name == type.name }.flatMap { it.fieldDefinitions }
        val codeGenResult = fieldDefinitions
            .filterSkipped()
            .mapNotNull {
                val typeDefinition = it.type.findTypeDefinition(
                    document,
                    excludeExtensions = true,
                    includeBaseTypes = it.inputValueDefinitions.isNotEmpty(),
                    includeScalarTypes = it.inputValueDefinitions.isNotEmpty()
                )
                if (typeDefinition != null) it to typeDefinition else null
            }
            .map { (fieldDef, typeDef) ->
                val projectionName = "${prefix}_${fieldDef.name.capitalize()}Projection"
                if (typeDef !is ScalarTypeDefinition) {
                    val noArgMethodBuilder = MethodSpec.methodBuilder(ReservedKeywordSanitizer.sanitize(fieldDef.name))
                        .returns(ClassName.get(getPackageName(), projectionName))
                        .addCode(
                            """
                            |$projectionName projection = new $projectionName(this, this);
                            |getFields().put("${fieldDef.name}", projection);
                            |return projection;
                            """.trimMargin()
                        )
                        .addModifiers(Modifier.PUBLIC)
                    javaType.addMethod(noArgMethodBuilder.build())
                }
                if (fieldDef.inputValueDefinitions.isNotEmpty()) {
                    addFieldSelectionMethodWithArguments(fieldDef, projectionName, javaType, projectionRoot = "this")
                }
                // Record the traversed edge so the recursion terminates on cyclic schemas.
                val processedEdges = mutableSetOf<Pair<String, String>>()
                processedEdges.add(Pair(typeDef.name, type.name))
                createSubProjection(typeDef, javaType.build(), javaType.build(), "${prefix}_${fieldDef.name.capitalize()}", processedEdges, 1)
            }
            .fold(CodeGenResult()) { total, current -> total.merge(current) }
        // Scalar fields (no resolvable type definition) become simple selection methods.
        fieldDefinitions.filterSkipped().forEach {
            val objectTypeDefinition = it.type.findTypeDefinition(document)
            if (objectTypeDefinition == null) {
                javaType.addMethod(
                    MethodSpec.methodBuilder(ReservedKeywordSanitizer.sanitize(it.name))
                        .returns(ClassName.get(getPackageName(), javaType.build().name))
                        .addCode(
                            """
                            |getFields().put("${it.name}", null);
                            |return this;
                            """.trimMargin()
                        )
                        .addModifiers(Modifier.PUBLIC)
                        .build()
                )
            }
        }
        val concreteTypesResult = createConcreteTypes(type, javaType.build(), javaType, prefix, mutableSetOf<Pair<String, String>>(), 0)
        val unionTypesResult = createUnionTypes(type, javaType, javaType.build(), prefix, mutableSetOf<Pair<String, String>>(), 0)
        val javaFile = JavaFile.builder(getPackageName(), javaType.build()).build()
        return CodeGenResult(clientProjections = listOf(javaFile)).merge(codeGenResult).merge(concreteTypesResult).merge(unionTypesResult)
    }
    /**
     * Adds a field-selection method that takes the field's GraphQL arguments as
     * parameters and records them as InputArgument entries on the projection.
     */
    private fun addFieldSelectionMethodWithArguments(
        fieldDefinition: FieldDefinition,
        projectionName: String,
        javaType: TypeSpec.Builder,
        projectionRoot: String
    ): TypeSpec.Builder? {
        val methodBuilder = MethodSpec.methodBuilder(ReservedKeywordSanitizer.sanitize(fieldDefinition.name))
            .returns(ClassName.get(getPackageName(), projectionName))
            .addCode(
                """
                |$projectionName projection = new $projectionName(this, $projectionRoot);
                |getFields().put("${fieldDefinition.name}", projection);
                |getInputArguments().computeIfAbsent("${fieldDefinition.name}", k -> new ${'$'}T<>());
                |${
                fieldDefinition.inputValueDefinitions.joinToString("\n") { input ->
                    """
                    |InputArgument ${input.name}Arg = new InputArgument("${input.name}", ${input.name});
                    |getInputArguments().get("${fieldDefinition.name}").add(${input.name}Arg);
                    """.trimMargin()
                }
                }
                |return projection;
                """.trimMargin(),
                ArrayList::class.java
            )
            .addModifiers(Modifier.PUBLIC)
        fieldDefinition.inputValueDefinitions.forEach { input ->
            methodBuilder.addParameter(ParameterSpec.builder(typeUtils.findReturnType(input.type), input.name).build())
        }
        return javaType.addMethod(methodBuilder.build())
    }
    /**
     * Builds the EntitiesProjectionRoot with one on<Type>() fragment method per
     * federated (@key) type, plus the key-projection fragment classes.
     */
    private fun createEntitiesRootProjection(federatedTypes: List<ObjectTypeDefinition>): CodeGenResult {
        val clazzName = "EntitiesProjectionRoot"
        val javaType = TypeSpec.classBuilder(clazzName)
            .addModifiers(Modifier.PUBLIC).superclass(ClassName.get(BaseProjectionNode::class.java))
        if (generatedClasses.contains(clazzName)) return CodeGenResult() else generatedClasses.add(clazzName)
        val codeGenResult = federatedTypes.map { objTypeDef ->
            javaType.addMethod(
                MethodSpec.methodBuilder("on${objTypeDef.name}")
                    .addModifiers(Modifier.PUBLIC)
                    .returns(ClassName.get(getPackageName(), "Entities${objTypeDef.name.capitalize()}KeyProjection"))
                    .addCode(
                        """
                        | Entities${objTypeDef.name.capitalize()}KeyProjection fragment = new Entities${objTypeDef.name.capitalize()}KeyProjection(this, this);
                        | getFragments().add(fragment);
                        | return fragment;
                        """.trimMargin()
                    )
                    .build()
            )
            val processedEdges = mutableSetOf<Pair<String, String>>()
            createFragment(objTypeDef, javaType.build(), javaType.build(), "Entities${objTypeDef.name.capitalize()}Key", processedEdges, 0)
        }.fold(CodeGenResult()) { total, current -> total.merge(current) }
        val javaFile = JavaFile.builder(getPackageName(), javaType.build()).build()
        return CodeGenResult(clientProjections = listOf(javaFile)).merge(codeGenResult)
    }
    /**
     * For an interface type, adds an on<ConcreteType>() fragment method for every
     * object type in the document that implements it.
     */
    private fun createConcreteTypes(type: TypeDefinition<*>, root: TypeSpec, javaType: TypeSpec.Builder, prefix: String, processedEdges: Set<Pair<String, String>>, queryDepth: Int): CodeGenResult {
        return if (type is InterfaceTypeDefinition) {
            val concreteTypes = document.getDefinitionsOfType(ObjectTypeDefinition::class.java).filter {
                it.implements.filterIsInstance<NamedNode<*>>().find { iface -> iface.name == type.name } != null
            }
            concreteTypes.map {
                addFragmentProjectionMethod(javaType, root, prefix, it, processedEdges, queryDepth)
            }.fold(CodeGenResult()) { total, current -> total.merge(current) }
        } else {
            CodeGenResult()
        }
    }
    /**
     * For a union type, adds an on<MemberType>() fragment method for every member
     * type that resolves in the document.
     */
    private fun createUnionTypes(type: TypeDefinition<*>, javaType: TypeSpec.Builder, rootType: TypeSpec, prefix: String, processedEdges: Set<Pair<String, String>>, queryDepth: Int): CodeGenResult {
        return if (type is UnionTypeDefinition) {
            val memberTypes = type.memberTypes.mapNotNull { it.findTypeDefinition(document, true) }.toList()
            memberTypes.map {
                addFragmentProjectionMethod(javaType, rootType, prefix, it, processedEdges, queryDepth)
            }.fold(CodeGenResult()) { total, current -> total.merge(current) }
        } else {
            CodeGenResult()
        }
    }
    /**
     * Adds a single on<Type>() method returning a fragment projection, then
     * generates that fragment's class.
     */
    private fun addFragmentProjectionMethod(javaType: TypeSpec.Builder, rootType: TypeSpec, prefix: String, it: TypeDefinition<*>, processedEdges: Set<Pair<String, String>>, queryDepth: Int): CodeGenResult {
        // At the root the projection is its own root; nested projections delegate to getRoot().
        val rootRef = if (javaType.build().name == rootType.name) "this" else "getRoot()"
        val projectionName = "${prefix}_${it.name.capitalize()}Projection"
        javaType.addMethod(
            MethodSpec.methodBuilder("on${it.name}")
                .addModifiers(Modifier.PUBLIC)
                .returns(ClassName.get(getPackageName(), projectionName))
                .addCode(
                    """
                    |$projectionName fragment = new $projectionName(this, $rootRef);
                    |getFragments().add(fragment);
                    |return fragment;
                    """.trimMargin()
                )
                .build()
        )
        return createFragment(it as ObjectTypeDefinition, javaType.build(), rootType, "${prefix}_${it.name.capitalize()}", processedEdges, queryDepth)
    }
    /**
     * Builds a fragment projection class: a sub-projection plus a toString() that
     * renders the "... on Type { ... }" inline-fragment syntax.
     */
    private fun createFragment(type: ObjectTypeDefinition, parent: TypeSpec, root: TypeSpec, prefix: String, processedEdges: Set<Pair<String, String>>, queryDepth: Int): CodeGenResult {
        val subProjection = createSubProjectionType(type, parent, root, prefix, processedEdges, queryDepth)
            ?: return CodeGenResult()
        val javaType = subProjection.first
        val codeGenResult = subProjection.second
        // We don't need the typename added for fragments in the entities' projection.
        // This affects deserialization when use directly with generated classes
        if (prefix != "Entities${type.name.capitalize()}Key") {
            javaType.addInitializerBlock(
                CodeBlock.builder()
                    .addStatement("getFields().put(\$S, null)", TypeNameMetaFieldDef.name)
                    .build()
            )
        }
        javaType.addMethod(
            MethodSpec.methodBuilder("toString")
                .returns(ClassName.get(String::class.java))
                .addAnnotation(Override::class.java)
                .addModifiers(Modifier.PUBLIC)
                .addCode(
                    """
                    |StringBuilder builder = new StringBuilder();
                    |builder.append("... on ${type.name} {");
                    |getFields().forEach((k, v) -> {
                    | builder.append(" ").append(k);
                    | if(v != null) {
                    | builder.append(" ").append(v.toString());
                    | }
                    |});
                    |builder.append("}");
                    |
                    |return builder.toString();
                    """.trimMargin()
                )
                .build()
        )
        val javaFile = JavaFile.builder(getPackageName(), javaType.build()).build()
        return CodeGenResult(clientProjections = listOf(javaFile)).merge(codeGenResult)
    }
    /**
     * Builds a nested projection class (no fragment toString) and wraps it in a
     * JavaFile. Returns an empty result when the class was already generated.
     */
    private fun createSubProjection(type: TypeDefinition<*>, parent: TypeSpec, root: TypeSpec, prefix: String, processedEdges: Set<Pair<String, String>>, queryDepth: Int): CodeGenResult {
        val subProjection = createSubProjectionType(type, parent, root, prefix, processedEdges, queryDepth)
            ?: return CodeGenResult()
        val javaType = subProjection.first
        val codeGenResult = subProjection.second
        val javaFile = JavaFile.builder(getPackageName(), javaType.build()).build()
        return CodeGenResult(clientProjections = listOf(javaFile)).merge(codeGenResult)
    }
    /**
     * Core projection-class builder shared by fragments and sub-projections.
     * Recurses into complex fields until maxProjectionDepth is reached (-1 means
     * unlimited); already-traversed (type, parent) edges are skipped to break cycles.
     * Returns null when the class name was generated before.
     */
    private fun createSubProjectionType(type: TypeDefinition<*>, parent: TypeSpec, root: TypeSpec, prefix: String, processedEdges: Set<Pair<String, String>>, queryDepth: Int): Pair<TypeSpec.Builder, CodeGenResult>? {
        val className = ClassName.get(BaseSubProjectionNode::class.java)
        val clazzName = "${prefix}Projection"
        if (generatedClasses.contains(clazzName)) return null else generatedClasses.add(clazzName)
        val javaType = TypeSpec.classBuilder(clazzName)
            .addModifiers(Modifier.PUBLIC)
            .superclass(ParameterizedTypeName.get(className, ClassName.get(getPackageName(), parent.name), ClassName.get(getPackageName(), root.name)))
            .addMethod(
                MethodSpec.constructorBuilder()
                    .addModifiers(Modifier.PUBLIC)
                    .addParameter(ParameterSpec.builder(ClassName.get(getPackageName(), parent.name), "parent").build())
                    .addParameter(ParameterSpec.builder(ClassName.get(getPackageName(), root.name), "root").build())
                    .addCode("""super(parent, root, java.util.Optional.of("${type.name}"));""")
                    .build()
            )
        val fieldDefinitions = type.fieldDefinitions() +
            document.definitions
                .filterIsInstance<ObjectTypeExtensionDefinition>()
                .filter { it.name == type.name }
                .flatMap { it.fieldDefinitions }
        val codeGenResult = if (queryDepth < config.maxProjectionDepth || config.maxProjectionDepth == -1) {
            fieldDefinitions
                .filterSkipped()
                .mapNotNull {
                    val typeDefinition = it.type.findTypeDefinition(document, true)
                    if (typeDefinition != null) Pair(it, typeDefinition) else null
                }
                .filter { (_, typeDef) -> !processedEdges.contains(Pair(typeDef.name, type.name)) }
                .map { (fieldDef, typeDef) ->
                    val projectionName = "${truncatePrefix(prefix)}_${fieldDef.name.capitalize()}Projection"
                    val methodName = ReservedKeywordSanitizer.sanitize(fieldDef.name)
                    javaType.addMethod(
                        MethodSpec.methodBuilder(methodName)
                            .returns(ClassName.get(getPackageName(), projectionName))
                            .addCode(
                                """
                                | $projectionName projection = new $projectionName(this, getRoot());
                                | getFields().put("${fieldDef.name}", projection);
                                | return projection;
                                """.trimMargin()
                            )
                            .addModifiers(Modifier.PUBLIC)
                            .build()
                    )
                    if (fieldDef.inputValueDefinitions.isNotEmpty()) {
                        addFieldSelectionMethodWithArguments(fieldDef, projectionName, javaType, projectionRoot = "getRoot()")
                    }
                    val updatedProcessedEdges = processedEdges.toMutableSet()
                    updatedProcessedEdges.add(Pair(typeDef.name, type.name))
                    createSubProjection(typeDef, javaType.build(), root, "${truncatePrefix(prefix)}_${fieldDef.name.capitalize()}", updatedProcessedEdges, queryDepth + 1)
                }
                .fold(CodeGenResult()) { total, current -> total.merge(current) }
        } else CodeGenResult()
        // Scalar fields: selection methods, with an argument-taking overload when
        // the field declares input values.
        fieldDefinitions
            .filterSkipped()
            .forEach {
                val objectTypeDefinition = it.type.findTypeDefinition(document)
                if (objectTypeDefinition == null) {
                    javaType.addMethod(
                        MethodSpec.methodBuilder(ReservedKeywordSanitizer.sanitize(it.name))
                            .returns(ClassName.get(getPackageName(), javaType.build().name))
                            .addCode(
                                """
                                |getFields().put("${it.name}", null);
                                |return this;
                                """.trimMargin()
                            )
                            .addModifiers(Modifier.PUBLIC)
                            .build()
                    )
                    if (it.inputValueDefinitions.isNotEmpty()) {
                        val methodWithInputArgumentsBuilder = MethodSpec.methodBuilder(ReservedKeywordSanitizer.sanitize(it.name))
                            .returns(ClassName.get(getPackageName(), javaType.build().name))
                            .addCode(
                                """
                                |getFields().put("${it.name}", null);
                                |getInputArguments().computeIfAbsent("${it.name}", k -> new ${'$'}T<>());
                                |${
                                it.inputValueDefinitions.joinToString("\n") { input ->
                                    """
                                    |InputArgument ${input.name}Arg = new InputArgument("${input.name}", ${input.name});
                                    |getInputArguments().get("${it.name}").add(${input.name}Arg);
                                    """.trimMargin()
                                }}
                                |return this;
                                """.trimMargin(),
                                ArrayList::class.java
                            )
                            .addModifiers(Modifier.PUBLIC)
                        it.inputValueDefinitions.forEach { input ->
                            methodWithInputArgumentsBuilder.addParameter(ParameterSpec.builder(typeUtils.findReturnType(input.type), input.name).build())
                        }
                        javaType.addMethod(methodWithInputArgumentsBuilder.build())
                    }
                }
            }
        val concreteTypesResult = createConcreteTypes(type, root, javaType, prefix, processedEdges, queryDepth)
        val unionTypesResult = createUnionTypes(type, javaType, root, prefix, processedEdges, queryDepth)
        return Pair(javaType, codeGenResult.merge(concreteTypesResult).merge(unionTypesResult))
    }
    // Optionally shortens deeply nested projection-class prefixes.
    private fun truncatePrefix(prefix: String): String {
        return if (config.shortProjectionNames) ClassnameShortener.shorten(prefix) else prefix
    }
    // Package for generated client classes.
    fun getPackageName(): String {
        return config.packageNameClient
    }
    // Package for generated data types.
    private fun getDatatypesPackageName(): String {
        return config.packageNameTypes
    }
}
|
# Generate an RSA keypair under <repo>/.secrets next to this script.
# Fix: all variable expansions are now quoted so the script also works when
# the checkout path contains spaces.
SCRIPTPATH="$( cd "$(dirname "$0")" ; pwd -P )"
KEYPATH="$SCRIPTPATH/../.secrets"
mkdir -p "$KEYPATH"
PRIVATE_KEY="$KEYPATH/private.pem"
PUBLIC_KEY="$KEYPATH/public.pem"
# 2048-bit private key, then the matching public key in PEM format.
openssl genrsa -out "$PRIVATE_KEY" 2048
openssl rsa -in "$PRIVATE_KEY" -outform PEM -pubout -out "$PUBLIC_KEY"
|
#!/bin/bash
# Resolve the directory containing this script so later copies work from any CWD.
SOURCEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
# Build collector
pushd ../opentelemetry-lambda/collector || exit
make package
popd || exit
# Build the sdk layer and sample apps
./gradlew build
mkdir -p ../opentelemetry-lambda/java/build/extensions
cp ./build/libs/aws-otel-lambda-java-extensions.jar ../opentelemetry-lambda/java/build/extensions
cd ../opentelemetry-lambda/java || exit
./gradlew build -Potel.lambda.javaagent.dependency=software.amazon.opentelemetry:aws-opentelemetry-agent:1.6.0
# Combine the layers
# Merge the collector extension into the javaagent layer zip; the upstream
# handler script is kept under an -upstream suffix and replaced by ours.
pushd ./layer-javaagent/build/distributions || exit
unzip -qo opentelemetry-javaagent-layer.zip
rm opentelemetry-javaagent-layer.zip
mv otel-handler otel-handler-upstream
cp "$SOURCEDIR"/scripts/otel-handler .
unzip -qo ../../../../collector/build/collector-extension.zip
zip -qr opentelemetry-javaagent-layer.zip *
popd || exit
# Same merge for the wrapper layer, which ships three handler variants.
pushd ./layer-wrapper/build/distributions || exit
unzip -qo opentelemetry-java-wrapper.zip
rm opentelemetry-java-wrapper.zip
mv otel-handler otel-handler-upstream
mv otel-stream-handler otel-stream-handler-upstream
mv otel-proxy-handler otel-proxy-handler-upstream
cp "$SOURCEDIR"/scripts/* .
unzip -qo ../../../../collector/build/collector-extension.zip
zip -qr opentelemetry-java-wrapper.zip *
popd || exit
|
<?php
namespace app\index\controller;
use think\Controller;
use think\Db;
use think\Request;
/**
* Class TestRestFul
* @package app\index\controller
* 测试资源控制器
* 此控制器,是使用此命令创建的: php think make:controller index/TestRestFul
*
* restful理解:
* 概念:
* 一种软件架构风格、设计风格,而不是标准,只是提供了一组设计原则和约束条件。
* 它主要用于客户端和服务器交互类的软件。
* 基于这个风格设计的软件可以更简洁,更有层次,更易于实现缓存等机制。
*
* 特点:
* 1.资源通过URL来指定和操作。
* 2.对资源的操作包括获取、创建、修改和删除,正好对应HTTP协议的GET、POST、PUT 和 DELETE 方法。
* 3.连接是无状态性的。
* 4.能够利用Cache机制来提高性能。
*
* restful风格的 URL:
* 请求类型 生成路由规则 对应操作方法
* GET myRestFul index
* GET myRestFul/create create
* POST myRestFul save
* GET myRestFul/:id read
* GET myRestFul/:id/edit edit
* PUT myRestFul/:id update
* DELETE myRestFul/:id delete
*
* 注意:
* 这里的6个方法名称,必须叫这几个名字,否则会报错方法不存在
*/
class TestRestFul extends Controller
{
    /**
     * Display the resource list (GET myRestFul -> index).
     * @return \think\response\Json
     * @throws \think\db\exception\DataNotFoundException
     * @throws \think\db\exception\ModelNotFoundException
     * @throws \think\exception\DbException
     */
    public function index()
    {
        $user = Db::name('huser')->field('huser_id,huser_name')->select();
        return json($user);
    }
    /**
     * Show the form page for creating a resource (GET myRestFul/create).
     * @return string
     */
    public function create()
    {
        return '这是我创建资源列表的页面';
    }
    /**
     * Store a newly created resource (POST myRestFul).
     *
     * @param \think\Request $request
     * @return \think\Response
     */
    public function save(Request $request)
    {
        // NOTE(review): raw POST data is inserted as-is — presumably the
        // columns match the huser table; confirm input is validated upstream.
        $data = $request->post();
        $num = Db::name('huser')->insert($data);
        if($num > 0){
            return json('创建成功');
        }else{
            return json('创建失败');
        }
    }
    /**
     * Display the specified resource (GET myRestFul/:id).
     * @param $id
     * @return \think\response\Json
     * @throws \think\db\exception\DataNotFoundException
     * @throws \think\db\exception\ModelNotFoundException
     * @throws \think\exception\DbException
     */
    public function read($id)
    {
        $details = Db::name('huser')->find($id);
        return json($details);
    }
    /**
     * Show the form page for editing a resource (GET myRestFul/:id/edit).
     * @param $id
     * @return \think\response\Json
     * @throws \think\db\exception\DataNotFoundException
     * @throws \think\db\exception\ModelNotFoundException
     * @throws \think\exception\DbException
     */
    public function edit($id)
    {
        $editDetails = Db::name('huser')->find($id);
        return json($editDetails);
    }
    /**
     * Save updates to the specified resource (PUT myRestFul/:id).
     *
     * @param Request $request
     * @param $id
     * @return string
     * @throws \think\Exception
     * @throws \think\exception\PDOException
     */
    public function update(Request $request, $id)
    {
        // NOTE(review): param() merges route parameters (including id) into
        // the update payload — confirm the huser table tolerates this.
        $post = $request->param();
        $num = Db::name('huser')->where('huser_id',$id)->update($post);
        if($num > 0){
            return '更新成功';
        }else{
            return '更新失败';
        }
    }
    /**
     * Delete the specified resource (DELETE myRestFul/:id).
     *
     * @param $id
     * @return string
     * @throws \think\Exception
     * @throws \think\exception\PDOException
     */
    public function delete($id)
    {
        $num = Db::name('huser')->delete($id);
        if($num > 0){
            return '删除成功';
        }else{
            return '删除失败';
        }
    }
}
|
#! /bin/bash
# Publish FMLayoutKit: push the podspec to the private FMPodSpec repo first,
# and only publish to CocoaPods trunk if that succeeds (the && chain).
# Fix: quote the cd target and abort if it fails; previously an unquoted
# $basePath broke on paths with spaces and a failed cd ran the push from the
# wrong directory.
basePath=$(cd "$(dirname "$0")";pwd)
cd "$basePath" || exit 1
pod repo push FMPodSpec FMLayoutKit.podspec --allow-warnings &&
pod trunk push FMLayoutKit.podspec --allow-warnings
|
package verificationcode
// Service is the contract for a verification-code mechanism (concrete
// implementations live elsewhere — confirm semantics against them).
type Service interface {
	// Type returns a string identifying this verification-code implementation.
	Type() string
	// Challenge produces a new challenge for the given context.
	Challenge(ctx *Context) (challenge *Challenge, err error)
	// Response handles the code submitted for the context's challenge and
	// returns the outcome (presumably pass/fail validation — TODO confirm).
	Response(ctx *Context, code []byte) (result *Result, err error)
}
|
(function() {
    'use strict';

    angular.module('JournalApp.core')
        .factory('ProcessData', ProcessData);

    /**
     * ProcessData: pure data-massaging helpers (date formatting,
     * paragraph <-> plain-text conversion, mood parsing).
     */
    function ProcessData() {
        // Date#getMonth() is 0-based. One shared table replaces the two
        // per-function copies, which both had index 2 mistyped as 'May'
        // (it must be 'March') — that's the bug fix in this revision.
        var MONTHS = [
            'January', 'February', 'March', 'April', 'May', 'June',
            'July', 'August', 'September', 'October', 'November', 'December'
        ];

        var service = {
            // place data manipulating functions here
            formatDate: formatDate,
            formatText: formatText,
            getDateObject: getDateObject,
            getMoodsArray: getMoodsArray,
            sanitizeText: sanitizeText,
            // getEditedMoods: getEditedMoods
        };

        // "January 5, 2020"-style display string for a Date.
        function formatDate(date) {
            return MONTHS[date.getMonth()] + ' ' + date.getDate() + ', ' + date.getFullYear();
        }

        // { month, day, year } with the month already spelled out.
        function getDateObject(date) {
            return {
                month: MONTHS[date.getMonth()],
                day: date.getDate(),
                year: date.getFullYear()
            };
        }

        // Wraps each blank-line-separated paragraph in <p>...</p>.
        function formatText(text) {
            return text.split('\n\n').map(function(paragraph) {
                return '<p>' + paragraph + '</p>';
            }).join('');
        }

        // Moods as an array: a custom mood is a space-separated (optionally
        // comma-ed) string; a preset mood becomes a one-element array.
        function getMoodsArray(entryObj) {
            if (entryObj.mood === 'Custom Mood') {
                return entryObj.customMood.replace(/[,]/g, '').split(' ');
            }
            return Array(entryObj.mood);
        }

        // Inverse of formatText: strips <p>/</p> markup, restoring blank
        // lines between paragraphs (the last paragraph gets no trailing one).
        function sanitizeText(text) {
            var paragraphs = text.split('<p>');
            return paragraphs.map(function(paragraph, i) {
                if (i === paragraphs.length - 1) {
                    return paragraph.replace('</p>', '');
                }
                return paragraph.replace('</p>', '\n\n');
            }).join('');
        }

        // needed to add new moods on PUT requests
        // function getEditedMoods(newMoods, entryMoods) {
        //     var moods = newMoods.replace(/[,]/g, '').split(' ');
        //     return entryMoods.concat(moods);
        // }

        return service;
    }
})();
|
import React from "react";
import Layout from "../components/Layout";
import { push } from "gatsby";
interface State {
  // Seconds remaining before the automatic redirect to "/".
  timeout: number;
}

/**
 * 404 page: renders a 5-second countdown, then redirects to the home page.
 */
class NotFoundPage extends React.Component<{}, State> {
  // Interval id from window.setInterval; -1 while no interval is active.
  private timer = -1;

  constructor(props: {}) {
    super(props);
    this.state = { timeout: 5 };
  }

  public componentDidMount() {
    this.timer = window.setInterval(() => {
      if (this.state.timeout === 1) {
        // Fix: stop the interval before navigating. Previously it kept
        // firing until unmount, so a delayed unmount could trigger
        // repeated push("/") calls.
        window.clearInterval(this.timer);
        push("/");
        return;
      }
      this.setState({ timeout: this.state.timeout - 1 });
    }, 1000);
  }

  public componentWillUnmount() {
    // Harmless no-op if the interval was already cleared in the tick handler.
    window.clearInterval(this.timer);
  }

  public render() {
    return (
      <Layout>
        <h1>404 Not Found</h1>
        <div>
          <br />
          {this.state.timeout}¡
        </div>
      </Layout>
    );
  }
}

export default NotFoundPage;
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
using System.Collections.Generic;
using System.Text;
using Azure.Storage.Files.DataLake.Models;
namespace Azure.Storage.Files.DataLake
{
/// <summary>
/// Factory methods for the argument-validation and ACL-change exceptions
/// thrown by the DataLake client types.
/// </summary>
internal class DataLakeErrors
{
    /// <summary>Entity id supplied with an AccessControlType other than User or Group.</summary>
    public static ArgumentException EntityIdAndInvalidAccessControlType(string s)
        => new ArgumentException($"AccessControlType must be User or Group if entityId is specified. Value is \"{s}\"");
    /// <summary>PathAccessControlItem string did not have 3 or 4 colon-delimited parts.</summary>
    public static ArgumentException PathAccessControlItemStringInvalidLength(string s)
        => new ArgumentException($"{nameof(s)} should have 3 or 4 parts delimited by colons. Value is \"{s}\"");
    /// <summary>4-part PathAccessControlItem string whose first part is not "default".</summary>
    public static ArgumentException PathAccessControlItemStringInvalidPrefix(string s)
        => new ArgumentException($"If {nameof(s)} is 4 parts, the first must be \"default\". Value is \"{s}\"");
    /// <summary>RemovePathAccessControlItem string did not have 1 to 3 colon-delimited parts.</summary>
    public static ArgumentException RemovePathAccessControlItemInvalidString(string s)
        => new ArgumentException($"{nameof(s)} must have 1 to 3 parts delimited by colons. Value is \"{s}\"");
    /// <summary>3-part RemovePathAccessControlItem string whose first part is not "default".</summary>
    public static ArgumentException RemovePathAccessControlItemStringInvalidPrefix(string s)
        => new ArgumentException($"If {nameof(s)} is 3 parts, the first must be \"default\". Value is \"{s}\"");
    /// <summary>Octal permissions string that is not exactly 4 characters.</summary>
    public static ArgumentException PathPermissionsOctalInvalidLength(string s)
        => new ArgumentException($"{nameof(s)} must be 4 characters. Value is \"{s}\"");
    /// <summary>Octal permissions string whose first digit is not 0 or 1.</summary>
    public static ArgumentException PathPermissionsOctalInvalidFirstDigit(string s)
        => new ArgumentException($"First digit of {nameof(s)} must be 0 or 1. Value is \"{s}\"");
    /// <summary>Symbolic permissions string that is not 9 or 10 characters.</summary>
    public static ArgumentException PathPermissionsSymbolicInvalidLength(string s)
        => new ArgumentException($"{nameof(s)} must be 9 or 10 characters. Value is \"{s}\"");
    /// <summary>Symbolic role permission containing a character outside the allowed set.</summary>
    public static ArgumentException RolePermissionsSymbolicInvalidCharacter(string s)
        => new ArgumentException($"Role permission contains an invalid character. Value is \"{s}\"");
    /// <summary>Symbolic role permission that is not exactly 3 characters.</summary>
    public static ArgumentException RolePermissionsSymbolicInvalidLength(string s)
        => new ArgumentException($"Role permission must be 3 characters. Value is \"{s}\"");
    /// <summary>
    /// Wraps a service RequestFailedException raised while recursively changing an
    /// ACL, preserving the continuation token so the operation can be resumed.
    /// </summary>
    public static DataLakeAclChangeFailedException ChangeAclRequestFailed(RequestFailedException exception, string continuationToken)
        => new DataLakeAclChangeFailedException(
            $"An error occurred while recursively changing the access control list. " +
            $"See the {nameof(exception.InnerException)} of type {exception.GetType().FullName} " +
            $"with {nameof(exception.Status)}={exception.Status} and " +
            $"{nameof(exception.ErrorCode)}={exception.ErrorCode} for more information. " +
            $"You can resume changing the access control list using " +
            $"{nameof(DataLakeAclChangeFailedException.ContinuationToken)}={continuationToken} " +
            $"after addressing the error.",
            exception,
            continuationToken);
    /// <summary>
    /// Wraps any other exception raised while recursively changing an ACL,
    /// preserving the continuation token so the operation can be resumed.
    /// </summary>
    public static DataLakeAclChangeFailedException ChangeAclFailed(Exception exception, string continuationToken)
        => new DataLakeAclChangeFailedException(
            $"An error occurred while recursively changing the access control list. See the {nameof(exception.InnerException)} " +
            $"of type {exception.GetType().FullName} for more information. You can resume changing the access control list using " +
            $"{nameof(DataLakeAclChangeFailedException.ContinuationToken)}={continuationToken} after addressing the error.",
            exception,
            continuationToken);
}
}
|
export * from './const'
export * from './core'
export * from './definitions'
export * from './level'
export * from './root'
export * from './types'
|
#!/bin/bash
# Run the project's bundled PHP-CS-Fixer against the repository root,
# forwarding any extra CLI arguments (e.g. --dry-run).
# Fix: quote the $BASEDIR expansions so the wrapper works when the
# checkout path contains spaces.
BASEDIR=$(dirname "$BASH_SOURCE")
"$BASEDIR/../vendor/bin/php-cs-fixer" fix "$@" "$BASEDIR/../"
|
use crate::*;
use indexmap::IndexMap;
use serde::{Deserialize, Serialize};
/// A media-type entry of a request or response body: schema plus example and
/// encoding metadata. Field names mirror the OpenAPI "Media Type Object" —
/// TODO confirm against the spec version this crate targets.
#[derive(Debug, Clone, Serialize, Deserialize, Default, PartialEq)]
pub struct MediaType {
    /// Schema describing the body content; either inline or a reference.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub schema: Option<ReferenceOr<Schema>>,
    /// A single free-form example value; omitted from output when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub example: Option<serde_json::Value>,
    /// Named examples; defaults to empty and is omitted from output when empty.
    #[serde(default, skip_serializing_if = "IndexMap::is_empty")]
    pub examples: IndexMap<String, ReferenceOr<Example>>,
    /// Encoding information keyed by property name; omitted when empty.
    #[serde(default, skip_serializing_if = "IndexMap::is_empty")]
    pub encoding: IndexMap<String, Encoding>,
}
|
import * as ts from 'typescript';
import * as Lint from 'tslint';
const OPTION_ALWAYS = 'always';
/**
 * object-curly-spacing rule: delegates the actual checking to
 * ObjectCurlySpacingWalker; failure messages are keyed by the
 * 'always'/'never' mode.
 */
export class Rule extends Lint.Rules.AbstractRule {
    public static FAILURE_STRING = {
        always: {
            start: `A space is required after '{'`,
            end: `A space is required before '}'`
        },
        never: {
            start: `There should be no space after '{'`,
            end: `There should be no space before '}'`
        }
    };

    public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
        // Walk the file with a freshly configured walker and return its failures.
        return this.applyWithWalker(new ObjectCurlySpacingWalker(sourceFile, this.getOptions()));
    }
}
class ObjectCurlySpacingWalker extends Lint.RuleWalker {
    // true when configured as 'always' (spaces required inside braces),
    // false for 'never'.
    private always: boolean;
    // Per-construct exceptions; when an exception differs from `always`, the
    // spacing requirement is inverted for objects/arrays that appear last
    // inside an object literal.
    private exceptions: {
        arraysInObjects: boolean;
        objectsInObjects: boolean;
    };
    constructor(sourceFile: ts.SourceFile, options: Lint.IOptions) {
        super(sourceFile, options);
        // No options at all is treated the same as 'always'.
        this.always = this.hasOption(OPTION_ALWAYS) || (this.getOptions() && this.getOptions().length === 0);
        const opt = this.getOptions();
        this.exceptions = opt[1] || {};
        // Unspecified exceptions default to the main mode.
        if (this.exceptions.arraysInObjects === undefined) {
            this.exceptions.arraysInObjects = this.always;
        }
        if (this.exceptions.objectsInObjects === undefined) {
            this.exceptions.objectsInObjects = this.always;
        }
    }
    protected visitNode(node: ts.Node): void {
        // Brace-delimited constructs this rule applies to.
        const bracedKind = [
            ts.SyntaxKind.ObjectLiteralExpression,
            ts.SyntaxKind.ObjectBindingPattern,
            ts.SyntaxKind.NamedImports,
            ts.SyntaxKind.NamedExports
        ];
        if (bracedKind.indexOf(node.kind) > -1) {
            this.checkSpacingInsideBraces(node);
        }
        super.visitNode(node);
    }
    // Checks (and offers fixes for) the space immediately after '{' and
    // immediately before '}' of a single-line braced node.
    private checkSpacingInsideBraces(node: ts.Node): void {
        const text = node.getText();
        if (text.indexOf('\n') !== -1 || /^\{\s*\}$/.test(text)) {
            // Rule does not apply when the braces span multiple lines
            return;
        }
        // Lookup whether the last value in the object is an object or array literal
        let endsWithObjectLiteral = false;
        let endsWithArrayLiteral = false;
        if (node.getChildren().length === 3) {
            const contents = node.getChildren()[1].getChildren();
            if (contents.length > 0) {
                const lastElement = contents[contents.length - 1];
                if (lastElement.kind === ts.SyntaxKind.PropertyAssignment || lastElement.kind === ts.SyntaxKind.BindingElement) {
                    const value = lastElement.getChildren();
                    if (value.length === 3) {
                        endsWithObjectLiteral = value[2].kind === ts.SyntaxKind.ObjectLiteralExpression || value[2].kind === ts.SyntaxKind.ObjectBindingPattern;
                        endsWithArrayLiteral = value[2].kind === ts.SyntaxKind.ArrayLiteralExpression;
                    }
                }
            }
        }
        // We have matching braces, lets find out number of leading spaces
        const leadingSpace = text.match(/^\{(\s{0,2})/)![1].length;
        if (this.always) {
            if (leadingSpace === 0) {
                const fix = Lint.Replacement.appendText(node.getStart() + 1, ' ');
                this.addFailure(this.createFailure(node.getStart(), 1, Rule.FAILURE_STRING.always.start, fix));
            }
        } else {
            if (leadingSpace > 0) {
                const fix = Lint.Replacement.deleteText(node.getStart() + 1, leadingSpace);
                this.addFailure(this.createFailure(node.getStart(), 1, Rule.FAILURE_STRING.never.start, fix));
            }
        }
        // Finding trailing spaces requires checking if exceptions apply, and adjusting accordingly
        const trailingSpace = text.match(/(\s{0,2})}$/)![1].length;
        const arrayExceptionApplies = this.always !== this.exceptions.arraysInObjects && endsWithArrayLiteral;
        const objectExceptionApplies = this.always !== this.exceptions.objectsInObjects && endsWithObjectLiteral;
        const spaceRequired = arrayExceptionApplies || objectExceptionApplies ? !this.always : this.always;
        if (spaceRequired) {
            if (trailingSpace === 0) {
                const fix = Lint.Replacement.appendText(node.getEnd() - 1, ' ');
                this.addFailure(this.createFailure(node.getEnd() - 1, 1, Rule.FAILURE_STRING.always.end, fix));
            }
        } else {
            if (trailingSpace > 0) {
                const fix = Lint.Replacement.deleteText(node.getEnd() - trailingSpace - 1, trailingSpace);
                this.addFailure(this.createFailure(node.getEnd() - 1, 1, Rule.FAILURE_STRING.never.end, fix));
            }
        }
    }
}
|
# Finds non-whitelisted tags.
module JekyllPrepublish
  class PostTagValidator
    def initialize(configuration)
      # Tags permitted by the site configuration ('tag_whitelist' key).
      @whitelist = Set.new(configuration.fetch('tag_whitelist', []))
    end

    def describe_validation
      "Checking tags are from whitelist [#{@whitelist.to_a.join(', ')}]."
    end

    # Returns nil when every tag of the post is whitelisted, otherwise an
    # error message naming the offending tags.
    def validate(post, _document, _site)
      offending = Set.new(post.data["tags"]) - @whitelist
      return nil if offending.empty?
      "Tags not allowed are {#{offending.to_a.join(", ")}}."
    end
  end
end
|
import {
TextDocumentContentProvider,
ExtensionContext,
Uri,
Event,
EventEmitter,
commands,
window
} from 'vscode';
import { CheckpointsModel, ICheckpoint, IFile, ICheckpointStore, isCheckpoint, isFile } from './CheckpointsModel';
import * as path from 'path';
/**
 * Renders checkpoints as (readonly) virtual documents and offers diff /
 * preview commands on top of them.
 */
export class CheckpointsDocumentView implements TextDocumentContentProvider {
    private _onDidChange: EventEmitter<Uri> = new EventEmitter<Uri>();
    readonly onDidChange: Event<Uri> = this._onDidChange.event;
    readonly context: ExtensionContext;

    constructor(context: ExtensionContext, private model: CheckpointsModel) {
        this.context = context;

        // Invalidate rendered documents whenever the underlying checkpoint
        // (or its parent file) is removed or updated.
        context.subscriptions.push(
            model.onDidRemoveCheckpoint((removedItem: ICheckpoint | IFile | ICheckpointStore) => {
                if (isCheckpoint(removedItem)) {
                    this._onDidChange.fire(this.getCheckpointUri(removedItem));
                } else if (isFile(removedItem)) {
                    this._onDidChange.fire(Uri.parse(removedItem.id));
                }
            }),
            model.onDidUpdateItem((updatedItem: ICheckpoint | IFile) => {
                if (isCheckpoint(updatedItem)) {
                    this._onDidChange.fire(this.getCheckpointUri(updatedItem));
                } else if (isFile(updatedItem)) {
                    this._onDidChange.fire(Uri.parse(updatedItem.id));
                }
            }),
        );
    }

    /**
     * Diff a checkpoint against a document.
     * @param comparisonDocumentUri The uri to the document to diff against.
     * @param checkpointId The id of the checkpoint.
     */
    public showDiffWithDocument(comparisonDocumentUri: Uri, checkpointId: string) {
        console.log(`
            Show diff between document '${
                comparisonDocumentUri.path
            }' and checkpoint with id '${checkpointId}'
        `);

        const checkpoint = this.model.getCheckpoint(checkpointId);
        if (!checkpoint) {
            console.error(`The checkpoint with id: '${checkpointId}' does not exist`);
            return;
        }

        const checkpointUri = this.getCheckpointUri(checkpoint);
        const comparingDocumentName = path.basename(checkpointUri.toString());
        const diffTitle = `${comparingDocumentName}<->${checkpoint.name}`;
        commands.executeCommand('vscode.diff', comparisonDocumentUri, checkpointUri, diffTitle);
    }

    /**
     * Diff two checkpoints against eachother.
     * @param checkpointId1 checkpoint shown to the left
     * @param checkpointId2 checkpoint shown to the right
     */
    public showDiffWithCheckpoint(checkpointId1: string, checkpointId2: string): void {
        console.log(`
            Show diff between checkpoint '${
                checkpointId1
            }' and checkpoint '${checkpointId2}'
        `);

        const checkpoint1 = this.model.getCheckpoint(checkpointId1);
        const checkpoint2 = this.model.getCheckpoint(checkpointId2);
        if (!checkpoint1) {
            console.error(`The checkpoint with id: '${checkpointId1}' does not exist`);
            return;
        } else if (!checkpoint2) {
            console.error(`The checkpoint with id: '${checkpointId2}' does not exist`);
            return;
        }

        const checkpointUri1 = this.getCheckpointUri(checkpoint1);
        const checkpointUri2 = this.getCheckpointUri(checkpoint2);
        const diffTitle = `${checkpoint1.name}<->${checkpoint2.name}`;
        commands.executeCommand('vscode.diff', checkpointUri1, checkpointUri2, diffTitle);
    }

    /**
     * Preview the checkpoint in a readonly document.
     * @param checkpointId The id of the checkpoint
     */
    public showPreview(checkpointId: string) {
        console.log(`Show preview of checkpoint with id '${checkpointId}'`);

        const checkpoint = this.model.getCheckpoint(checkpointId);
        if (!checkpoint) {
            console.error(`The checkpoint with id: '${checkpointId}' does not exist`);
            return;
        }

        const checkpointUri = this.getCheckpointUri(checkpoint);
        window.showTextDocument(checkpointUri);
    }

    /**
     * Provide textual content for a given uri.
     * The editor will use the returned string-content to create a
     * readonly document. Resources allocated should be released
     * when the corresponding document has been closed.
     */
    public provideTextDocumentContent(uri: Uri): string {
        // The checkpoint id is stored in the uri fragment (see getCheckpointUri).
        let checkpointId = uri.fragment;
        let checkpoint = this.model.getCheckpoint(checkpointId);

        if (checkpoint) {
            return checkpoint.text;
        }

        // Checkpoint was removed while its document was open. Return an empty
        // document — previously this path implicitly returned `undefined`,
        // violating the declared `string` return type.
        console.warn("Checkpoint you are currently viewing has been removed.")
        return '';
    }

    /**
     * Get the uri for the (fake) document.
     * @param checkpoint The checkpoint
     */
    private getCheckpointUri(checkpoint: ICheckpoint): Uri {
        const filePath = Uri.parse(checkpoint.parent);

        // Set the checkpoint id to be the 'fragment' of the uri.
        // The uri's 'path' part needs to be a file (fake or not) that has the
        // right file extension for syntax highlighting to work. We use the parent
        // files path
        return Uri.parse(`checkpointsDocumentView://checkpoint/${filePath.path}#${checkpoint.id}`);
    }
}
|
import sys

import numpy as np
import pandas as pd
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import confusion_matrix, f1_score
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import train_test_split

np.set_printoptions(threshold=sys.maxsize)

# Redirect stdout so the whole analysis is captured in a report file.
orig_stdout = sys.stdout
f = open("random_forest_analysis.txt", 'w')
sys.stdout = f

# Import the datasets for red and white wine, tagging each row with its color
# (1 = red, 0 = white) so the combined model can use it as a feature.
# Forward slashes keep the relative paths portable (they work on Windows too;
# the original '..\w...' literals relied on '\w' not being an escape).
dataset_red = pd.read_csv('../winequality-red.csv', encoding="ISO-8859-1")
dataset_red.insert(0, 'color', 1)
dataset_white = pd.read_csv('../winequality-white.csv', encoding="ISO-8859-1")
dataset_white.insert(0, 'color', 0)

# Combine the two datasets.
dataset = pd.concat([dataset_red, dataset_white], axis=0)
print("==========================================================================")

# Split the dataset into features and classifier output (last column: quality).
X = dataset.iloc[:, :-1].values
y = dataset.iloc[:, -1].values

# Split the dataset into training set and test set.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)

# Feature scaling: fit on the training data only to avoid test-set leakage.
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)

# Fit the classifier on the training set.
classifier = RandomForestClassifier(n_estimators=100, criterion='entropy')
print("==========================================================================")
# type(...).__name__ replaces np.str(...) (the np.str alias was removed in NumPy 1.24).
print("Analysis for classifier with default values : {}".format(type(classifier).__name__))
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
cm = confusion_matrix(y_true=y_test, y_pred=y_pred)
print(cm)
f1_score_for_classifier = f1_score(y_test, y_pred, average='micro')
print("F1 score is {}".format(f1_score_for_classifier))

# Apply k-fold cross validation.
accuracies = cross_val_score(estimator=classifier, X=X_train, y=y_train, cv=3)
print("accuracies are {}".format(accuracies))
print(
    "accuracy values on k-fold cross validation have mean as {} and std as {}".format(
        round(accuracies.mean(), 6),
        round(accuracies.std(), 6)))

# Grid search over the main random-forest hyper-parameters.
# (An earlier, unused `parameters` list had trailing spaces inside its keys
# and would have raised an error if ever passed to GridSearchCV; removed.)
from sklearn.model_selection import GridSearchCV
param_grid = {
    'n_estimators': [5, 10, 30, 100, 200, 700, 1200],
    'max_features': ['auto', 'sqrt', 'log2'],
    'criterion': ['gini', 'entropy'],
}
grid_search = GridSearchCV(estimator=classifier, param_grid=param_grid, scoring='accuracy', cv=3, n_jobs=-1)
grid_search = grid_search.fit(X_train, y_train)
best_accuracy = grid_search.best_score_
best_params = grid_search.best_params_
print("Best accuracy obtained is {}".format(best_accuracy))
print("With params {}".format(best_params))

# Restore stdout and close the report file.
sys.stdout = orig_stdout
f.close()
|
package abc
import utilities.debugLog
import java.util.*
fun main(args: Array<String>) {
    // Read the start (r1, c1) and goal (r2, c2) coordinates from stdin.
    val input = Scanner(System.`in`)
    val coords = LongArray(4) { input.nextLong() }
    println(problem184c(coords[0], coords[1], coords[2], coords[3]))
}
/**
 * ABC 184 C "Super Ryuma": minimum number of moves from (r1, c1) to (r2, c2)
 * for a piece that may move any distance diagonally, or to any square within
 * Manhattan distance 3. The answer is always 0..3.
 */
fun problem184c(r1: Long, c1: Long, r2: Long, c2: Long): Int {
    val dr = Math.abs(r1 - r2)
    val dc = Math.abs(c1 - c2)
    val onDiagonal = (r1 + c1 == r2 + c2) || (r1 - c1 == r2 - c2)
    return when {
        dr == 0L && dc == 0L -> 0
        // One move: already on a diagonal, or reachable by a short hop.
        onDiagonal || dr + dc <= 3 -> 1
        // Two moves: same square color (two diagonal moves suffice), or a
        // short hop brings us onto one of the goal's diagonals.
        (r1 + c1 + r2 + c2) % 2 == 0L -> 2
        dr + dc <= 6 -> 2
        Math.abs((r1 - c1) - (r2 - c2)) <= 3 -> 2
        Math.abs((r1 + c1) - (r2 + c2)) <= 3 -> 2
        else -> 3
    }
}
|
// import store from '~/monitoring/stores/embed_group';
import * as actions from '~/monitoring/stores/embed_group/actions';
import * as types from '~/monitoring/stores/embed_group/mutation_types';
import { mockNamespace } from '../../mock_data';
describe('Embed group actions', () => {
  describe('addModule', () => {
    it('adds a module to the store', () => {
      const commitSpy = jest.fn();

      actions.addModule({ commit: commitSpy }, mockNamespace);

      expect(commitSpy).toHaveBeenCalledWith(types.ADD_MODULE, mockNamespace);
    });
  });
});
|
#!/bin/bash
# SPDX-License-Identifier: Apache-2.0
# Copyright 2021 Authors of Cilium
DIR=$(dirname $(readlink -ne $BASH_SOURCE))
source "${DIR}/lib/common.sh"
source "${DIR}/../backporting/common.sh"
# Print usage/help text for this script to the configured log output.
usage() {
    logecho "usage: $0 <RUN-URL> [VERSION] [GH-USERNAME]"
    logecho "RUN-URL GitHub URL with the RUN for the release images"
    logecho " example: https://github.com/cilium/cilium/actions/runs/600920964"
    logecho "VERSION Target version (X.Y.Z) (default: read from VERSION file)"
    logecho "GH-USERNAME GitHub username for authentication (default: autodetect)"
    logecho "GITHUB_TOKEN environment variable set with the scope public:repo"
    logecho
    logecho "--help Print this help message"
}
# Validate CLI arguments and environment before doing any work.
# Exits (via common::exit) on bad usage, a missing 'hub' binary, a dirty git
# tree, a malformed VERSION argument, or an unset GITHUB_TOKEN.
handle_args() {
    if ! common::argc_validate 4; then
        usage 2>&1
        common::exit 1
    fi
    if [[ "$1" = "--help" ]] || [[ "$1" = "-h" ]]; then
        usage
        common::exit 0
    fi
    # 'hub' is needed by main() to open the pull request at the end.
    if ! hub help | grep -q "pull-request"; then
        echo "This tool relies on 'hub' from https://github.com/github/hub." 1>&2
        echo "Please install this tool first." 1>&2
        common::exit 1
    fi
    # Refuse to run on a dirty tree: the script creates branches and commits.
    if ! git diff --quiet; then
        echo "Local changes found in git tree. Exiting release process..." 1>&2
        exit 1
    fi
    # Optional $2 (VERSION) must look like X.Y.Z.
    if [ ! -z "$2" ] && ! echo "$2" | grep -q "[0-9]\+\.[0-9]\+\.[0-9]\+"; then
        usage 2>&1
        common::exit 1 "Invalid VERSION ARG \"$2\"; Expected X.Y.Z"
    fi
    if [ -z "${GITHUB_TOKEN}" ]; then
        usage 2>&1
        common::exit 1 "GITHUB_TOKEN not set!"
    fi
}
# Drive the post-release digest update: pull the freshly released image
# digests, commit them on a new branch, and open a PR against the stable
# branch (interactive: asks for confirmation before pushing).
main() {
    handle_args "$@"
    local ersion version branch user_remote
    # "ersion" is the target version without its leading 'v'.
    ersion="$(echo ${2:-$(cat VERSION)} | sed 's/^v//')"
    version="v${ersion}"
    # Stable branch name is the X.Y part of the version.
    branch=$(echo $version | sed 's/.*v\([0-9]\.[0-9]\).*/\1/')
    user_remote=$(get_user_remote ${3:-})
    git checkout -b pr/$version-digests $version
    ${DIR}/pull-docker-manifests.sh "$@"
    logecho
    logecho "Check that the following changes look correct:"
    # TODO: Make this less interactive when we have used it enough
    git add --patch install/kubernetes
    git commit -se -m "install: Update image digests for $version" -m "$(cat digest-$version.txt)"
    echo "Create PR for v$branch with these changes"
    if ! common::askyorn ; then
        common::exit 0 "Aborting post-release updates."
    fi
    logecho "Sending pull request for branch v$branch..."
    PR_BRANCH=$(git rev-parse --abbrev-ref HEAD)
    git push $user_remote "$PR_BRANCH"
    hub pull-request -b "v$branch" -l backport/$branch
}
# Script entry point.
main "$@"
|
---
title: Verkefni dagsins
lysing: >-
Draga einhvern í fjölskyldunni niður á bryggju, taka mynd af sér herma eftir
fiski og senda okkur....:)
dagsetning: 12/14
---
|
using Distributed
addprocs(3, exeflags="--project")

# Remote channels shared by the master and the workers: `jobs` carries job
# ids, `results` carries (job_id, exec_time, worker_id) tuples.
const jobs = RemoteChannel(()->Channel{Int}(32))
const results = RemoteChannel(()->Channel{Tuple}(32))

n = 12

# Push job ids 1..n onto the jobs channel.
function make_jobs(n)
    for i in 1:n
        put!(jobs, i)
    end
end

make_jobs(n) # Feed the jobs channel with "n" jobs.

@everywhere function do_work(jobs, results) # Define work function everywhere.
    while true
        job_id = take!(jobs)
        exec_time = rand()
        sleep(exec_time)  # Simulate a job taking `exec_time` seconds.
        put!(results, (job_id, exec_time, myid()))
    end
end

for p in workers() # Start tasks on the workers to process requests in parallel.
    @async remote_do(do_work, p, jobs, results) # Similar to remotecall.
end

@elapsed while n > 0 # Print out results.
    job_id, exec_time, location_worker = take!(results)
    println("$job_id finished in $(round(exec_time, digits=2)) seconds on worker $location_worker")
    # `global` is required: assigning to `n` inside a top-level loop is a
    # soft-scope assignment, which in a script would otherwise shadow the
    # global and fail with UndefVarError (Julia >= 1.5 non-interactive rules).
    global n = n - 1
end
|
// Re-export the Tag component under its public OTag name.
export { default as OTag } from './src/Tag.vue';
|
package by.godevelopment.currencyappsample.domain.models

/**
 * Immutable snapshot of currency-rate data prepared for presentation.
 *
 * @property header        Header/title text for the rates screen.
 * @property oldData       Label of the previous rates column — presumably a
 *                         publication date string; confirm against callers.
 * @property newData       Label of the current rates column (see note above).
 * @property currencyItems Per-currency rate rows; empty by default.
 */
data class CurrenciesDataModel(
    val header: String = "",
    val oldData: String = "",
    val newData: String = "",
    val currencyItems: List<ItemCurrencyModel> = listOf()
)
|
require "spec_helper"
describe Glysellin::Image do
  it { should belong_to(:imageable) }
  it { should have_attached_file(:image) }

  before(:each) do
    @discount_type = create(:discount_type)
  end

  # NOTE(review): these examples exercise DiscountType#to_s, not an image
  # URL — the describe label previously said '#image_url'. They likely
  # belong in a DiscountType spec; confirm and move if so.
  describe '#to_s' do
    [
      ['order-percentage', '%'],
      ['fixed-price', '€']
    ].each do |(identifier, value)|
      it "returns '#{value}' when the identifier is '#{identifier}'" do
        @discount_type.identifier = identifier
        expect(@discount_type.to_s).to eq value
      end
    end
  end
end
|
import java.util.*;
import org.junit.Test;
import static org.junit.Assert.*;
// LC1665: https://leetcode.com/problems/minimum-initial-energy-to-finish-tasks/
//
// You are given an array tasks where tasks[i] = [actual_i, minimum_i]:
// actual_i is the actual amount of energy you spend to finish the ith task.
// minimum_i is the minimum amount of energy you require to begin the ith task.
// You can finish the tasks in any order you like.
// Return the minimum initial amount of energy you will need to finish all the tasks.
//
// Constraints:
// 1 <= tasks.length <= 10^5
// 1 <= actual_i <= minimum_i <= 10^4
public class MinimumEffort {
    // Sort + Binary Search over the answer.
    // time complexity: O(N*log(N)), space complexity: O(log(N))
    // 34 ms(49.93%), 96.5 MB(82.82%) for 34 tests
    public int minimumEffort(int[][] tasks) {
        // Process tasks with the largest (minimum - actual) gap first.
        Arrays.sort(tasks, Comparator.comparingInt((int[] t) -> t[1] - t[0]).reversed());
        int lo = 0;
        int hi = 1000_000_000;
        while (lo < hi) {
            int mid = (lo + hi) >>> 1;
            if (ok(tasks, mid)) {
                hi = mid;
            } else {
                lo = mid + 1;
            }
        }
        return lo;
    }

    // Feasibility check: can every task be finished starting with `energy`?
    private boolean ok(int[][] tasks, int energy) {
        for (int[] task : tasks) {
            if (energy < task[1]) {
                return false;
            }
            energy -= task[0];
        }
        return true;
    }

    // Sort + Greedy: top up the pool whenever a task's minimum exceeds it.
    // time complexity: O(N*log(N)), space complexity: O(log(N))
    // 21 ms(89.58%), 96.9 MB(66.85%) for 34 tests
    public int minimumEffort2(int[][] tasks) {
        Arrays.sort(tasks, Comparator.comparingInt((int[] t) -> t[1] - t[0]).reversed());
        int total = 0;
        int pool = 0;
        for (int[] task : tasks) {
            if (task[1] > pool) {
                total += task[1] - pool;
                pool = task[1];
            }
            pool -= task[0];
        }
        return total;
    }

    // Sort + Greedy, scanning from the smallest (minimum - actual) gap.
    // time complexity: O(N*log(N)), space complexity: O(log(N))
    // 21 ms(89.58%), 96.6 MB(76.46%) for 34 tests
    public int minimumEffort3(int[][] tasks) {
        Arrays.sort(tasks, Comparator.comparingInt((int[] t) -> t[1] - t[0]));
        int needed = 0;
        for (int[] task : tasks) {
            needed = Math.max(needed + task[0], task[1]);
        }
        return needed;
    }

    // Runs all three implementations against the same input and expectation.
    private void test(int[][] tasks, int expected) {
        assertEquals(expected, minimumEffort(tasks));
        assertEquals(expected, minimumEffort2(tasks));
        assertEquals(expected, minimumEffort3(tasks));
    }

    @Test public void test() {
        test(new int[][] {{1, 2}, {2, 4}, {4, 8}}, 8);
        test(new int[][] {{1, 3}, {2, 4}, {10, 11}, {10, 12}, {8, 9}}, 32);
        test(new int[][] {{1, 7}, {2, 8}, {3, 9}, {4, 10}, {5, 11}, {6, 12}}, 27);
        test(new int[][] {{1, 2}, {1, 7}, {2, 3}, {5, 9}, {2, 2}}, 11);
    }

    public static void main(String[] args) {
        String clazz = new Object() {
        }.getClass().getEnclosingClass().getSimpleName();
        org.junit.runner.JUnitCore.main(clazz);
    }
}
|
using System;

namespace TNeural
{
    /// <summary>Signature of a neuron activation function.</summary>
    public delegate float Activator(float input);

    /// <summary>Standard activation functions used by the network.</summary>
    public static class Activators
    {
        /// <summary>Rectified linear unit: max(0, x).</summary>
        public static readonly Activator ReLU = (x) => Math.Max(0.0f, x);

        /// <summary>Logistic sigmoid: 1 / (1 + e^-x), computed in double then narrowed.</summary>
        public static readonly Activator Sigmoid = (x) => (float) (1.0 / (1 + Math.Exp(-x)));

        /// <summary>Hyperbolic tangent.</summary>
        public static readonly Activator Tanh = (x) => (float) Math.Tanh(x);

        /// <summary>Pass-through (linear) activation.</summary>
        public static readonly Activator Identity = (x) => x;
    }
}
|
<?php
namespace App\Http\Controllers;
use Illuminate\Foundation\Bus\DispatchesJobs;
use Illuminate\Routing\Controller as BaseController;
use Illuminate\Foundation\Validation\ValidatesRequests;
use Illuminate\Foundation\Auth\Access\AuthorizesRequests;
use Illuminate\Http\Request;
use App\Torneo;
use PDF;
class TorneoController extends BaseController
{
    /** Landing page of the tournament module. */
    public function inicio()
    {
        return view('torneo.inicio');
    }

    /** Paginated list of tournaments, active ones first. */
    public function index()
    {
        $torneos = Torneo::orderBy('ESTADO','asc')->paginate(7);
        return view('torneo.index',['torneos' => $torneos]);
    }

    /**
     * Search tournaments by name; falls back to the full listing when the
     * search text is missing/empty or there are no matches.
     */
    public function buscar()
    {
        $data = request()->all();
        if (isset($data['textoBuscar']) && $data['textoBuscar'] != "") {
            $torneos = Torneo::where('NOMBRE', 'LIKE', '%' . $data['textoBuscar'] . '%')->paginate(7);
            if (count($torneos) > 0) {
                return view('torneo.index', ['torneos' => $torneos]);
            }
        }
        return redirect('torneo/');
    }

    /** Form to create a new tournament. */
    public function nuevo()
    {
        return view('torneo.nuevo');
    }

    /**
     * Create a tournament from the fillable attributes present in the
     * request. Attribute names arrive lowercase in the request and are
     * persisted as uppercase column names.
     *
     * The record is saved once, after all attributes are assigned (the
     * previous version saved inside the loop and aborted the request with a
     * leftover dd() when any attribute was missing).
     */
    public function crear(Request $request)
    {
        $torneo = new Torneo();
        $asignado = false;
        foreach ($torneo->getFillable() as $atributo) {
            $dato = $request[strtolower($atributo)];
            if (isset($dato)) {
                $torneo->setAttribute(strtoupper($atributo), $dato);
                $asignado = true;
            }
            // Missing attributes are simply skipped.
        }
        if ($asignado) {
            $torneo->save();
        }
        return redirect('torneo/');
    }

    /** Show a single tournament. */
    public function ver($id)
    {
        $torneo = Torneo::find($id);
        return view('torneo.ver', ['torneo' => $torneo]);
    }

    /** Delete a tournament by id. */
    public function eliminar($id)
    {
        Torneo::destroy($id);
        return redirect('torneo/');
    }

    /** Edit form for a tournament. */
    public function editar($id)
    {
        $torneo = Torneo::find($id);
        return view('torneo.editar', ['torneo' => $torneo]);
    }

    /**
     * Update a tournament. Only one tournament may be active ('A') at a
     * time, so activating this one finalizes ('F') all others first.
     */
    public function actualizar()
    {
        $data = request()->all();
        if ($data['estado'] == 'A') {
            Torneo::where('ESTADO', 'A')->update(['ESTADO' => 'F']);
        }
        $torneo = Torneo::find($data['id']);
        $torneo->NOMBRE = $data['nombre'];
        $torneo->DESCRIPCION = $data['descripcion'];
        $torneo->FECHA = $data['fecha'];
        $torneo->ESTADO = $data['estado'];
        $torneo->save();
        return redirect('torneo/');
    }

    /** Stream the tournament PDF report. */
    public function reporte()
    {
        $pdf = PDF::loadView('reportes.torneo',['titulo'=>'Reporte Ejemplo']);
        return $pdf->stream();
    }

    /** Delete every tournament row (admin/maintenance action). */
    public function eliminarTodosLosDatos()
    {
        $torneos = Torneo::where('ID_TORNEO','>',0)->delete();
        return redirect('parametro/');
    }
}
|
import 'dart:convert' show jsonDecode, jsonEncode;
import 'package:http/http.dart' as http;
import 'package:lifx_http_api/src/responses/exceptions/lifx_http_exception.dart';
import './properties/properties.dart';
import './devices/devices.dart';
import './responses/responses.dart';
/// Client to access the LIFX HTTP API.
///
/// LIFX client that takes an [apiKey] to access the LIFX HTTP API endpoints listed at
/// https://api.developer.lifx.com/docs/
class Client {
  final String apiKey;

  Client(this.apiKey);

  /// API call to list all lights by default or [selector].
  ///
  /// Throws [LifxUnauthorizedError] on invalid credentials (401) and
  /// [LifxHttpException] for any other non-200 response.
  ///
  /// Note: the previous `try/catch` re-wrapped these typed exceptions in a
  /// plain `Exception`, which made them impossible to catch by type; the
  /// wrapper was removed so they propagate as thrown.
  Future<Iterable<Bulb>> listLights({String selector = "all"}) async {
    final Uri url = Uri.parse('https://api.lifx.com/v1/lights/$selector');
    final Map<String, String> headers = {"Authorization": "Bearer $apiKey"};
    final http.Response response = await http.get(url, headers: headers);

    // Handle invalid credentials
    if (response.statusCode == 401) {
      throw LifxUnauthorizedError.fromJson(
          jsonDecode(response.body) as Map<String, dynamic>);
    }

    // Handle valid credentials and available bulbs
    if (response.statusCode == 200) {
      final data = (jsonDecode(response.body) as List<dynamic>)
          .cast<Map<String, dynamic>>();
      return data.map((Map<String, dynamic> bulb) => Bulb.fromJson(bulb));
    }

    throw LifxHttpException(
      body: response.body,
      statusCode: response.statusCode,
    );
  }

  /// API call to set the state of a light.
  ///
  /// Only the parameters that are provided are sent to the API.
  /// Throws [LifxUnauthorizedError] on invalid credentials (401) and
  /// [LifxHttpException] for any other unexpected response.
  Future<SetStateBody> setState(
    String id, {
    String? power,
    double? brightness,
    double? duration,
    double? infrared,
    bool? fast,
    LifxColor? color,
  }) async {
    final url = Uri.parse("https://api.lifx.com/v1/lights/$id/state");
    final headers = {
      "Authorization": "Bearer $apiKey",
      "content-type": "application/json"
    };

    // Form body from optional parameters
    final body = {};
    if (power != null) body["power"] = power;
    if (brightness != null) body["brightness"] = brightness;
    if (duration != null) body["duration"] = duration;
    if (infrared != null) body["infrared"] = infrared;
    if (fast != null) body["fast"] = fast;
    if (color != null) body["color"] = color;

    final http.Response response = await http.put(
      url,
      headers: headers,
      body: jsonEncode(body),
    );

    // Handle invalid credentials
    if (response.statusCode == 401) {
      throw LifxUnauthorizedError.fromJson(
          jsonDecode(response.body) as Map<String, dynamic>);
    }

    // A successful state change is reported as 207 Multi-Status.
    if (response.statusCode == 207) {
      final Map<String, dynamic> data =
          jsonDecode(response.body) as Map<String, dynamic>;
      return SetStateBody.fromJson(data);
    }

    throw LifxHttpException(
      body: response.body,
      statusCode: response.statusCode,
    );
  }
}
|
/*
* vim:ts=4:sw=4:expandtab
*
* Copyright © 2016 Ingo Bürk
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* Except as contained in this notice, the names of the authors or their
* institutions shall not be used in advertising or otherwise to promote the
* sale, use or other dealings in this Software without prior written
* authorization from the authors.
*
*/
#include "externals.h"
#include "entry.h"
#include "util.h"
#define BUFFER_SIZE 1024
/**
 * Appends a single character to the current buffer.
 * If the buffer is not yet initialized or has been invalidated, it will be set up.
 *
 * Allocation failures are handled best-effort: the character is dropped and
 * a later append will retry the setup. (`entry` is unused but kept for
 * signature symmetry with the other parser helpers.)
 */
static void xcb_xrm_append_char(xcb_xrm_entry_t *entry, xcb_xrm_entry_parser_state_t *state,
        const char str) {
    ptrdiff_t offset;

    if (state->buffer_pos == NULL) {
        FREE(state->buffer);
        state->buffer = calloc(1, BUFFER_SIZE);
        state->buffer_pos = state->buffer;
        if (state->buffer == NULL) {
            /* Let's ignore this character and try again next time. */
            return;
        }
    }

    /* Increase the buffer if necessary. */
    offset = state->buffer_pos - state->buffer;
    if (offset % BUFFER_SIZE == BUFFER_SIZE - 1) {
        /* Use a temporary so a failed realloc neither leaks the old buffer
         * nor leaves buffer/buffer_pos pointing at NULL-derived addresses
         * (the previous unchecked realloc crashed on OOM). */
        char *grown = realloc(state->buffer, offset + BUFFER_SIZE + 1);
        if (grown == NULL) {
            /* Keep the old buffer and drop this character. */
            return;
        }
        state->buffer = grown;
        state->buffer_pos = state->buffer + offset;
    }

    *(state->buffer_pos++) = str;
}
/**
 * Insert a new component of the given type.
 * This function does not check whether there is an open buffer.
 *
 * `str` is the component name; callers pass NULL for CT_WILDCARD
 * components. Allocation failures are silently ignored (the component is
 * simply not appended), matching the parser's best-effort behavior.
 */
static void xcb_xrm_insert_component(xcb_xrm_entry_t *entry, xcb_xrm_component_type_t type,
        xcb_xrm_binding_type_t binding_type, const char *str) {
    xcb_xrm_component_t *new = calloc(1, sizeof(struct xcb_xrm_component_t));
    if (new == NULL)
        return;

    if (str != NULL) {
        /* Own a copy of the name; freed together with the component. */
        new->name = strdup(str);
        if (new->name == NULL) {
            FREE(new);
            return;
        }
    }

    new->type = type;
    new->binding_type = binding_type;
    TAILQ_INSERT_TAIL(&(entry->components), new, components);
}
/**
 * Finalize the current buffer by writing it into a component if necessary.
 * This function also resets the buffer to a clean slate.
 *
 */
static void xcb_xrm_finalize_component(xcb_xrm_entry_t *entry, xcb_xrm_entry_parser_state_t *state) {
    /* Only emit a component when the buffer holds at least one character. */
    if (state->buffer_pos != NULL && state->buffer_pos != state->buffer) {
        *(state->buffer_pos) = '\0';
        xcb_xrm_insert_component(entry, CT_NORMAL, state->current_binding_type, state->buffer);
    }

    FREE(state->buffer);
    /* No need to handle NULL for this calloc call. */
    /* (On failure buffer_pos becomes NULL and xcb_xrm_append_char re-runs
     * the setup on the next append.) */
    state->buffer = calloc(1, BUFFER_SIZE);
    state->buffer_pos = state->buffer;
    /* Bindings default to tight until a '*' is seen before the next component. */
    state->current_binding_type = BT_TIGHT;
}
/*
 * Parses a specific resource string.
 *
 * @param str The resource string.
 * @param entry A return struct that will contain the parsed resource. The
 * memory will be allocated dynamically, so it must be freed.
 * @param resource_only If true, only components of type CT_NORMAL are allowed.
 *
 * @return 0 on success, a negative error code otherwise.
 *
 */
int xcb_xrm_entry_parse(const char *_str, xcb_xrm_entry_t **_entry, bool resource_only) {
    char *str;
    xcb_xrm_entry_t *entry = NULL;
    xcb_xrm_component_t *last;
    char *value;
    char *value_walk;
    xcb_xrm_binding_type_t binding_type;
    xcb_xrm_entry_parser_state_t state = {
        .chunk = CS_INITIAL,
        .current_binding_type = BT_TIGHT,
    };

    /* Copy the input string since it's const. */
    str = strdup(_str);
    if (str == NULL)
        return -FAILURE;

    /* This is heavily overestimated, but we'll just keep it simple here.
     * While this does not account for replacement of magic values, those only
     * make the resulting string shorter than the input, so we're okay. */
    value = calloc(1, strlen(str));
    if (value == NULL) {
        FREE(str);
        return -FAILURE;
    }
    value_walk = value;

    /* Allocate memory for the return parameter. */
    *_entry = calloc(1, sizeof(struct xcb_xrm_entry_t));
    /* Bugfix: check the allocation result (*_entry), not the out-parameter
     * pointer itself — the latter is never NULL here, so a failed calloc
     * previously went undetected and crashed in TAILQ_INIT below. */
    if (*_entry == NULL) {
        FREE(str);
        FREE(value);
        return -FAILURE;
    }
    entry = *_entry;
    TAILQ_INIT(&(entry->components));

    for (char *walk = str; *walk != '\0'; walk++) {
        switch (*walk) {
            case '.':
            case '*':
                state.chunk = MAX(state.chunk, CS_COMPONENTS);
                if (state.chunk >= CS_PRE_VALUE_WHITESPACE) {
                    /* Inside the value, '.' and '*' are ordinary characters. */
                    goto process_normally;
                }
                if (*walk == '*' && resource_only) {
                    goto done_error;
                }

                /* Subsequent bindings must be collapsed into a loose binding if at
                 * least one was a loose binding and a tight binding otherwise. */
                binding_type = (*walk == '*') ? BT_LOOSE : BT_TIGHT;
                while (*(walk + 1) == '.' || *(walk + 1) == '*') {
                    walk++;
                    if (*walk == '*') {
                        binding_type = BT_LOOSE;
                    }
                }

                xcb_xrm_finalize_component(entry, &state);
                state.current_binding_type = binding_type;
                break;
            case '?':
                state.chunk = MAX(state.chunk, CS_COMPONENTS);
                if (state.chunk >= CS_PRE_VALUE_WHITESPACE) {
                    goto process_normally;
                }
                if (resource_only) {
                    goto done_error;
                }

                xcb_xrm_insert_component(entry, CT_WILDCARD, state.current_binding_type, NULL);
                break;
            case ' ':
            case '\t':
                /* Spaces are only allowed in the value, but spaces between the
                 * ':' and the value are omitted. */
                if (state.chunk <= CS_PRE_VALUE_WHITESPACE) {
                    break;
                }

                goto process_normally;
            case ':':
                if (resource_only) {
                    goto done_error;
                }

                if (state.chunk == CS_INITIAL) {
                    /* An entry must not start with ':' — no resource name. */
                    goto done_error;
                } else if (state.chunk == CS_COMPONENTS) {
                    xcb_xrm_finalize_component(entry, &state);
                    state.chunk = CS_PRE_VALUE_WHITESPACE;
                    break;
                } else if (state.chunk >= CS_PRE_VALUE_WHITESPACE) {
                    /* A ':' inside the value is kept verbatim. */
                    state.chunk = CS_VALUE;
                    goto process_normally;
                }

                break;
            default:
process_normally:
                state.chunk = MAX(state.chunk, CS_COMPONENTS);
                if (state.chunk == CS_PRE_VALUE_WHITESPACE) {
                    state.chunk = CS_VALUE;
                }

                if (state.chunk == CS_COMPONENTS) {
                    /* Component names may only contain [-_a-zA-Z0-9]. */
                    if ((*walk != '_' && *walk != '-') &&
                            (*walk < '0' || *walk > '9') &&
                            (*walk < 'a' || *walk > 'z') &&
                            (*walk < 'A' || *walk > 'Z')) {
                        goto done_error;
                    }
                }

                if (state.chunk < CS_VALUE) {
                    xcb_xrm_append_char(entry, &state, *walk);
                } else {
                    /* Translate escape sequences in the value: "\ ", "\t",
                     * "\\", "\n" and three-digit octal "\nnn". Any other
                     * backslash is kept as-is. */
                    if (*walk == '\\') {
                        if (*(walk + 1) == ' ') {
                            *(value_walk++) = ' ';
                            walk++;
                        } else if (*(walk + 1) == '\t') {
                            *(value_walk++) = '\t';
                            walk++;
                        } else if (*(walk + 1) == '\\') {
                            *(value_walk++) = '\\';
                            walk++;
                        } else if (*(walk + 1) == 'n') {
                            *(value_walk++) = '\n';
                            walk++;
                        } else if (isdigit(*(walk + 1)) && isdigit(*(walk + 2)) && isdigit(*(walk + 3)) &&
                                *(walk + 1) < '8' && *(walk + 2) < '8' && *(walk + 3) < '8') {
                            *(value_walk++) = (*(walk + 1) - '0') * 64 + (*(walk + 2) - '0') * 8 + (*(walk + 3) - '0');
                            walk += 3;
                        } else {
                            *(value_walk++) = *walk;
                        }
                    } else {
                        *(value_walk++) = *walk;
                    }
                }

                break;
        }
    }

    if (state.chunk == CS_PRE_VALUE_WHITESPACE || state.chunk == CS_VALUE) {
        *value_walk = '\0';
        entry->value = strdup(value);
        if (entry->value == NULL)
            goto done_error;
    } else if (!resource_only) {
        /* Return error if there was no value for this entry. */
        goto done_error;
    } else {
        /* Since in the case of resource_only we never went into CS_VALUE, we
         * need to finalize the last component. */
        xcb_xrm_finalize_component(entry, &state);
    }

    /* Assert that this entry actually had a resource component. */
    if ((last = TAILQ_LAST(&(entry->components), components_head)) == NULL) {
        goto done_error;
    }

    /* Assert that the last component is not a wildcard. */
    if (last->type != CT_NORMAL) {
        goto done_error;
    }

    FREE(str);
    FREE(value);
    FREE(state.buffer);
    return 0;

done_error:
    FREE(str);
    FREE(value);
    FREE(state.buffer);

    xcb_xrm_entry_free(entry);
    *_entry = NULL;

    return -1;
}
/*
 * Returns the number of components of the given entry.
 *
 */
int __xcb_xrm_entry_num_components(xcb_xrm_entry_t *entry) {
    int count = 0;

    for (xcb_xrm_component_t *walk = TAILQ_FIRST(&(entry->components));
            walk != NULL;
            walk = TAILQ_NEXT(walk, components)) {
        count++;
    }

    return count;
}
/*
* Compares the two entries.
* Returns 0 if they are the same and a negative error code otherwise.
*
*/
int __xcb_xrm_entry_compare(xcb_xrm_entry_t *first, xcb_xrm_entry_t *second) {
    xcb_xrm_component_t *a;
    xcb_xrm_component_t *b;
    /* Walk both component lists in lockstep; any pairwise mismatch means the
     * entries differ. */
    for (a = TAILQ_FIRST(&(first->components)), b = TAILQ_FIRST(&(second->components));
         a != NULL && b != NULL;
         a = TAILQ_NEXT(a, components), b = TAILQ_NEXT(b, components)) {
        if (a->type != b->type)
            return -FAILURE;
        if (a->binding_type != b->binding_type)
            return -FAILURE;
        /* Names are only compared for normal components. */
        if (a->type == CT_NORMAL && strcmp(a->name, b->name) != 0)
            return -FAILURE;
    }
    /* If exactly one cursor reached the end, the lists have different
     * lengths and therefore cannot be equal. */
    if (a != b)
        return -FAILURE;
    return SUCCESS;
}
/*
* Returns a string representation of this entry.
*
*/
/*
 * Builds "<binding-joined components>: <escaped value>" as a freshly
 * allocated string, or returns NULL on allocation failure.
 */
char *__xcb_xrm_entry_to_string(xcb_xrm_entry_t *entry) {
    char *result = NULL;
    char *value_buf;
    char *escaped_value;
    xcb_xrm_component_t *component;
    bool is_first = true;
    assert(entry != NULL);
    TAILQ_FOREACH(component, &(entry->components), components) {
        char *tmp;
        /* The very first component carries no leading binding character when
         * tightly bound; otherwise prepend '.' (tight) or '*' (loose).
         * Non-normal components are rendered as the '?' wildcard. */
        if (asprintf(&tmp, "%s%s%s", result == NULL ? "" : result,
                    (is_first && component->binding_type == BT_TIGHT)
                        ? ""
                        : (component->binding_type == BT_TIGHT ? "." : "*"),
                    component->type == CT_NORMAL ? component->name : "?") < 0) {
            FREE(result);
            return NULL;
        }
        FREE(result);
        result = tmp;
        is_first = false;
    }
    /* entry->value can be NULL for entries parsed in resource-only mode;
     * substitute the empty string rather than escaping a NULL pointer. */
    escaped_value = __xcb_xrm_entry_escape_value(entry->value != NULL ? entry->value : "");
    if (escaped_value == NULL) {
        /* Out of memory while escaping. */
        FREE(result);
        return NULL;
    }
    /* result is NULL when the entry has no components; never pass NULL to %s. */
    if (asprintf(&value_buf, "%s: %s", result == NULL ? "" : result, escaped_value) < 0) {
        FREE(escaped_value);
        FREE(result);
        return NULL;
    }
    FREE(escaped_value);
    FREE(result);
    result = value_buf;
    return result;
}
/*
* Copy the entry.
*
*/
/*
 * Deep-copies the entry (value and all components).
 * Returns NULL on allocation failure.
 */
xcb_xrm_entry_t *__xcb_xrm_entry_copy(xcb_xrm_entry_t *entry) {
    xcb_xrm_entry_t *copy;
    xcb_xrm_component_t *component;
    assert(entry != NULL);
    copy = calloc(1, sizeof(struct xcb_xrm_entry_t));
    if (copy == NULL)
        return NULL;
    /* entry->value may be NULL (e.g. for resource-only entries that never
     * entered the value parsing state); strdup(NULL) is undefined behavior,
     * so only duplicate a value that is actually present. */
    if (entry->value != NULL) {
        copy->value = strdup(entry->value);
        if (copy->value == NULL) {
            FREE(copy);
            return NULL;
        }
    }
    TAILQ_INIT(&(copy->components));
    TAILQ_FOREACH(component, &(entry->components), components) {
        xcb_xrm_component_t *new = calloc(1, sizeof(struct xcb_xrm_component_t));
        if (new == NULL) {
            /* Frees the value and all components copied so far. */
            xcb_xrm_entry_free(copy);
            return NULL;
        }
        new->name = strdup(component->name);
        if (new->name == NULL) {
            xcb_xrm_entry_free(copy);
            FREE(new);
            return NULL;
        }
        new->type = component->type;
        new->binding_type = component->binding_type;
        TAILQ_INSERT_TAIL(&(copy->components), new, components);
    }
    return copy;
}
/*
* Escapes magic values.
*
*/
char *__xcb_xrm_entry_escape_value(const char *value) {
    const char *in;
    char *out;
    char *result;
    /* First pass: compute the size of the escaped string (incl. '\0'). */
    size_t required = strlen(value) + 1;
    /* A leading space or tab needs a protecting backslash. */
    if (value[0] == ' ' || value[0] == '\t')
        required++;
    for (in = value; *in != '\0'; in++) {
        if (*in == '\n' || *in == '\\')
            required++;
    }
    result = calloc(1, required);
    if (result == NULL)
        return NULL;
    /* Second pass: emit the escaped characters. */
    out = result;
    if (value[0] == ' ' || value[0] == '\t')
        *(out++) = '\\';
    for (in = value; *in != '\0'; in++) {
        switch (*in) {
            case '\n':
                *(out++) = '\\';
                *(out++) = 'n';
                break;
            case '\\':
                *(out++) = '\\';
                *(out++) = '\\';
                break;
            default:
                *(out++) = *in;
                break;
        }
    }
    *out = '\0';
    return result;
}
/*
* Frees the given entry.
*
* @param entry The entry to be freed.
*
*/
void xcb_xrm_entry_free(xcb_xrm_entry_t *entry) {
    xcb_xrm_component_t *component;
    if (entry == NULL)
        return;
    FREE(entry->value);
    /* Pop components off the head of the list until it is drained. */
    while ((component = TAILQ_FIRST(&(entry->components))) != NULL) {
        TAILQ_REMOVE(&(entry->components), component, components);
        FREE(component->name);
        FREE(component);
    }
    FREE(entry);
}
|
@file:JvmName("StringExtensionsParameters")
package net.tassia
import org.junit.jupiter.params.provider.Arguments
import org.junit.jupiter.params.provider.Arguments.arguments
import java.util.stream.Stream
/**
 * Supplies (first, second, expected) cases for the equals-ignore-case tests.
 */
fun provideEQIC(): Stream<Arguments> {
    val cases = listOf(
        Triple("Hello World!", "hello world!", true),
        Triple("hello world!", "Hello World!", true),
        Triple("Hello World!", "Hello World!", true),
        Triple("", "", true),
        Triple("Hello World!", "World Hello!", false),
    )
    return cases.stream().map { (first, second, expected) -> arguments(first, second, expected) }
}
|
module PrettyTopLevel where
import Core
import Pretty
import PrettyExpr
import SourceMap (SourceMap, lookupDef)
import Data.ByteString.Char8 (ByteString, unpack)
-- | Pairs a source map (for resolving names to readable text) with a
--   top-level environment so the environment can be pretty-printed.
data PrettyTopLevel =
  PrettyTopLevel (SourceMap ByteString) (TopLevelEnv ByteString)
-- Show delegates to the Pretty instance at precedence 0, wrapped in parens.
instance Show (PrettyTopLevel) where
  show = paren . pretty 0
-- Renders each binding as "<name>: <expr>", one per line, resolving the key
-- through the source map and printing the expression via PrettyExpr.
instance Pretty PrettyTopLevel where
  pretty _ (PrettyTopLevel sm (TopLevelEnv tle)) =
    let ls = (\(k,v) -> unpack (lookupDef k sm) <> ": " <> show (PrettyExpr sm v)) <$> tle
    in simple (unlines ls)
|
# Interactor that fills the context's transaction from the context's cart:
# the cart total is recorded as a negative amount and every cart item is
# mirrored as a built transaction item.
class PopulateCartPaymentTransaction
  include Interactor

  def call
    transaction = context.transaction
    transaction.transaction_type = 'cart_payment'
    transaction.amount = -context.cart.total_price

    context.cart.cart_items.each do |item|
      transaction.transaction_items.build(
        price: item.unit_price,
        quantity: item.quantity,
        name: item.product_name,
        product: item.product
      )
    end
  end
end
|
-- Audit table: one row per user sign-in event.
CREATE TABLE sys."Login"
(
    id bigint PRIMARY KEY NOT NULL,
    "userId" bigint NOT NULL,
    type smallint NOT NULL,
    date timestamp(6) NOT NULL
);
-- Catalog column comments. The literal text below is stored metadata
-- (Chinese: id = "unique identifier", userId = "user id",
-- type = "login type: 0 = password login, 1 = verification-code login",
-- date = "creation date") and is left byte-identical.
COMMENT ON COLUMN sys."Login".id IS '唯一标识';
COMMENT ON COLUMN sys."Login"."userId" IS '用户id';
COMMENT ON COLUMN sys."Login".type IS '类型
0-密码登录
1-验证码登录';
COMMENT ON COLUMN sys."Login".date IS '创建日期';
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488365732208, 0, 0, '2017-03-01 10:55:32.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488379391681, 1488378558927, 0, '2017-03-01 14:43:11.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488379908786, 1488378449469, 0, '2017-03-01 14:51:48.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488379961820, 1488379935755, 0, '2017-03-01 14:52:41.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488386227319, 1488380023998, 0, '2017-03-01 16:37:07.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488387166592, 1488378449469, 0, '2017-03-01 16:52:46.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488423710531, 1488423676823, 0, '2017-03-02 03:01:50.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488428867991, 1488428734202, 0, '2017-03-02 04:27:47.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488473261705, 1488473066471, 0, '2017-03-02 16:47:41.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488516623869, 1488378449469, 0, '2017-03-03 04:50:23.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488540997715, 1488540991808, 0, '2017-03-03 11:36:37.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488541075533, 1488541028865, 0, '2017-03-03 11:37:55.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488541560585, 1488541531131, 0, '2017-03-03 11:46:00.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488569711657, 1488569508197, 0, '2017-03-03 19:35:11.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488569782719, 1488569732797, 0, '2017-03-03 19:36:22.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488569807192, 1488569798561, 0, '2017-03-03 19:36:47.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488572273190, 1488572225956, 0, '2017-03-03 20:17:53.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488572823466, 1488569798561, 0, '2017-03-03 20:27:03.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488572844863, 1488572838263, 0, '2017-03-03 20:27:24.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488572852849, 1488572838263, 0, '2017-03-03 20:27:32.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488572994566, 1488572838263, 0, '2017-03-03 20:29:54.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488602587483, 1488602583693, 0, '2017-03-04 04:43:07.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488602732477, 1488602583693, 0, '2017-03-04 04:45:32.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488602739644, 1488569508197, 0, '2017-03-04 04:45:39.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488616804093, 82012, 0, '2017-03-04 09:30:21.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488619853762, 82012, 0, '2017-03-04 09:30:53.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488619853763, 1488621574081, 0, '2017-03-04 09:59:34.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488621807871, 1488621574081, 0, '2017-03-04 10:03:27.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488621807872, 1488621574081, 0, '2017-03-04 10:03:43.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488621827734, 1488621574081, 0, '2017-03-04 10:03:47.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488621827735, 1488621574081, 0, '2017-03-04 10:04:03.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488621846267, 1488621574081, 0, '2017-03-04 10:04:06.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488621846268, 1488621873562, 0, '2017-03-04 10:04:33.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488621876782, 1488621873562, 0, '2017-03-04 10:04:36.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488621876783, 1488621904086, 0, '2017-03-04 10:05:04.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488622533567, 1488621904086, 0, '2017-03-04 10:15:33.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488622533568, 1488622827857, 0, '2017-03-04 10:20:27.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488622831418, 1488622827857, 0, '2017-03-04 10:20:31.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488622831419, 1488473066471, 0, '2017-03-04 10:21:52.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488622919890, 1488473066471, 0, '2017-03-04 10:21:59.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488622919891, 1488622959038, 0, '2017-03-04 10:22:39.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488623021260, 1488622959038, 0, '2017-03-04 10:23:41.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488623021261, 1488622959038, 0, '2017-03-04 10:25:02.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488623107782, 1488622959038, 0, '2017-03-04 10:25:07.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488623107783, 1488622959038, 0, '2017-03-04 14:23:31.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488638599393, 1488622959038, 0, '2017-03-04 14:43:19.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488638599394, 1488622959038, 0, '2017-03-04 15:07:50.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488640073476, 1488622959038, 0, '2017-03-04 15:07:53.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488640255126, 1488640277910, 0, '2017-03-04 15:11:18.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488640325578, 1488640277910, 0, '2017-03-04 15:12:05.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488640325579, 1488640277910, 0, '2017-03-04 15:12:08.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488640330490, 1488640277910, 0, '2017-03-04 15:12:10.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488640330491, 1488640277910, 0, '2017-03-04 15:59:25.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488643309485, 1488640277910, 0, '2017-03-04 16:01:49.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488643309486, 1488643325534, 0, '2017-03-04 16:02:05.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488643330578, 1488643325534, 0, '2017-03-04 16:02:10.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488643414031, 1488643442503, 0, '2017-03-04 16:04:02.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488643446184, 1488643442503, 0, '2017-03-04 16:04:06.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488645359252, 82012, 0, '2017-03-04 16:43:41.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488645825647, 82012, 0, '2017-03-04 16:43:45.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488645825648, 82012, 0, '2017-03-04 16:44:32.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488645964496, 82012, 0, '2017-03-04 16:46:04.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488645964497, 82012, 0, '2017-03-04 16:46:06.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488645968694, 82012, 0, '2017-03-04 16:46:08.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707458563, 1488643442503, 0, '2017-03-05 09:51:32.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707494290, 1488643442503, 0, '2017-03-05 09:51:34.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707494291, 1488707511472, 0, '2017-03-05 09:51:51.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707514358, 1488707511472, 0, '2017-03-05 09:51:54.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707514359, 1488707511472, 1, '2017-03-05 09:52:15.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707539344, 1488707511472, 0, '2017-03-05 09:52:19.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707539345, 1488707572184, 0, '2017-03-05 09:52:52.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707575181, 1488707572184, 0, '2017-03-05 09:52:55.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707575182, 1488707617655, 0, '2017-03-05 09:53:37.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707626071, 1488707617655, 0, '2017-03-05 09:53:46.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707626072, 1488707617655, 0, '2017-03-05 09:53:52.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707635801, 1488707617655, 0, '2017-03-05 09:53:55.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707635802, 1488707617655, 0, '2017-03-05 09:57:26.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707850222, 1488707617655, 0, '2017-03-05 09:57:30.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707850223, 1488707874944, 0, '2017-03-05 09:57:55.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707877660, 1488707874944, 0, '2017-03-05 09:57:57.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707877661, 1488707874944, 1, '2017-03-05 09:58:27.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488707915649, 1488707874944, 0, '2017-03-05 09:58:35.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488727516722, 1488727542397, 0, '2017-03-05 15:25:42.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488727548031, 1488727542397, 0, '2017-03-05 15:25:48.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488803302239, 1488727542397, 0, '2017-03-06 12:28:24.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488803306640, 1488727542397, 0, '2017-03-06 12:28:26.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488803306641, 1488803343874, 0, '2017-03-06 12:29:04.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488803346374, 1488803343874, 0, '2017-03-06 12:29:06.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488803346375, 1488803343874, 0, '2017-03-06 15:06:09.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1488812773144, 1488803343874, 0, '2017-03-06 15:06:13.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489244600336, 1489244640435, 0, '2017-03-11 15:04:00.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489244647438, 1489244640435, 0, '2017-03-11 15:04:07.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489244647439, 1489244640435, 1, '2017-03-11 15:04:25.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489244669153, 1489244640435, 0, '2017-03-11 15:04:29.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489246281612, 1489244640435, 0, '2017-03-11 15:31:37.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489246300085, 1489244640435, 0, '2017-03-11 15:31:40.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489246300086, 1489244640435, 0, '2017-03-11 15:32:00.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489246323014, 1489244640435, 0, '2017-03-11 15:32:03.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489246323015, 1489246345610, 0, '2017-03-11 15:32:25.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489246350667, 1489246345610, 0, '2017-03-11 15:32:30.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489298452742, 1488727542397, 0, '2017-03-12 06:01:02.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489298464822, 1488727542397, 0, '2017-03-12 06:01:04.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489298464823, 1489298483829, 0, '2017-03-12 06:01:23.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489298490008, 1489298483829, 0, '2017-03-12 06:01:30.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489298490009, 82005, 0, '2017-03-12 06:02:12.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489298931649, 82005, 0, '2017-03-12 06:08:53.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489298971069, 82005, 0, '2017-03-12 06:09:31.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489298971070, 82005, 0, '2017-03-12 06:09:40.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489299084011, 82005, 0, '2017-03-12 06:11:24.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489299139305, 90814, 0, '2017-03-12 06:12:23.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489317763943, 1489317784114, 0, '2017-03-12 11:23:04.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489317856607, 1489317784114, 0, '2017-03-12 11:24:16.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489934937901, 1489934955220, 0, '2017-03-19 14:49:15.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1489934967736, 1489934955220, 0, '2017-03-19 14:49:27.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490105370959, 1490105418731, 0, '2017-03-21 14:10:18.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490105432172, 1490105418731, 0, '2017-03-21 14:10:32.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490109211714, 1490109742863, 0, '2017-03-21 15:22:23.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490109746858, 1490109742863, 0, '2017-03-21 15:22:26.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490109746859, 1490109845208, 0, '2017-03-21 15:24:05.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490109847412, 1490109845208, 0, '2017-03-21 15:24:07.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490109847413, 1490109845208, 1, '2017-03-21 15:25:39.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490109943463, 1490109845208, 0, '2017-03-21 15:25:43.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490420549513, 1488707874944, 0, '2017-03-25 05:43:30.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490420612726, 1488707874944, 0, '2017-03-25 05:43:32.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490420612727, 1490420651686, 0, '2017-03-25 05:44:11.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490420694018, 1490420651686, 0, '2017-03-25 05:44:54.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490425995551, 1490427139175, 0, '2017-03-25 07:32:19.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490427142481, 1490427139175, 0, '2017-03-25 07:32:22.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490427142482, 1490427139175, 0, '2017-03-25 07:32:25.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490427147907, 1490427139175, 0, '2017-03-25 07:32:27.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490427147908, 1490427139175, 1, '2017-03-25 07:32:46.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490427169820, 1490427139175, 0, '2017-03-25 07:32:49.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490427169821, 1490427139175, 1, '2017-03-25 07:36:09.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490427460928, 1490427139175, 0, '2017-03-25 07:37:40.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490427550424, 1490427577823, 0, '2017-03-25 07:39:37.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490427581040, 1490427577823, 0, '2017-03-25 07:39:41.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490584927873, 1490584952968, 0, '2017-03-27 03:22:33.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490584967616, 1490584952968, 0, '2017-03-27 03:22:47.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490585175679, 1490585192903, 0, '2017-03-27 03:26:33.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490585175680, 1490585226494, 0, '2017-03-27 03:27:06.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490585175681, 1490586230028, 0, '2017-03-27 03:43:50.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490586242829, 1490586230028, 0, '2017-03-27 03:44:02.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490586242830, 1490586417277, 0, '2017-03-27 03:46:57.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490586420868, 1490586417277, 0, '2017-03-27 03:47:00.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490586420869, 1490587219677, 0, '2017-03-27 04:00:20.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490587222853, 1490587219677, 0, '2017-03-27 04:00:22.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490587222854, 1490587219677, 0, '2017-03-27 04:00:30.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490587232018, 1490587219677, 0, '2017-03-27 04:00:32.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490763654616, 1489317784114, 0, '2017-03-29 05:01:03.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490763665719, 1489317784114, 0, '2017-03-29 05:01:05.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490763665720, 1490763680229, 0, '2017-03-29 05:01:20.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490763684749, 1490763680229, 0, '2017-03-29 05:01:24.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490763684750, 1490763889677, 0, '2017-03-29 05:04:49.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490763892907, 1490763889677, 0, '2017-03-29 05:04:52.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490763892908, 1490763889677, 1, '2017-03-29 05:09:04.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490764146839, 1490763889677, 0, '2017-03-29 05:09:06.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490764146840, 1490763889677, 0, '2017-03-29 05:09:17.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490764160920, 1490763889677, 0, '2017-03-29 05:09:20.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490796426168, 1490796536716, 0, '2017-03-29 14:08:56.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490796539768, 1490796536716, 0, '2017-03-29 14:08:59.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490796539769, 1490796536716, 1, '2017-03-29 14:09:25.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490796612462, 1490796536716, 0, '2017-03-29 14:10:12.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490796612463, 1490796536716, 0, '2017-03-29 14:10:14.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490797130482, 1490796536716, 0, '2017-03-29 14:18:50.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490797130483, 1490796536716, 0, '2017-03-29 14:21:17.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490799078694, 1490796536716, 0, '2017-03-29 14:51:18.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490799078695, 1490796536716, 0, '2017-03-29 15:04:49.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490863478648, 82003, 0, '2017-03-30 08:44:40.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490863574695, 82003, 0, '2017-03-30 08:46:14.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490863574696, 82005, 0, '2017-03-30 08:46:16.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490863617906, 82005, 0, '2017-03-30 08:46:57.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490863617907, 70793, 1, '2017-03-30 08:47:12.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490863735458, 70793, 0, '2017-03-30 08:48:55.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490863735459, 93793, 0, '2017-03-30 08:49:01.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490863793209, 93793, 0, '2017-03-30 08:49:53.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490863793210, 82006, 0, '2017-03-30 08:50:07.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490864162242, 82006, 0, '2017-03-30 08:56:02.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490864162243, 82044, 1, '2017-03-30 08:56:39.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490864359458, 82044, 0, '2017-03-30 08:59:19.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490874790302, 82040, 0, '2017-03-30 11:53:14.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490874856899, 82040, 0, '2017-03-30 11:54:16.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490874856900, 82055, 0, '2017-03-30 11:54:22.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490875711368, 82055, 0, '2017-03-30 12:08:31.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490875711369, 82056, 0, '2017-03-30 12:08:37.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490875721491, 82056, 0, '2017-03-30 12:08:41.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490875721492, 82060, 0, '2017-03-30 12:08:43.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490875725467, 82060, 0, '2017-03-30 12:08:45.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490875725468, 1490875855144, 0, '2017-03-30 12:10:55.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490875857334, 1490875855144, 0, '2017-03-30 12:10:57.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490880027108, 82054, 0, '2017-03-30 13:20:27.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490880030859, 82054, 0, '2017-03-30 13:20:30.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490880030860, 1490880220255, 0, '2017-03-30 13:23:40.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490880236865, 1490880220255, 0, '2017-03-30 13:23:56.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490880236866, 1490880254410, 0, '2017-03-30 13:24:14.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490880256555, 1490880254410, 0, '2017-03-30 13:24:16.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490973548451, 1490973670928, 0, '2017-03-31 15:21:11.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490974102842, 1490973670928, 0, '2017-03-31 15:28:22.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490974212206, 70793, 0, '2017-03-31 15:30:27.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1490974347168, 70793, 0, '2017-03-31 15:32:27.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491014963729, 82049, 0, '2017-04-01 02:49:29.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491014970908, 82049, 0, '2017-04-01 02:49:30.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491014970909, 82051, 0, '2017-04-01 02:49:32.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491014975055, 82051, 0, '2017-04-01 02:49:35.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491014975056, 1490420651686, 0, '2017-04-01 02:49:37.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491014978929, 1490420651686, 0, '2017-04-01 02:49:38.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491014978930, 1491015049274, 0, '2017-04-01 02:50:49.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491015064226, 1491015049274, 0, '2017-04-01 02:51:04.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491015064227, 70793, 0, '2017-04-01 02:57:27.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491130974601, 82049, 0, '2017-04-02 11:03:06.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491130988304, 82049, 0, '2017-04-02 11:03:08.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491130988305, 82050, 0, '2017-04-02 11:03:10.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491130992091, 82050, 0, '2017-04-02 11:03:12.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491130992092, 1490420651686, 0, '2017-04-02 11:03:13.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491130996226, 1490420651686, 0, '2017-04-02 11:03:16.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491130996227, 1491131016872, 0, '2017-04-02 11:03:37.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491131020967, 1491131016872, 0, '2017-04-02 11:03:40.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491131114629, 1491131208618, 0, '2017-04-02 11:06:48.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491131215621, 1491131208618, 0, '2017-04-02 11:06:55.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491131215622, 1491131208618, 0, '2017-04-02 12:32:26.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491136484469, 1491131208618, 0, '2017-04-02 12:34:44.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491137049692, 1491137170621, 0, '2017-04-02 12:46:10.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491137175158, 1491137170621, 0, '2017-04-02 12:46:15.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491137175159, 70793, 0, '2017-04-02 12:46:27.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491210186666, 82046, 0, '2017-04-03 09:05:37.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491210340156, 82046, 0, '2017-04-03 09:05:40.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491210340157, 82041, 0, '2017-04-03 09:05:41.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491210344197, 82041, 0, '2017-04-03 09:05:44.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491210344198, 1491210361659, 1, '2017-04-03 09:06:23.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491210385826, 1491210361659, 0, '2017-04-03 09:06:25.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491210385827, 1491210948591, 0, '2017-04-03 09:15:48.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491210951970, 1491210948591, 0, '2017-04-03 09:15:51.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491210951971, 1491210948591, 1, '2017-04-03 09:16:01.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491210964359, 1491210948591, 0, '2017-04-03 09:16:04.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491210964360, 1491210948591, 0, '2017-04-03 09:16:07.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491210969546, 1491210948591, 0, '2017-04-03 09:16:09.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491231490642, 82003, 0, '2017-04-03 14:58:13.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491231560497, 82003, 0, '2017-04-03 14:59:20.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491231560498, 93793, 0, '2017-04-03 14:59:31.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491231602048, 93793, 0, '2017-04-03 15:00:02.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491231602049, 93793, 0, '2017-04-03 15:09:42.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491232187135, 93793, 0, '2017-04-03 15:09:47.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491278106043, 1490109742863, 0, '2017-04-04 03:55:15.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491278117918, 1490109742863, 0, '2017-04-04 03:55:17.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491278117919, 1490427577823, 0, '2017-04-04 03:55:19.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491278121481, 1490427577823, 0, '2017-04-04 03:55:21.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491278121482, 1491278203315, 0, '2017-04-04 03:56:43.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491283571224, 1491278203315, 0, '2017-04-04 05:26:11.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491283708324, 1491210314249, 1, '2017-04-04 05:28:55.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491283800948, 1491210314249, 0, '2017-04-04 05:30:00.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491706177615, 1491706263570, 0, '2017-04-09 02:51:03.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491713830487, 1491713931091, 0, '2017-04-09 04:58:51.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491713972850, 1491713931091, 0, '2017-04-09 04:59:32.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491728210153, 1490427139175, 0, '2017-04-09 08:56:53.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491728216317, 1490427139175, 0, '2017-04-09 08:56:56.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491728216318, 82054, 0, '2017-04-09 08:56:58.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491728221137, 82054, 0, '2017-04-09 08:57:01.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491728221138, 1491728303733, 0, '2017-04-09 08:58:23.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491728316688, 1491728303733, 0, '2017-04-09 08:58:36.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491798585269, 1490420651686, 0, '2017-04-10 04:30:17.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491798619163, 1490420651686, 0, '2017-04-10 04:30:19.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491798619164, 1491015049274, 0, '2017-04-10 04:30:21.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491798623156, 1491015049274, 0, '2017-04-10 04:30:23.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491798623157, 1491798705995, 0, '2017-04-10 04:31:46.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491798824157, 1491798705995, 0, '2017-04-10 04:33:44.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491830822528, 1491830893899, 0, '2017-04-10 13:28:14.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491831356223, 1491830893899, 0, '2017-04-10 13:35:56.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491838437130, 1491838521279, 0, '2017-04-10 15:35:21.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491838535040, 1491838521279, 0, '2017-04-10 15:35:35.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491917352614, 1491728303733, 0, '2017-04-11 13:29:22.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491917364596, 1491728303733, 0, '2017-04-11 13:29:24.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491917364597, 1491917447333, 0, '2017-04-11 13:30:47.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1491917916123, 1491917447333, 0, '2017-04-11 13:38:36.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1492865677465, 82058, 0, '2017-04-22 12:54:45.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1492865687807, 82058, 0, '2017-04-22 12:54:47.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1492865687808, 1492866224074, 0, '2017-04-22 13:03:44.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1492866227861, 1492866224074, 0, '2017-04-22 13:03:47.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1492866227862, 1492866224074, 0, '2017-04-22 13:03:52.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1492866235005, 1492866224074, 0, '2017-04-22 13:03:55.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1492866235006, 1492866322486, 0, '2017-04-22 13:05:22.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1492866325550, 1492866322486, 0, '2017-04-22 13:05:25.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1492936150349, 1492936169722, 0, '2017-04-23 08:29:30.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1492936172897, 1492936169722, 0, '2017-04-23 08:29:32.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1492936172898, 1492936169722, 0, '2017-04-23 08:33:44.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1492936427137, 1492936169722, 0, '2017-04-23 08:33:47.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1492936427138, 1492936169722, 0, '2017-04-23 08:37:29.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1492936651770, 1492936169722, 0, '2017-04-23 08:37:31.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493456282571, 90814, 0, '2017-04-29 08:58:09.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493457036233, 90814, 0, '2017-04-29 09:10:36.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493480121888, 1490427139175, 0, '2017-04-29 15:35:26.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493480129111, 1490427139175, 0, '2017-04-29 15:35:29.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493480129112, 1493480142628, 0, '2017-04-29 15:35:42.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493480148564, 1493480142628, 0, '2017-04-29 15:35:48.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493480148565, 1493480142628, 0, '2017-04-29 15:35:54.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493480156757, 1493480142628, 0, '2017-04-29 15:35:56.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493480156758, 90814, 0, '2017-04-29 15:36:01.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493480162695, 90814, 0, '2017-04-29 15:36:02.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493480162696, 93793, 0, '2017-04-29 15:36:06.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493480189011, 93793, 0, '2017-04-29 15:36:29.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493747501699, 1493747512860, 0, '2017-05-02 17:51:53.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493747519493, 1493747512860, 0, '2017-05-02 17:51:59.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493747519494, 1493747777770, 0, '2017-05-02 17:56:17.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493747780534, 1493747777770, 0, '2017-05-02 17:56:20.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493748571679, 1493748594003, 0, '2017-05-02 18:09:54.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493748596459, 1493748594003, 0, '2017-05-02 18:09:56.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493748596460, 1493748615711, 0, '2017-05-02 18:10:15.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493748617966, 1493748615711, 0, '2017-05-02 18:10:17.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493748617967, 1493749090643, 0, '2017-05-02 18:18:10.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493749100206, 1493749090643, 0, '2017-05-02 18:18:20.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493836047659, 1493836043151, 0, '2017-05-03 18:27:27.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493836049490, 1493836043151, 0, '2017-05-03 18:27:29.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493883116023, 1493883110132, 0, '2017-05-04 07:31:56.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493883118007, 1493883110132, 0, '2017-05-04 07:31:58.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493890214303, 1493890214167, 0, '2017-05-04 09:30:14.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493890216183, 1493890214167, 0, '2017-05-04 09:30:16.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493890699755, 1493890303473, 0, '2017-05-04 09:38:19.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493890702129, 1493890303473, 0, '2017-05-04 09:38:22.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493891565732, 82001, 0, '2017-05-04 09:52:45.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493891782837, 82001, 0, '2017-05-04 09:56:22.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493891784591, 82002, 0, '2017-05-04 09:56:24.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493891793881, 82002, 0, '2017-05-04 09:56:33.000000');
INSERT INTO sys."Login" (id, "userId", type, date) VALUES (1493891806372, 38710, 1, '2017-05-04 09:56:46.000000');
|
# Top-level module for the wave-equation simulator.
module WaveSimulator

using Base.Cartesian, ComputationalResources, TiledIteration, ProgressMeter, ValueHistories

# Public API: compute-resource selectors, wave/domain descriptions,
# the Simulator driver and its entry points.
export
    CPU1,
    CPUThreads,
    CUDALibs,
    UniformWave,
    BoxDomain,
    Simulator,
    simulate,
    simulate_gauss,
    simulate!,
    update!,
    toimage

# Core abstractions implemented by the included files below.
abstract type Domain{N} end                      # N-dimensional simulation domain
abstract type State{N,T,D<:Domain} end           # simulation state over a Domain D
abstract type Wave{N,T<:AbstractFloat} end       # wave description with element type T
abstract type Backend{R<:AbstractResource} end   # compute backend tied to a resource R
abstract type CPUBackend{R<:AbstractResource} <: Backend{R} end

include("wave.jl")
include("simulator.jl")
include("boxdomain.jl")
include("utils.jl")
include("hooks.jl")
include("backends/cpu.jl")

using CUDAapi
# The CUDA backend is only compiled in when a CUDA toolkit is detected.
if has_cuda()
    include("backends/cuda.jl")
end

end
|
# Xhear 6.0
基于全新的 stanz 7 打造,大幅优化代码,性能更强;
<!-- 重构原因:因Xhear 6 添加 x-fill 特性后,开发风格大幅度往 x-fill 上靠,所以重构向x-fill更友好的风格; -->
<!-- # Xhear 5.0
将基于 stanz 6.0 开发,大幅度优化代码,提高兼容性;
将新增 `:attr` 等模板语法;
将采用 web components 方案封装,相比4.0性能更强,体积更小,使用更容易;
相比 xhear 4.0,优化事件绑定机制;
想要运行 watch编译,请将 stanz 6 放在与项目同一个目录;
# Xhear 4.0
基于新的 stanz 5.0 开发,增强性能,优化代码结构;
## Xhear 3.0
xhear 3.0 摆脱了jQuery,借鉴jQuery,脱胎于stanz;
相对于xhear2,大量精简代码,减少错误,增强性能;
### Xhear 2.0
Xhear 1.0 使用过程中发现很多问题,需要精简很多没用和容易出错的功能,干脆重构; -->
|
using System.Collections.Generic;
namespace Akismet.Net
{
    /// <summary>
    /// Describes a response from Akismet
    /// </summary>
    public class AkismetResponse
    {
        /// <summary>
        /// Indicates the status of the submitted comment
        /// </summary>
        public SpamStatus SpamStatus { get; set; }

        /// <summary>
        /// Value of X-akismet-pro-tip header, if present
        /// </summary>
        public string ProTip { get; set; }

        /// <summary>
        /// Errors if any (never null — initialized to an empty list)
        /// </summary>
        public List<string> AkismetErrors { get; } = new List<string>();

        /// <summary>
        /// Value of X-akismet-debug-help header, if present
        /// </summary>
        public string AkismetDebugHelp { get; set; }
    }

    /// <summary>
    /// Spam classification for a submitted comment.
    /// </summary>
    public enum SpamStatus
    {
        /// <summary>
        /// The comment was classified as spam.
        /// </summary>
        Spam,
        /// <summary>
        /// The comment was classified as ham (not spam).
        /// </summary>
        Ham,
        /// <summary>
        /// No classification was made.
        /// </summary>
        Unspecified
    }
}
|
package com.xplusj.factory;
import com.xplusj.ExpressionContext;
import com.xplusj.operator.unary.DefaultExpressionUnaryOperatorFactory;
import com.xplusj.operator.unary.UnaryOperator;
import com.xplusj.operator.unary.UnaryOperatorDefinition;
/**
 * Factory responsible for creating {@link UnaryOperator} instances
 * from their {@link UnaryOperatorDefinition}.
 */
public interface ExpressionUnaryOperatorFactory {

    /**
     * Creates a unary operator for the given definition.
     *
     * @param definition the operator definition to build from
     * @param context    the expression context the operator will run in
     * @return the created unary operator
     */
    UnaryOperator create(UnaryOperatorDefinition definition, ExpressionContext context);

    /**
     * @return the default factory implementation
     *         ({@link DefaultExpressionUnaryOperatorFactory})
     */
    static ExpressionUnaryOperatorFactory defaultFactory(){
        return DefaultExpressionUnaryOperatorFactory.getInstance();
    }
}
|
// Package b is a test fixture that deliberately participates in an
// import cycle (see the //@diag marker below) to exercise the
// "import cycle not allowed" diagnostic.
package b //@diag("", "go list", "import cycle not allowed")

import (
	"golang.org/x/tools/internal/lsp/circular/one"
)

// Test1 references the cyclic package so the import is used.
func Test1() {
	one.Test()
}
|
package com.mmoreno.favmovies.model
import androidx.lifecycle.LiveData
import com.mmoreno.favmovies.app.FavMoviesApplication
import com.mmoreno.favmovies.model.concurrency.ioThread
/**
 * Custom class following the repository pattern
 * for interacting with the Movie table.
 * @author [Mario]
 */
class MovieRepository : Repository<Movie, Int> {

    // DAO obtained from the application-wide database singleton.
    private val movieDao = FavMoviesApplication.database.movieDao()

    /**
     * Adds a new movie record via the ioThread helper
     * (presumably off the main thread — confirm ioThread's contract).
     * @param [entity] instance of [Movie] to be saved
     * @return [Unit]
     */
    override fun add(entity: Movie) {
        ioThread {
            movieDao.insert(entity)
        }
    }

    /**
     * Deletes a movie record.
     * @param [entity] instance of [Movie] to be deleted (was mis-tagged @author)
     * @return [Unit]
     */
    override fun delete(entity: Movie) {
        ioThread {
            movieDao.delete(entity)
        }
    }

    /**
     * Updates a movie record.
     * @param [entity] instance of [Movie] to be updated
     * @return [Unit]
     */
    override fun update(entity: Movie) {
        ioThread {
            movieDao.update(entity)
        }
    }

    /**
     * Retrieves a single movie record.
     * @param [id] unique id of the Movie record
     * @return [LiveData] emitting the matching [Movie]
     */
    override fun findById(id: Int): LiveData<Movie> {
        return movieDao.findById(id)
    }

    /**
     * Retrieves all Movie records saved in the Movie table.
     * @return [LiveData] emitting the full list of movies
     */
    override fun getAll(): LiveData<MutableList<Movie>> = movieDao.getMovies()
}
|
# Force the Rails test environment before the app environment loads.
ENV["RAILS_ENV"] = "test"
require File.expand_path(File.dirname(__FILE__) + "/../config/environment")
require 'test_help'
$LOAD_PATH.unshift File.dirname(__FILE__) # NEEDED for rake test:coverage
class ActiveSupport::TestCase
  # Load every fixture for every test; the test data relies on this.
  fixtures :all

  # NOTE(review): the CAS login/logout helpers below are disabled.
  # Confirm whether they can be deleted or must be restored before
  # auth-dependent tests are trusted.
  # def login_as( user=nil )
  # uid = ( user.is_a?(User) ) ? user.uid : user
  # if !uid.blank?
  # @request.session[:uid] = uid
  # User.find_or_create_by_uid(uid)
  # end
  # end
  # alias :login :login_as
  # alias :log_in :login_as
  #
  # def assert_redirected_to_login
  # assert_response :redirect
  ## puts @response.inspect
  # assert_match "login", @response.redirected_to
  ## assert_match "https://auth-test.berkeley.edu/cas/login",
  ## @response.redirected_to
  # end
  #
  # def assert_redirected_to_logout
  # assert_response :redirect
  # assert_match "logout", @response.redirected_to
  ## assert_match "https://auth-test.berkeley.edu/cas/logout",
  ## @response.redirected_to
  # end
end
class ActionController::TestCase
  # Run every controller test over HTTPS — presumably because the app
  # enforces SSL in filters; confirm against the controllers.
  setup :turn_https_on
end
|
import config from "./config";
import { HttpService } from "./http";

// Build the HTTP service; verbose logging only when debug is enabled.
const { app } = new HttpService({
  logger: config.debug ? "debug" : "warn",
});

// Start listening on the configured host/port; fail fast on startup errors.
app.listen(+config.port, config.host, err => {
  if (err) throw err;
});
|
from smtplib import SMTPException
from django.conf import settings
from django.core.mail import send_mail
from PIL import Image
def _send_like_notification(message, recipient, site_service_email):
    """Send one mutual-like notification to ``recipient``.

    On SMTP failure the error is reported to the service mailbox instead
    of being raised, so one broken address never aborts the other send
    (this mirrors the original duplicated try/except blocks).
    """
    try:
        send_mail(
            'Взаимная симпатия!',
            message, site_service_email,
            [recipient],
            fail_silently=False
        )
    except SMTPException as err:
        err_message = f'{recipient} - {type(err)}'
        send_mail(
            'Ошибка отправки сообщения',
            err_message, site_service_email,
            [site_service_email]
        )


def send_mail_notify(client_1, client_2):
    """Notify both participants of a mutual like by e-mail.

    :param client_1: user object exposing ``username`` and ``email``
        attributes.
    :param client_2: mapping with ``'username'`` and ``'email'`` keys.
    """
    site_service_email = settings.EMAIL_HOST_USER
    message_follower = (
        f'Вы понравились {client_2["username"]}! ' +
        f'Почта участника: {client_2["email"]}'
    )
    message_person = (
        f'Вы понравились {client_1.username}! ' +
        f'Почта участника: {client_1.email}'
    )
    # Same behavior as before, with the duplicated send/except logic
    # factored into a single helper.
    _send_like_notification(message_follower, client_1.email,
                            site_service_email)
    _send_like_notification(message_person, client_2['email'],
                            site_service_email)
def set_watermark_full_filling(input_url):
    """Overlay the watermark onto the user's avatar image.

    The uploaded image is resized/cropped to the watermark's dimensions
    (via get_perfect_size_image), the watermark is pasted on top using
    itself as the paste mask (its alpha channel, assuming it is RGBA —
    TODO confirm), and the result overwrites the original file.
    """
    # Watermark goes at the top-left; since the canvas has exactly the
    # watermark's size, it covers the whole image.
    position = (0, 0)
    image_url = str(input_url)
    watermark_url = settings.STATIC_WATERMARK
    watermark = Image.open(watermark_url)
    width, height = watermark.size
    input_image = Image.open(image_url)
    output_image = get_perfect_size_image(input_image, width, height)
    # Compose on a transparent RGBA canvas so the mask is respected,
    # then flatten back to RGB before saving (e.g. for JPEG output).
    combined_image = Image.new('RGBA', (width, height), (0, 0, 0, 0))
    combined_image.paste(output_image, (0, 0))
    combined_image.paste(watermark, position, mask=watermark)
    output_image = combined_image.convert('RGB')
    output_image.save(image_url)
def get_perfect_size_image(input_image, water_width, water_height):
    """Adapt the user's image to the watermark's exact dimensions.

    The image is center-cropped to the watermark's aspect ratio and then
    resized to (water_width, water_height).

    :param input_image: source PIL image.
    :param water_width: target (watermark) width in pixels.
    :param water_height: target (watermark) height in pixels.
    :return: a PIL image of size (water_width, water_height).
    """
    img_width, img_height = input_image.size
    koef_water = water_width / water_height
    koef_img = img_width / img_height
    if koef_water == koef_img:
        # Aspect ratios already match: a plain resize is enough.
        # (Image.ANTIALIAS is deprecated in modern Pillow — equivalent
        # to Image.LANCZOS; kept for compatibility with this codebase.)
        return input_image.resize(
            (water_width, water_height),
            Image.ANTIALIAS
        )
    elif koef_water > koef_img:
        # Image is too tall for the target ratio: crop height so that
        # width / height' == koef_water.
        # BUGFIX: was `img_height * koef_img`, which reduces to
        # img_width and does not yield the watermark's aspect ratio.
        new_img_height = img_width / koef_water
        delta = int((img_height - new_img_height) // 2)
        cropped = input_image.crop((0, delta, img_width, img_height - delta))
    else:
        # Image is too wide: crop width so that width' / height ==
        # koef_water.
        # BUGFIX: was `img_width / koef_img`, which reduces to
        # img_height and does not yield the watermark's aspect ratio.
        new_img_width = img_height * koef_water
        delta = int((img_width - new_img_width) // 2)
        cropped = input_image.crop((delta, 0, img_width - delta, img_height))
    cropped = cropped.resize((water_width, water_height), Image.ANTIALIAS)
    return cropped
|
/*
Copyright (c) 2020-21 Project re-Isearch and its contributors: See CONTRIBUTORS.
It is made available and licensed under the Apache 2.0 license: see LICENSE
*/
/*@@@
File: dlist.cxx
Version: 1.00
$Revision: 1.2 $
Description: Class DATELIST
Author: Edward Zimmermann [email protected]
@@@*/
#include <stdlib.h>
#include <errno.h>
#include "dlist.hxx"
#include "magic.hxx"
#define DEBUG 0
typedef UINT4 _Count_t;
#define SIZEOF_HEADER sizeof(_Count_t)+1 /* size of magic */
static const size_t BASIC_CHUNK=50;
// Reset the logical state (count, cursor, match range, file name) while
// keeping the allocated table for reuse.
void DATELIST::Clear()
{
  Count      = 0;
  Pointer    = 0;
  StartIndex = 0;  // historically -1; 0 now marks "no range"
  EndIndex   = 0;
  FileName.Empty();
}
// Release the table storage, then reset all bookkeeping (a full Clear()
// plus deallocation).
void DATELIST::Empty()
{
  if (table)
    {
      delete [] table;
      table = NULL;
    }
  MaxEntries = 0;
  Count      = 0;
  Pointer    = 0;
  StartIndex = 0;  // historically -1
  EndIndex   = 0;
  FileName.Empty();
}
// Default-construct an empty list with two coordinates per entry.
DATELIST::DATELIST()
  : Ncoords(2), MaxEntries(0), table(NULL), Count(0),
    Pointer(0), StartIndex(0), EndIndex(0)
{
}
// Construct an empty list with n coordinates per entry.
DATELIST::DATELIST(INT n)
  : Ncoords(n), MaxEntries(0), table(NULL), Count(0),
    Pointer(0), StartIndex(0), EndIndex(0)
{
}
// Rewind hit iteration.  The negated relation (Relation == 1) scans the
// whole table from 0; every other relation starts at the matched range.
void DATELIST::ResetHitPosition()
{
  if (Relation == 1)
    Pointer = 0;
  else
    Pointer = StartIndex;
}
// Return the global start position (GP) of the next hit and advance the
// internal cursor; (GPTYPE)-1 signals exhaustion.  Relation == 1 is the
// negated case: positions inside [StartIndex, EndIndex] are skipped and
// the rest of the table is enumerated instead.
GPTYPE DATELIST::GetNextHitPosition()
{
  GPTYPE Value;
  // An empty match range means there is nothing to iterate at all.
  if(StartIndex == EndIndex) // was StartIndex == -1
    return((GPTYPE)-1);
  if(Relation != 1) {
    if(Pointer>EndIndex)
      return((GPTYPE)-1);
    Value=table[Pointer].GetGlobalStart();
    ++Pointer;
    return(Value);
  } else {
    // NOTE(review): once Pointer reaches StartIndex this loop jumps it
    // past EndIndex (over the excluded range), but the guard re-runs on
    // every call while Pointer >= StartIndex — confirm this is the
    // intended skip semantics.
    if(Pointer >= StartIndex)
      while(Pointer <= EndIndex)
        ++Pointer;
    if(Pointer >= Count)
      return((GPTYPE)-1);
    Value=table[Pointer].GetGlobalStart();
    ++Pointer;
    return(Value);
  }
}
// qsort comparator: order DATEFLD elements by date value via Compare().
static int SortCmp(const void* x, const void* y)
{
  return Compare( (*((PDATEFLD)x)).GetValue(), (*((PDATEFLD)y)).GetValue() );
}
static int SortCmpGP(const void* x, const void* y)
{
const GPTYPE gp1 = (*((PDATEFLD)x)).GetGlobalStart();
const GPTYPE gp2 = (*((PDATEFLD)y)).GetGlobalStart();
if (gp1 > gp2)
return 1;
if (gp1 < gp2)
return -1;
return 0;
}
// Ensure capacity for at least Entries elements, growing by a chunk to
// amortize reallocation cost.  Returns GDT_FALSE only on alloc failure.
GDT_BOOLEAN DATELIST::Expand(size_t Entries)
{
  return (Entries < MaxEntries)
      ? GDT_TRUE
      : Resize(Entries + (BASIC_CHUNK*Ncoords));
}
// Re-allocate the table to hold exactly Entries elements, copying the
// existing contents (truncating Count when shrinking).  Entries == 0 is
// equivalent to Clear().  Returns GDT_FALSE on allocation failure.
GDT_BOOLEAN DATELIST::Resize(size_t Entries)
{
  if (Entries == 0)
    {
      Clear();
      return GDT_TRUE;
    }
  DATEFLD *newTable;
  try {
    newTable = new DATEFLD[Entries];
  } catch (...) {
    message_log (LOG_PANIC, "Memory allocation failure. Can't build Date field array!");
    return GDT_FALSE;
  }
  // When shrinking, only the first Entries elements survive.
  if (Entries < Count)
    Count = Entries;
  for (size_t i = 0; i < Count; i++)
    newTable[i] = table[i];
  if (table)
    delete [] table;
  table      = newTable;
  MaxEntries = Entries;
  return GDT_TRUE;
}
// Sort the in-memory table by date value (ascending), see SortCmp().
void DATELIST::Sort()
{
  QSORT((void *)table, Count, sizeof(DATEFLD), SortCmp);
}
// Sort the in-memory table by global start position, see SortCmpGP().
void DATELIST::SortByGP()
{
  QSORT((void *)table, Count, sizeof(DATEFLD), SortCmpGP);
}
// Three-way probe used by the date binary search (DiskFind).
//   Key   -- the date being searched for.
//   A/B/C -- predecessor, probe and successor values around the current
//            search position.
//   Type  -- cursor position flag.  NOTE(review): the -1/0 literals
//            below presumably correspond to the AT_START/AT_END
//            sentinels used by the caller — confirm.
// Returns MATCH when B is the boundary element satisfying Relation,
// TOO_LOW/TOO_HIGH to steer the bisection, or NO_MATCH for an
// unsupported Relation (logged as a panic).
SearchState DATELIST::Matcher(const SRCH_DATE& Key,
        const SRCH_DATE& A, const SRCH_DATE& B, const SRCH_DATE& C,
        INT4 Relation, INT4 Type)
{
  switch (Relation) {
    case ZRelGE: // greater than or equals
      // Match when B is the FIRST element >= Key (A is still below Key,
      // or we are at the start).
      if ((B>=Key) && (Type==-1 || A<Key))
        {
          return(MATCH); // exact place - lower boundary
        }
      else if (A>=Key)
        {
          return(TOO_LOW); // key too low
        }
      else
        {
          return(TOO_HIGH); // key too high
        }
    case ZRelGT: // greater than
      if ((B>Key) && (Type==-1 || A<=Key))
        {
          return(MATCH); // exact place - lower boundary
        }
      else if (A>Key)
        {
          return(TOO_LOW); // key too low
        }
      else
        {
          return(TOO_HIGH); // key too high
        }
    case ZRelLE: // less than or equals
      // Match when B is the LAST element <= Key (C is already above Key,
      // or we are at the end).
      if ((B<=Key) && (Type==0 || C>Key))
        return(MATCH); // exact place - upper boundary
      else if (C<=Key)
        return(TOO_HIGH);
      else
        return(TOO_LOW);
    case ZRelLT: // less than
      if ((B<Key) && (Type==0 || C>=Key))
        {
          return(MATCH); // exact place - upper boundary
        }
      else if (C<Key)
        {
          return(TOO_HIGH);
        }
      else
        {
          return(TOO_LOW);
        }
    default: break;
  }
  // Unsupported relation: report and fail the probe.
  message_log (LOG_PANIC, "Hideous Matching Error");
  return(NO_MATCH);
}
// Gp Search: same three-way probe as the date Matcher above, but over
// global start positions (GPTYPE) rather than dates.  An unhandled
// Relation falls out of the switch, logs a panic and returns NO_MATCH.
SearchState DATELIST::Matcher(GPTYPE Key, GPTYPE A, GPTYPE B, GPTYPE C,
        INT4 Relation, INT4 Type)
{
  switch (Relation) {
    case ZRelGE: // greater than or equals
      if ((B>=Key) && (Type==-1 || A<Key))
        return(MATCH); // exact place - lower boundary
      else if (A>=Key)
        return(TOO_LOW); // key too low
      else
        return(TOO_HIGH); // key too high
    case ZRelGT: // greater than
      if ((B>Key) && (Type==-1 || A<=Key))
        return(MATCH); // exact place - lower boundary
      else if (A>Key)
        return(TOO_LOW); // key too low
      else
        return(TOO_HIGH); // key too high
    case ZRelLE: // less than or equals
      if ((B<=Key) && (Type==0 || C>Key))
        return(MATCH); // exact place - upper boundary
      else if (C<=Key)
        return(TOO_HIGH);
      else
        return(TOO_LOW);
    case ZRelLT: // less than
      if ((B<Key) && (Type==0 || C>=Key))
        return(MATCH); // exact place - upper boundary
      else if (C<Key)
        return(TOO_HIGH);
      else
        return(TOO_LOW);
  }
  message_log (LOG_PANIC, "Hideous Matching Error");
  return(NO_MATCH);
}
// Search for the GP INT4 values.
// Ultimately, these routines will try to load the table in one chunk of
// memory and search it there (MemFind); until that is implemented every
// Find() simply delegates to DiskFind().

// GP search against index file Fn; Fn also becomes the current file.
SearchState DATELIST::Find(const STRING& Fn, GPTYPE Key, INT4 Relation, INT4 *Index)
{
  SetFileName(Fn);
  return DiskFind(Fn, Key, Relation, Index);
}

// Date search against index file Fn; Fn also becomes the current file.
SearchState DATELIST::Find(const STRING& Fn, const SRCH_DATE& Key, INT4 Relation, INT4 *Index)
{
  SetFileName(Fn);
  return DiskFind(Fn, Key, Relation, Index);
}

// Date search against the previously set FileName.
SearchState DATELIST::Find(const SRCH_DATE& Key, INT4 Relation, INT4 *Index)
{
  return DiskFind(FileName, Key, Relation, Index);
}

// GP search against the previously set FileName.
SearchState DATELIST::Find(GPTYPE Key, INT4 Relation, INT4 *Index)
{
  return DiskFind(FileName, Key, Relation, Index);
}

// In-memory search: not implemented yet -- always reports NO_MATCH.
SearchState DATELIST::MemFind(const SRCH_DATE& Key, INT4 Relation, INT4 *Index)
{
  return NO_MATCH;
}
// Binary search, on disk, for the boundary DATEFLD whose date value
// satisfies Relation (ZRelGE/GT/LE/LT) against Key.  On success *Index
// receives the element position and MATCH is returned; otherwise
// *Index is set to -1 and TOO_LOW/TOO_HIGH/NO_MATCH says how it failed.
SearchState DATELIST::DiskFind(STRING Fn, const SRCH_DATE& Key, INT4 Relation, INT4 *Index)
{
  PFILE Fp = fopen(Fn, "rb");
  if (!Fp) {
    message_log (LOG_ERRNO, "Can't open date index '%s'", Fn.c_str());
    *Index = -1;
    return NO_MATCH;
  } else {
    INT4 Total;
    INT Low, High, X, OX;
    SearchState State;
    INT Type=0;
    if (getObjID(Fp) != objDLIST)
      {
        // BUGFIX: the original called fclose(Fp) BEFORE feof(Fp),
        // using the stream after it was closed.  Test first, close after.
        if (feof(Fp))
          message_log (LOG_INFO, "Empty index: '%s'", Fn.c_str());
        else
          message_log (LOG_PANIC, "%s not a date index??", Fn.c_str());
        fclose(Fp);
        *Index = -1;
        return NO_MATCH;
      }
    Read(&Total, Fp);
    Low = 0;
    High = Total - 1;
    X = High / 2;
    DATEFLD lowerBound;
    DATEFLD upperBound;
    DATEFLD value;
    do {
      OX = X;
      // Position the stream so (lowerBound, value, upperBound) can be
      // read around probe point X; at the edges the missing bound is
      // duplicated from value below.
      if ((X > 0) && (X < High)) {
        fseek(Fp, SIZEOF_HEADER + (X-1) * sizeof(DATEFLD), SEEK_SET);
        Type=INSIDE;
      } else if (X <= 0) {
        fseek(Fp, SIZEOF_HEADER + X * sizeof(DATEFLD), SEEK_SET);
        Type=AT_START;
      } else if (X >= High) {
        fseek(Fp, SIZEOF_HEADER + (X-1) * sizeof(DATEFLD), SEEK_SET);
        Type=AT_END;
      }
      if (Type != AT_START)
        Read(&lowerBound, Fp);
      Read(&value, Fp); // Gps, NumericValue
      // At the start there is no predecessor: reuse value so lowerBound
      // is never uninitialized (also covers two-element indexes).
      if (Type == AT_START)
        lowerBound = value;
      if (Type != AT_END)
        Read(&upperBound, Fp);
      // Symmetrically, at the end there is no successor to read.
      if (Type == AT_END)
        upperBound = value;
      State = Matcher(Key, lowerBound.GetValue(), value.GetValue(), upperBound.GetValue(), Relation, Type);
      if (State == MATCH) {
        // We got a match.
        fclose(Fp);
        *Index = X;
        return MATCH;
      } else if ((State == TOO_HIGH) && (Type == AT_END)) {
        // Ran off the upper end: key is bigger than anything indexed.
        fclose(Fp);
        *Index = -1;
        return State;
      } else if ((State == TOO_LOW) && (Type == AT_START)) {
        // Ran off the lower end: key is smaller than anything indexed.
        fclose(Fp);
        *Index = -1;
        return State;
      } else if (Low >= High) {
        // Search space exhausted without a match; belt-and-braces --
        // usually caught by one of the preceding conditions.
        fclose(Fp);
        *Index = -1;
        return NO_MATCH;
      }
      if (State == TOO_LOW)
        High = X;
      else
        Low = X + 1;
      X = (Low + High) / 2;
      if (X < 0) {
        X = 0;
      } else {
        if (X >= Total) {
          X = Total - 1;
        }
      }
    } while (X != OX);
  }
  fclose(Fp);
  *Index = -1;
  return NO_MATCH;
}
// This one searches for Gps!!!  Same bisection as DiskFind(SRCH_DATE),
// but it compares global start positions.  The Total+X offsets skip the
// first Total elements -- presumably a GP-ordered copy stored after the
// date-ordered table; confirm the index file layout.
SearchState DATELIST::DiskFind(STRING Fn, GPTYPE Key, INT4 Relation, INT4 *Index)
{
  PFILE Fp = fopen(Fn, "rb");
  if (!Fp) {
    // BUGFIX: corrected "faolure" -> "failure" in the log message.
    message_log (LOG_ERRNO, "Datelist Index open failure '%s'", Fn.c_str());
    *Index = -1;
    return NO_MATCH;
  } else {
    _Count_t Total;
    INT Low, High, X, OX;
    SearchState State;
    INT Type=0;
    off_t Offset;                         // offset needed to read the element
    size_t ElementSize = sizeof(DATEFLD);
    if (getObjID(Fp) != objDLIST)
      {
        if (feof(Fp))
          message_log (LOG_INFO, "Empty index: '%s'", Fn.c_str());
        else
          message_log (LOG_PANIC, "%s not a date index??", Fn.c_str());
        // BUGFIX: the stream was leaked on this path; close it before
        // returning.
        fclose(Fp);
        *Index = -1;
        return NO_MATCH;
      }
    Read(&Total,Fp);
    Low = 0;
    High = Total - 1;
    X = High / 2;
    DATEFLD Value, Lower, Upper;
    do {
      OX = X;
      // Position the stream so (Lower, Value, Upper) can be read around
      // probe point X; missing edge bounds are duplicated from Value.
      if ((X > 0) && (X < High)) {
        Offset = SIZEOF_HEADER + (Total+X-1) * ElementSize;
        fseek(Fp, (long)Offset, SEEK_SET);
        Type=INSIDE;
      } else if (X <= 0) {
        Offset = SIZEOF_HEADER + (Total+X) * ElementSize;
        fseek(Fp, (long)Offset, SEEK_SET);
        Type=AT_START;
      } else if (X >= High) {
        Offset = SIZEOF_HEADER + (Total+X-1) * ElementSize;
        fseek(Fp, (long)Offset, SEEK_SET);
        Type=AT_END;
      }
      if (Type != AT_START)
        Read(&Lower, Fp);
      Read(&Value, Fp);
      // No predecessor at the start: keep Lower initialized (also
      // covers two-element indexes).
      if (Type == AT_START)
        Lower = Value;
      if (Type != AT_END)
        Read(&Upper,Fp);
      // No successor at the end either.
      if (Type == AT_END)
        Upper = Value;
      State = Matcher(Key,
                      Lower.GetGlobalStart(),
                      Value.GetGlobalStart(),
                      Upper.GetGlobalStart(),
                      Relation, Type);
      if (State == MATCH) {
        // We got a match.
        fclose(Fp);
        *Index = X;
        return MATCH;
      } else if ((State == TOO_HIGH) && (Type == AT_END)) {
        // Ran off the upper end: key is bigger than anything indexed.
        fclose(Fp);
        *Index = -1;
        return State;
      } else if ((State == TOO_LOW) && (Type == AT_START)) {
        // Ran off the lower end: key is smaller than anything indexed.
        fclose(Fp);
        *Index = -1;
        return State;
      } else if (Low >= High) {
        // Search space exhausted without a match.
        fclose(Fp);
        *Index = -1;
        return NO_MATCH;
      }
      if (State == TOO_LOW) {
        High = X;
      } else {
        Low = X + 1;
      }
      X = (Low + High) / 2;
      if (X < 0) {
        X = 0;
      } else {
        if (X >= Total) {
          X = Total - 1;
        }
      }
    } while (X != OX);
  }
  fclose(Fp);
  *Index = -1;
  return NO_MATCH;
}
// Dump every entry of the in-core table to the given output stream,
// delegating the per-element formatting to DATEFLD::Dump().
void DATELIST::Dump(ostream& os) const
{
  _Count_t i = 0;
  while (i < Count)
    {
      table[i].Dump(os);
      ++i;
    }
}
// Dump entries in the half-open index range [start, end) to `os`.
// The range is clamped to the valid portion of the table, so out-of-range
// arguments are safe; an empty (or inverted) range prints nothing.
void DATELIST::Dump(INT4 start, INT4 end, ostream& os) const
{
  if (start < 0)
    start = 0;
  if (end > Count)
    end = Count;
  INT x = start;
  while (x < end)
    {
      table[x].Dump(os);
      x++;
    }
}
// Append to the in-core table those on-disk elements, beginning at index
// Start, whose date matches `date` exactly.  The on-disk list is assumed
// to be sorted by date, so the scan stops at the first element larger
// than `date`.  Returns the number of records appended.
//
// NOTE(review): unlike LoadTable(Start, End, Offset) below, this variant
// reads the element count without first consuming the object-ID header
// via getObjID() -- confirm the intended on-disk layout for files read
// through this path.
size_t DATELIST::LoadTable(INT4 Start, SRCH_DATE date)
{
  // Byte offset of element `Start`; elements follow the fixed-size header.
  off_t MoveTo = SIZEOF_HEADER + (Start*sizeof(DATEFLD));
  size_t nRecs = 0;          // number of matching records loaded
  _Count_t nCount;           // element count as recorded in the file
  FILE *fp;
  if ((fp=fopen(FileName,"rb")) != NULL) {
    DATEFLD datefield;
    SRCH_DATE sdate;
    Read(&nCount, fp);
    fseek(fp, MoveTo, SEEK_SET);
    errno = 0;
    // Reserve room for the worst case: all remaining elements match.
    Resize(Count+ nCount-Start +1);
    for (size_t i=Start; i<nCount; i++) {
      if (feof(fp))
        break;
      ::Read(&datefield, fp);
      sdate = datefield.GetValue();
      if (sdate < date)
        continue;            // not there yet: keep scanning forward
      if (sdate > date)
        {
          break;             // sorted input: no further matches possible
        }
      if(Count == MaxEntries)
        Expand();
      table[Count++] = datefield;
      nRecs++;
    }
    fclose(fp);
  }
  return nRecs;
}
// Load elements [Start, End] (0-based, inclusive) from the raw on-disk
// list into the in-core table, appending at table[Count].  Start == -1
// means 0; End == -1 (or past the file) means the last element.
// Returns the number of elements actually read.
//
// NOTE(review): this variant computes offsets WITHOUT SIZEOF_HEADER,
// i.e. it treats the file as header-less -- matching the output of
// WriteTable() -- whereas the three-argument overload skips a header.
// Confirm callers only use it on such raw files.
size_t DATELIST::LoadTable(INT4 Start, INT4 End)
{
  FILE *fp;
  size_t nRecs=0;
  if ( FileName.IsEmpty() ) {
    message_log (LOG_PANIC, "DATELIST::LoadTable: FileName not set");
    return 0;
  }
  // Total number of whole elements present in the file.
  const long Elements = (long)(GetFileSize(FileName)/sizeof(DATEFLD));
  if (Elements == 0)
    {
      // message_log (LOG_WARN, "DATELIST: '%s' is empty!", FileName.c_str());
      return nRecs;
    }
  if (Start > Elements)
    {
      message_log (LOG_WARN, "DATELIST: Start %d > element count (%ld). Nothing to load.",
        (int)Start, Elements);
      return nRecs;
    }
  if ((fp=fopen(FileName,"rb")) != NULL) {
    // Normalize the requested range to valid element indices.
    if (Start == -1) Start=0;
    if ((End == -1) || (End>=Elements) ) End = (INT4)(Elements - 1);
    if (Start > 0)
      if (fseek(fp, (off_t)Start*sizeof(DATEFLD), SEEK_SET) == -1)
        message_log (LOG_ERRNO, "DATELIST: Seek error on '%s'", FileName.c_str());
    Resize(Count + End-Start + 1); // Make sure there is some room
    errno = 0;
    for (size_t i=Start;i<=End;i++){
      if (feof(fp))
        {
          message_log (
                (errno ? LOG_ERRNO : LOG_ERROR),
                "Premature date list read-failure in '%s' [%d into (%d,%d) of %ld]",
                FileName.c_str(), (int)i, (int)Start, (int)End, Elements);
          break;
        }
      ::Read(&table[Count], fp);
      //cerr << "Read [" << Count << "] " << table[Count] << endl;
      nRecs++;
      // Grow BEFORE the next write so table[Count] stays in bounds.
      if(++Count==MaxEntries)
        Expand();
    }
    fclose(fp);
  } else
    message_log (LOG_ERROR, "Could not open '%s'", FileName.c_str());
  return nRecs;
}
// Load elements [Start, End] (0-based, inclusive) from the indexed
// on-disk list into the in-core table, appending at table[Count].
//
// The file holds TWO tables of nCount entries each after the header:
// one sorted by starting value (VAL_BLOCK) and one sorted by global
// pointer (GP_BLOCK); `Offset` selects which copy to read.
//
// Returns the actual number of items read in this load.
//
// Fix: the premature-EOF message_log used "%d" for a size_t (`i`) and
// INT4 arguments without casts -- undefined varargs behaviour on LP64
// platforms.  Cast to (int) as the two-argument overload already does.
size_t DATELIST::LoadTable(INT4 Start, INT4 End, NumBlock Offset)
{
  // Start is the starting index (i.e., 0 based), and End is the ending
  // index, so this will load End-Start+1 items into table[Start] through
  // table[End]
  size_t nRecs = 0;
  if (FileName.IsEmpty() ) {
    message_log (LOG_PANIC, "Numeric List FileName not set");
    return 0;
  }
  if (GetFileSize(FileName) == 0) return 0; // Empty index
  FILE *fp = fopen(FileName,"rb");
  if (fp) {
    // Bump past Count, then offset to the right starting point
    // Note - there are two tables in the file - one sorted by starting
    // value, one sorted by ending value. There are Count entries in each
    // version.
    _Count_t nCount;
    off_t MoveTo;
    if (getObjID(fp)!= objDLIST)
      {
        fclose(fp);
        message_log (LOG_PANIC, "%s not a date list??", FileName.c_str());
        return 0;
      }
    Read(&nCount, fp);
    // Normalize the requested range to valid element indices.
    if ((End >= nCount) || (End < 0))
      End = nCount-1;
    if (Start < 0)
      Start = 0;
    //if (Start>0) Start--; //@@@@??
    if ((Start < nCount) && !feof(fp)) {
      // Select the first (value-sorted) or second (GP-sorted) table copy.
      if (Offset == VAL_BLOCK) {
        MoveTo = SIZEOF_HEADER + ((Start)*sizeof(DATEFLD));
      } else if (Offset == GP_BLOCK) {
        MoveTo = SIZEOF_HEADER + ((nCount+Start)*sizeof(DATEFLD));
      } else {
        MoveTo = 0;
      }
#if DEBUG
      cerr << "DATELIST: Moving " << MoveTo << " bytes into the file and reading "
           << nCount << " elements starting at table[" << Count << "]"
           << endl;
#endif
      if (MoveTo != SIZEOF_HEADER)
        if (fseek(fp, MoveTo, SEEK_SET) == -1)
          message_log (LOG_ERRNO, "Can't seek to %ld in '%s'", (long)MoveTo, FileName.c_str());
      Resize(Count + End-Start + 1);
      errno = 0;
      for (size_t i=Start; i<=End; i++) {
        if (feof(fp))
          {
            // Cast varargs so they match the "%d" conversions.
            message_log (LOG_ERRNO, "Premature date list read-failure in '%s' [%d in (%d,%d)]",
                FileName.c_str(), (int)i, (int)Start, (int)End);
            break;
          }
        ::Read(&table[Count], fp);
#if DEBUG
        cerr << "table[" << Count << "] = " << table[Count] << endl;
#endif
        nRecs++;
        // Grow BEFORE the next write so table[Count] stays in bounds.
        if(++Count == MaxEntries)
          Expand();
      }
    }
    fclose(fp);
  } else
    message_log (LOG_ERROR, "Could not open '%s'", FileName.c_str());
#if DEBUG
  cerr << "returning " << nRecs << endl;
#endif
  return nRecs;
}
// Write the whole in-core table to FileName as raw elements (no header).
// Silently does nothing when no file name is set or the file cannot be
// opened, matching the original best-effort behaviour.
void DATELIST::WriteTable()
{
  if ( FileName.IsEmpty() )
    return;
  FILE *fp = fopen(FileName, "wb");
  if (fp == NULL)
    return;
  _Count_t x = 0;
  while (x < Count)
    Write(table[x++], fp);
  fclose(fp);
}
// Write the whole in-core table to FileName at element offset `Offset`.
// Offset 0 truncates the file and writes the header (object ID + count)
// first; any other offset opens in append mode, letting the caller lay
// down the second (GP-sorted) copy after the first (see WriteIndex()).
void DATELIST::WriteTable(INT Offset)
{
  FILE *fp;
  if ( FileName.IsEmpty())
    return;
  // Offset = 0 is start of file
  if ((fp = fopen (FileName, (Offset == 0) ? "wb" : "a+b")) != NULL)
    {
      // First, write out the count
      if (Offset == 0)
        {
          // Write the header
          putObjID(objDLIST, fp);
          Write((_Count_t)Count, fp);
        }
      // Now, go to the specified offset and write the table
      off_t MoveTo = (off_t)(Offset*sizeof(DATEFLD)) + SIZEOF_HEADER;
#if DEBUG
      cerr << Count << " elements, Offsetting " << MoveTo << " bytes into the file." << endl;
#endif
      if (MoveTo != SIZEOF_HEADER)
        {
          if (fseek(fp, MoveTo, SEEK_SET) == -1)
            message_log (LOG_ERRNO, "Can't seek to %ld in '%s'", (long)MoveTo, FileName.c_str());
        }
      for (_Count_t x=0; x<Count; x++)
        {
#if DEBUG
          cerr << " write: " << table[x] << endl;
#endif
          Write(table[x], fp);
        }
      fclose(fp);
    }
}
// Release the element table.  `delete []` on a null pointer is a
// well-defined no-op, so the former `if (table)` guard was redundant.
DATELIST::~DATELIST()
{
  delete [] table;
}
// (Re)build the on-disk index `Fn` from its raw element file: load every
// element, then write two sorted copies back -- first sorted by value
// (with the header), then, appended at element offset Count, sorted by
// global pointer.
void DATELIST::WriteIndex(const STRING& Fn)
{
#if DEBUG
  cerr << "WriteIndex(" << Fn << ")" << endl;
#endif
  SetFileName(Fn);
  LoadTable(0,-1);          // slurp the whole raw table
#if DEBUG
  cerr << "Count = " << Count << endl;
#endif
  if (Count > 0)
    {
      Sort();               // by value
      WriteTable(0);        // first copy, with header
      SortByGP();           // by global pointer
      WriteTable(Count);    // second copy, appended
    }
}
// Reduce the two tables down to one to allow for a simple
// write append
//
// If `Fn` already holds a finished index (object ID + count + two sorted
// table copies), copy its first `Total` elements to a temporary file,
// move that back over `Fn`, and reopen for append.  Files that are
// empty, header-less, or have a zero count fall through to a plain
// "wb"/"a+b" open.  Returns the open stream, or NULL on failure.
FILE *DATELIST::OpenForAppend(const STRING& Fn)
{
  // New file?
  if (GetFileSize(Fn) == 0)
    return fopen(Fn, "wb");
  FILE *Fp = fopen(Fn, "rb"); // Read-only: we only copy out of it
  if (Fp == NULL)
    {
      message_log (LOG_ERRNO, "DATELIST:: Can't open '%s' for reading", Fn.c_str());
      return NULL;
    }
  if (getObjID(Fp)!= objDLIST)
    {
      // Not an indexed file: safe to append to as-is.
#if DEBUG
      cerr << "OpenForAppend = a+b...." << endl;
#endif
      fclose(Fp);
      return fopen(Fn, "a+b"); // Append
    }
  _Count_t Total; // This MUST match the type of Count!!!
  Read(&Total, Fp);
  if (Total == 0)
    {
      // Nothing in there so
      fclose(Fp);
      return fopen(Fn, "wb"); // Can start from scratch
    }
  // Pick a temporary file name that does not already exist.
  STRING TmpName = Fn + "~";
  for (size_t i =0; FileExists(TmpName); i++)
    {
      TmpName.form ("%s.%d", Fn.c_str(), (int)i);
    }
  FILE *oFp = fopen(TmpName, "wb");
  if (oFp == NULL)
    {
      // Fall into scratch (we might not have write permission).
      // NOTE(review): tmpnam() is deprecated and has a create-time race
      // window; prefer mkstemp() -- confirm before changing.
      char scratch[ L_tmpnam+1];
      char *TempName = tmpnam( scratch );
      message_log (LOG_WARN, "Could not create '%s', trying tmp '%s'", TmpName.c_str(),
        TempName);
      if ((oFp = fopen(TempName, "wb")) == NULL)
        {
          message_log (LOG_ERRNO, "Can't create a temporary list '%s'", Fn.c_str());
          fclose(Fp);
          return NULL;
        }
      TmpName = TempName; // Set it
    }
  // Copy over the first (value-sorted) table only, dropping the second.
  // NOTE(review): no feof()/error check inside this loop; a truncated
  // file would silently copy garbage -- assumes Total is accurate.
  DATEFLD fld;
  for (_Count_t i=0; i< Total; i++)
    {
      fld.Read(Fp);
#if DEBUG
      cerr << "DATELIST [" << i << "] = " << fld << endl;
#endif
      fld.Write(oFp);
    }
  fclose(Fp);
  fclose(oFp);
  if (::remove(Fn) == -1)
    message_log (LOG_ERRNO, "Can't remove '%s'", Fn.c_str());
  if (RenameFile(TmpName, Fn) == -1)
    message_log (LOG_ERRNO, "Can't rename '%s' to '%s'", TmpName.c_str(), Fn.c_str());
  // Now open for append
  if ((Fp = fopen(Fn, "a+b")) == NULL)
    message_log (LOG_ERRNO, "Could not open '%s' for date list append", Fn.c_str());
  else
    message_log (LOG_DEBUG, "Opening '%s' for date list append", Fn.c_str());
  return Fp;
}
|
-- Convert schema './Tapper-Schema-ReportsDB-2.010013-MySQL.sql' to 'Tapper::Schema::ReportsDB v2.010015':
BEGIN;
-- Store report file contents as LONGBLOB (non-null, empty by default).
ALTER TABLE reportfile CHANGE COLUMN filecontent filecontent LONGBLOB NOT NULL DEFAULT '';
-- Make the report-section language description a free-text column.
ALTER TABLE reportsection CHANGE COLUMN language_description language_description text;
COMMIT;
|
---
'@backstage/plugin-techdocs': patch
---
Handle URLs with a `#hash` correctly when rewriting link URLs.
|
<?php
namespace JTL\Extensions\Upload;
use JTL\DB\ReturnType;
use JTL\Nice;
use JTL\Shop;
use stdClass;
/**
 * Class Scheme
 *
 * A single upload scheme: what kind of file a customer may (or must)
 * attach to an entity, with localized name/description.
 *
 * @package JTL\Extensions\Upload
 */
final class Scheme
{
    /**
     * Primary key (tuploadschema.kUploadSchema).
     * @var int
     */
    public $kUploadSchema;

    /**
     * ID of the associated entity; its meaning depends on nTyp.
     * @var int
     */
    public $kCustomID;

    /**
     * Upload type constant (e.g. \UPLOAD_TYP_WARENKORBPOS).
     * @var int
     */
    public $nTyp;

    /**
     * Display name (localized when a translation exists).
     * @var string
     */
    public $cName;

    /**
     * Description (localized when a translation exists).
     * @var string
     */
    public $cBeschreibung;

    /**
     * Accepted file type(s).
     * @var string
     */
    public $nTyp2; // placeholder removed below -- see cDateiTyp

    /**
     * Accepted file type(s).
     * @var string
     */
    public $cDateiTyp;

    /**
     * Whether the upload is mandatory.
     * @var int
     */
    public $nPflicht;

    /**
     * Cached result of the extension license check.
     * @var bool
     */
    private $licenseOK;

    /**
     * Scheme constructor.
     *
     * @param int $id upload scheme ID; 0 creates an empty instance
     */
    public function __construct(int $id = 0)
    {
        $this->licenseOK = self::checkLicense();
        if ($id > 0 && $this->licenseOK === true) {
            $this->loadFromDB($id);
        }
    }

    /**
     * Check whether the uploads extension is licensed for this shop.
     *
     * @return bool
     */
    public static function checkLicense(): bool
    {
        return Nice::getInstance()->checkErweiterung(\SHOP_ERWEITERUNG_UPLOADS);
    }

    /**
     * Populate this instance from the database, preferring the
     * localized name/description for the current shop language.
     *
     * @param int $id
     */
    private function loadFromDB(int $id): void
    {
        $upload = Shop::Container()->getDB()->queryPrepared(
            'SELECT tuploadschema.kUploadSchema, tuploadschema.kCustomID, tuploadschema.nTyp,
                tuploadschema.cDateiTyp, tuploadschema.nPflicht, tuploadschemasprache.cName,
                tuploadschemasprache.cBeschreibung
                FROM tuploadschema
                LEFT JOIN tuploadschemasprache
                    ON tuploadschemasprache.kArtikelUpload = tuploadschema.kUploadSchema
                    AND tuploadschemasprache.kSprache = :lid
                WHERE kUploadSchema = :uid',
            [
                'lid' => Shop::getLanguageID(),
                'uid' => $id
            ],
            ReturnType::SINGLE_OBJECT
        );
        if (isset($upload->kUploadSchema) && (int)$upload->kUploadSchema > 0) {
            self::copyMembers($upload, $this);
        }
    }

    /**
     * @return int
     * @deprecated since 5.0.0
     */
    public function save(): int
    {
        \trigger_error(__METHOD__ . ' is deprecated.', \E_USER_DEPRECATED);

        return 0;
    }

    /**
     * @return int
     * @deprecated since 5.0.0
     */
    public function update(): int
    {
        \trigger_error(__METHOD__ . ' is deprecated.', \E_USER_DEPRECATED);

        return 0;
    }

    /**
     * @return int
     * @deprecated since 5.0.0
     */
    public function delete(): int
    {
        \trigger_error(__METHOD__ . ' is deprecated.', \E_USER_DEPRECATED);

        return 0;
    }

    /**
     * Fetch all upload schemes of the given type; for cart-position
     * uploads the result is additionally restricted to $kCustomID.
     *
     * @param int $kCustomID
     * @param int $type
     * @return stdClass[]
     */
    public function fetchAll(int $kCustomID, int $type): array
    {
        if (!$this->licenseOK) {
            return [];
        }
        $params = ['tpe' => $type, 'lid' => Shop::getLanguageID()];
        $filter = '';
        if ($type === \UPLOAD_TYP_WARENKORBPOS) {
            // Bind the custom ID as a parameter instead of concatenating
            // it into the SQL string.
            $filter        = ' AND kCustomID = :cid';
            $params['cid'] = $kCustomID;
        }

        return Shop::Container()->getDB()->queryPrepared(
            'SELECT tuploadschema.kUploadSchema, tuploadschema.kCustomID, tuploadschema.nTyp,
                tuploadschema.cDateiTyp, tuploadschema.nPflicht,
                IFNULL(tuploadschemasprache.cName,tuploadschema.cName ) cName,
                IFNULL(tuploadschemasprache.cBeschreibung, tuploadschema.cBeschreibung) cBeschreibung
                FROM tuploadschema
                LEFT JOIN tuploadschemasprache
                    ON tuploadschemasprache.kArtikelUpload = tuploadschema.kUploadSchema
                    AND tuploadschemasprache.kSprache = :lid
                WHERE nTyp = :tpe' . $filter,
            $params,
            ReturnType::ARRAY_OF_OBJECTS
        );
    }

    /**
     * Copy all public members of $objFrom onto $objTo (created as a
     * stdClass when not supplied).
     *
     * @param object      $objFrom
     * @param object|null $objTo
     * @return null|object
     */
    private static function copyMembers($objFrom, &$objTo = null)
    {
        if (!\is_object($objTo)) {
            $objTo = new stdClass();
        }
        foreach (\array_keys(\get_object_vars($objFrom)) as $member) {
            $objTo->$member = $objFrom->$member;
        }

        return $objTo;
    }
}
|
# frozen_string_literal: true
module Paperclip
  # This module contains all the methods that are available for interpolation
  # in paths and urls. To add your own (or override an existing one), you
  # can either open this module and define it, or call the
  # Paperclip.interpolates method.
  module Interpolations
    extend self

    # Hash assignment of interpolations. Included only for compatibility,
    # and is not intended for normal use.
    # NOTE: defines an instance method; thanks to `extend self` it is
    # also callable directly on the module.
    def self.[]= name, block
      define_method(name, &block)
    end

    # Hash access of interpolations. Included only for compatibility,
    # and is not intended for normal use.
    def self.[] name
      method(name)
    end

    # Matches one ":token" in a pattern, e.g. ":basename".
    INTERPOLATION_REGEXP = /:\w+/

    # Perform the actual interpolation. Takes the pattern to interpolate
    # and the arguments to pass, which are the attachment and style name.
    # You can pass a method name on your record as a symbol, which should
    # return an interpolation pattern for Paperclip to use.
    # Tokens with no matching interpolation method are left verbatim.
    def self.interpolate(pattern, attachment, *args)
      pattern = attachment.instance.send(pattern) if pattern.kind_of? Symbol
      pattern.gsub(INTERPOLATION_REGEXP) do |match|
        method = match[1..-1] # strip the leading ':'
        respond_to?(method) ? public_send(method, attachment, *args) : match
      end
    end

    # Memoized cache for pluralization/underscore lookups.
    def self.plural_cache
      @plural_cache ||= PluralCache.new
    end

    # Returns the filename, the same way as ":basename.:extension" would.
    def filename attachment, style_name
      "#{basename(attachment, style_name)}.#{extension(attachment, style_name)}"
    end

    # This interpolation is used in the default :path to ease default specifications.
    # So it just interpolates :url template without checking if processing and
    # file existence.
    def url attachment, style_name
      interpolate(attachment.class.url_template, attachment, style_name)
    end

    # Returns the timestamp as defined by the <attachment>_updated_at field.
    # NOTE: style_name is accepted for interface uniformity but unused.
    def timestamp attachment, style_name
      attachment.instance_read(:updated_at).to_s
    end

    # Returns the Rails.root constant.
    def rails_root attachment, style_name
      Rails.root
    end

    # Returns the Rails.env constant.
    def rails_env attachment, style_name
      Rails.env
    end

    # Returns the underscored, pluralized version of the class name.
    # e.g. "users" for the User class.
    # NOTE: The arguments need to be optional, because some tools fetch
    # all class names. Calling #class will return the expected class.
    def class attachment = nil, style_name = nil
      return super() if attachment.nil? && style_name.nil?
      plural_cache.underscore_and_pluralize_class(attachment.instance.class)
    end

    # Returns the basename of the file. e.g. "file" for "file.jpg"
    def basename attachment, style_name
      File.basename(attachment.original_filename, ".*")
    end

    # Returns the extension of the file. e.g. "jpg" for "file.jpg"
    # If the style has a format defined, it will return the format instead
    # of the actual extension.
    # NOTE: style_name is deliberately reassigned to the style hash here.
    def extension attachment, style_name
      ((style_name = attachment.styles[style_name]) && style_name[:format]) ||
        File.extname(attachment.original_filename)[1..-1] || ''
    end

    # Returns the id of the instance.
    def id attachment, style_name
      attachment.instance.id
    end

    # Returns the id of the instance in a split path form. e.g. returns
    # 000/001/234 for an id of 1234.
    def id_partition attachment, style_name
      ("%09d" % attachment.instance.id).scan(/\d{3}/).join("/")
    end

    # Returns the pluralized form of the attachment name. e.g.
    # "avatars" for an attachment of :avatar
    def attachment attachment, style_name
      plural_cache.pluralize_symbol(attachment.name)
    end

    # Returns the style, or the default style if nil is supplied.
    def style attachment, style_name
      style_name || attachment.default_style
    end
  end
end
|
require 'forwardable'
module Ripgrep
  # Thin wrapper around Core that assembles a ripgrep command line from
  # positional arguments plus an options Hash.
  class Client
    extend Forwardable

    def_delegators Core, :version, :help, :files

    def initialize(verbose: false)
      @verbose = verbose
    end

    # Invoke ripgrep. The trailing argument may be an options Hash
    # (:verbose, :options, :path); any other value is treated as one
    # more positional argument.
    def exec(*args, opts)
      unless opts.is_a? Hash
        args << opts
        opts = {}
      end
      verbose =
        if opts[:verbose].nil?
          @verbose
        else
          !!opts[:verbose]
        end
      raw_options = opts[:options]
      cli_options =
        if raw_options
          raw_options.each_with_object([]) do |(key, val), acc|
            next unless val
            val = '' if val.is_a? TrueClass
            val = val.join if val.is_a? Array
            flag = key.to_s.tr('_', '-')
            acc << "--#{flag} #{val}".strip
          end
        else
          []
        end
      puts "cli_options: #{cli_options}" if verbose
      cli_arguments = cli_options + args
      cli_arguments.push(opts[:path] || '.')
      puts "cli_arguments: #{cli_arguments}" if verbose
      Core.exec(*cli_arguments, verbose: verbose)
    end

    # Evaluate the given block in the context of this client, so that
    # bare `rg` calls inside it dispatch to the private helper below.
    def run(&block)
      instance_eval(&block)
    end

    private

    def rg(*args)
      return self if args.empty?
      exec(*args, verbose: @verbose)
    end
  end
end
|
## Changelog (Current version: 0.9.3)
-----------------
### 0.9.3 (2018 May 04)
* [630c83b] Prepare for 0.9.3
* [71c1702] proper indentation (#2)
### 0.9.2 (2018 Feb 13)
* [a33d6bd] Prepare for 0.9.2
### 0.9.1 (2018 Feb 12)
* [46c5d38] Prepare for 0.9.1
* [6bfaf25] Merge pull request #1 from bitrise-steplib/viktorbenei-patch-1
* [0344e13] password input title enhancement
-----------------
Updated: 2018 May 04
|
module Test.Util
( module Test.Util
, module Ex
, HC.Address
, T.Text
, BS.ByteString
)
where
import qualified Servant as S
import Control.Monad.IO.Class as Ex
import Data.Word as Ex
import Data.String.Conversions as Ex
import Data.Either as Ex
import Data.Maybe as Ex
import qualified Data.Text as T
import Control.Monad as Ex
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BL
import qualified Network.Haskoin.Crypto as HC
import qualified Network.Haskoin.Script as HS
import qualified Servant.API.ResponseHeaders as H
import System.Environment (lookupEnv)
import Text.Read (readMaybe)
-- | Read the server port from the @PORT@ environment variable,
--   yielding 'Nothing' when it is unset or not a parsable 'Word'.
--   ('maybe Nothing readMaybe' is exactly the Maybe bind.)
envReadPort :: IO (Maybe Word)
envReadPort = fmap (>>= readMaybe) (lookupEnv "PORT")
-- | Attach a @Content-Transfer-Encoding: binary@ header to a response value.
binaryHeader :: a -> H.Headers '[S.Header "Content-Transfer-Encoding" String] a
binaryHeader = H.addHeader "binary"

-- | Abort request handling with a 404 whose body is the given message.
userErr :: BS.ByteString -> S.Handler a
userErr msg = S.throwError $ S.err404 { S.errBody = BL.fromStrict msg }
-- Bitcoin
-- | Build the output script for an address: pay-to-pubkey-hash for
--   public-key addresses, pay-to-script-hash for script addresses.
addressToScript :: HC.Address -> HS.ScriptOutput
addressToScript addr =
    case addr of
        a@(HC.PubKeyAddress _) -> HS.PayPKHash a
        a@(HC.ScriptAddress _) -> HS.PayScriptHash a

-- | Serialized byte form of 'addressToScript'.
addressScriptBS :: HC.Address -> BS.ByteString
addressScriptBS = HS.encodeOutputBS . addressToScript
|
import styled from 'styled-components';
import React, { FC } from 'react';
import { Button, CenterButtonText, ButtonProps } from './Buttons';
// Styled button used for login/text actions. On hover/focus/active it
// shows a glow whose colour comes from the `glowColor` prop.
// Fix: the previous default 'RGBA(0, 0, 0, 255, 0.67)' passed five
// arguments to rgba(), which is invalid CSS -- browsers drop the whole
// box-shadow declaration, so the glow never appeared by default.
export const StyledLoginBtn = styled(Button)`
	background: ${({ background }: ButtonProps) => background || 'white'};
	color: ${({ color }: ButtonProps) => color || 'black'};
	flex-shrink: 0;
	font-family: 'Roboto';
	font-size: 1rem;
	outline: 0 !important;

	&:hover,
	&:focus,
	&:active {
		box-shadow: 0px 0px 20px 0px
			${({ glowColor = 'rgba(0, 0, 0, 0.67)' }: ButtonProps) => glowColor};
	}
`;
const TextButton: FC<ButtonProps> = ({
children,
onClick,
fontWeight,
fontSize,
color,
...props
}: ButtonProps) => (
<StyledLoginBtn onClick={onClick} {...props}>
<CenterButtonText {...{ color, fontSize, fontWeight }}>{children}</CenterButtonText>
</StyledLoginBtn>
);
export default TextButton;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.