{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'OCR模型免费转Markdown' && linkText !== 'OCR模型免费转Markdown' ) { link.textContent = 'OCR模型免费转Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== '模型下载攻略' ) { link.textContent = '模型下载攻略'; link.href = '/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || linkText.match(/^s*Enterprises*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'OCR模型免费转Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, code').forEach(element => { const text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); \n\n\n"}}},{"rowIdx":2027,"cells":{"text":{"kind":"string","value":"import { CloudFrontToS3 } from \"@aws-solutions-constructs/aws-cloudfront-s3\";\nimport {\n aws_certificatemanager,\n aws_route53,\n aws_route53_targets,\n aws_s3_deployment,\n} from \"aws-cdk-lib\";\nimport { Construct } from \"constructs\";\n\ninterface Props {\n hostedZone: aws_route53.IPublicHostedZone;\n certificate: aws_certificatemanager.ICertificate;\n}\n\nexport class AppDistributionConstruct extends Construct {\n readonly hostedZone: aws_route53.IPublicHostedZone;\n constructor(scope: Construct, id: string, props: Props) {\n super(scope, id);\n\n const { hostedZone, certificate } = props;\n\n const appDistributionDomainName = `app.${hostedZone.zoneName}`;\n\n const { s3Bucket: appBucket, cloudFrontWebDistribution: appDistribution } =\n new CloudFrontToS3(this, \"AppDistribution\", {\n insertHttpSecurityHeaders: false,\n cloudFrontDistributionProps: {\n certificate,\n domainNames: [appDistributionDomainName],\n },\n });\n\n new aws_s3_deployment.BucketDeployment(this, \"AppDeployment\", {\n destinationBucket: appBucket!,\n distribution: appDistribution!,\n sources: [aws_s3_deployment.Source.asset(\"sources/app/build\")],\n });\n\n new aws_route53.ARecord(this, \"AppRecord\", {\n zone: hostedZone,\n recordName: `app.${hostedZone.zoneName}`,\n target: aws_route53.RecordTarget.fromAlias(\n new aws_route53_targets.CloudFrontTarget(appDistribution)\n ),\n });\n }\n}\n"}}},{"rowIdx":2028,"cells":{"text":{"kind":"string","value":"package uk.co.appsbystudio.geoshare.friends.manager\n\ninterface FriendsManagerPresenter {\n\n fun friends()\n\n fun viewpagerItem(item: Int)\n\n fun search()\n\n fun invalidSession()\n\n fun stop()\n}"}}},{"rowIdx":2029,"cells":{"text":{"kind":"string","value":"input('dep').' '.$request->input('arr').\n ' '.$request->input('debut').' '.$request->input('fin').\n ' '.$request->input('sel'), $output, $ret_code);\n $f=fopen(\"b.txt\",\"r\");\n $res=fgets($f);\n return view('result',['res'=>$res]);\n\n\n }\n}\n"}}},{"rowIdx":2030,"cells":{"text":{"kind":"string","value":"//Autogenerated by SSDCPortal.EntityGenerator\nusing SSDCPortal.Constants;\nusing System;\nusing System.Collections.Generic;\nusing System.ComponentModel;\n\nnamespace SSDCPortal.Shared.DataInterfaces\n{\n public interface IMessage\n {\n Int32 Id { get; set; }\n\n String UserName { get; set; }\n\n String Text { get; set; }\n\n DateTime When { get; set; }\n\n Guid UserID { get; set; }\n\n IApplicationUser Sender { get; set; }\n\n }\n}\n"}}},{"rowIdx":2031,"cells":{"text":{"kind":"string","value":"-- @testpoint:opengauss关键字command_function_code(非保留),作为角色名\n\n\n--关键字不带引号-成功\ndrop role if exists command_function_code;\ncreate role command_function_code with password 'gauss@123' valid until '2020-12-31';\n\n--关键字带双引号-成功\ndrop role if exists \"command_function_code\";\ncreate role \"command_function_code\" with password 'gauss@123' valid until '2020-12-31';\n\n--关键字带单引号-合理报错\ndrop role if exists 'command_function_code';\ncreate role 'command_function_code' with password 'gauss@123' valid until '2020-12-31';\n\n--关键字带反引号-合理报错\ndrop role if exists `command_function_code`;\ncreate role `command_function_code` with password 'gauss@123' valid until '2020-12-31';\n"}}},{"rowIdx":2032,"cells":{"text":{"kind":"string","value":"package ru.otus.otuskotlin.marketplace.backend.repo.dynamo\n\nimport ru.otus.otuskotlin.marketplace.backend.repo.test.*\nimport java.util.*\n\n/*\nНикогда так не делайте как в этом тесте.\n\nЗдесь используется боевая база данных dynamoDB с тестовыми таблицами.\n\nВ итоге, таблицы создаются после каждой сборки. Если их не подчищать, может набежать довольно крупная сумма за\nиспользование dynamoDB в AWS.\n\nТестирование необходимо выполнять с помощью тестового Docker-образа dynamoDb:\nhttps://hub.docker.com/r/amazon/dynamodb-local\n\n */\n\n//class RepoAdDynamoCreateTest: RepoAdCreateTest() {\n// override val repo = RepoAdDynamo(initObjects = initObjects, table = \"test-${UUID.randomUUID()}\")\n//}\n//\n//class RepoAdDynamoReadTest: RepoAdReadTest() {\n// override val repo = RepoAdDynamo(initObjects = initObjects, table = \"test-${UUID.randomUUID()}\")\n//}\n//\n//class RepoAdDynamoUpdateTest: RepoAdUpdateTest() {\n// override val repo = RepoAdDynamo(initObjects = initObjects, table = \"test-${UUID.randomUUID()}\")\n//}\n//\n//class RepoAdDynamoDeleteTest: RepoAdDeleteTest() {\n// override val repo = RepoAdDynamo(initObjects = initObjects, table = \"test-${UUID.randomUUID()}\")\n//}\n//\n//// Не проходит с глобальными индексами, индекс не успевает обновиться\n//class RepoAdDynamoSearchTest: RepoAdSearchTest() {\n// override val repo = RepoAdDynamo(initObjects = initObjects, table = \"test-${UUID.randomUUID()}\")\n//}\n"}}},{"rowIdx":2033,"cells":{"text":{"kind":"string","value":"package com.foryouandme.data.repository.auth.network.request\n\nimport com.squareup.moshi.Json\n\ndata class LoginRequest(@Json(name = \"user\") val user: T)\n\ndata class PhoneLoginRequest(\n @Json(name = \"phone_number\") val phoneNumber: String,\n @Json(name = \"verification_code\") val verificationCode: String\n)\n\ndata class PinLoginRequest(\n @Json(name = \"email\") val pin: String,\n)"}}},{"rowIdx":2034,"cells":{"text":{"kind":"string","value":"package br.charles.repository;\n\nimport org.springframework.data.domain.Page;\nimport org.springframework.data.domain.Pageable;\nimport org.springframework.data.jpa.repository.Query;\nimport org.springframework.data.repository.PagingAndSortingRepository;\nimport org.springframework.data.repository.query.Param;\nimport org.springframework.stereotype.Repository;\n\nimport br.charles.model.Contato;\n\n@Repository\npublic interface ContatoRepository extends PagingAndSortingRepository {\n\n public Page findAll(Pageable pageable);\n\n @Query(\"SELECT p FROM Contato p \"\n + \"WHERE lower(nome) like %:busca% \")\n public Page busca(@Param(\"busca\") String busca, Pageable pageable);\n\n}\n"}}},{"rowIdx":2035,"cells":{"text":{"kind":"string","value":"import 'core/room.dart';\nimport 'options.dart';\n\n/// Main entry point to connect to a room.\n/// {@category Room}\nclass LiveKitClient {\n static const version = '1.0.0';\n\n /// Convenience method for connecting to a LiveKit server.\n /// Returns a [Room] upon a successful connect or throws when it fails.\n /// Alternatively, it is possible to instantiate [Room] and call [Room.connect] directly.\n static Future connect(\n String url,\n String token, {\n ConnectOptions? connectOptions,\n RoomOptions? roomOptions,\n }) async {\n final room = Room();\n try {\n await room.connect(\n url,\n token,\n connectOptions: connectOptions,\n roomOptions: roomOptions,\n );\n return room;\n } catch (error) {\n await room.dispose();\n rethrow;\n }\n }\n}\n"}}},{"rowIdx":2036,"cells":{"text":{"kind":"string","value":"/*\n * Copyright 2018 Nazmul Idris. All rights reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nobject GradlePlugins {\n data class Versions(val gradle: String = \"3.3.0\",\n val kotlin: String = \"1.3.20\",\n val junit5: String = \"1.2.0.0\")\n\n val versions = Versions()\n\n val gradle = \"com.android.tools.build:gradle:${versions.gradle}\"\n\n val kotlin = \"org.jetbrains.kotlin:kotlin-gradle-plugin:${versions.kotlin}\"\n\n val junit5 = \"de.mannodermaus.gradle.plugins:android-junit5:${versions.junit5}\"\n}\n\nobject Versions {\n val compile_sdk = 28\n val target_sdk = 26\n val min_sdk = 16\n}\n\nobject Deps {\n data class Versions(val arch_comp: String = \"2.0.0\",\n val design: String = \"1.0.0\",\n val gson: String = \"2.8.5\",\n val gms: String = \"16.0.0\",\n val places: String = \"1.0.0\",\n val dagger2: String = \"2.17\",\n val junit5: String = \"5.2.0\",\n val crayon: String = \"0.1.0\")\n\n val versions = Versions()\n\n val kotlin_stdlib_jdk8 =\n \"org.jetbrains.kotlin:kotlin-stdlib-jdk8:${GradlePlugins.versions.kotlin}\"\n\n val arch_comp = \"androidx.lifecycle:lifecycle-extensions:${versions.arch_comp}\"\n val arch_comp_annotation = \"androidx.lifecycle:lifecycle-compiler:${versions.arch_comp}\"\n\n val material_design = \"com.google.android.material:material:${versions.design}\"\n val vector_drawable = \"androidx.vectordrawable:vectordrawable:${versions.design}\"\n val recycler_view = \"androidx.recyclerview:recyclerview:${versions.design}\"\n\n val gms_places = \"com.google.android.libraries.places:places-compat:${versions.places}\"\n val gms_location = \"com.google.android.gms:play-services-location:${versions.gms}\"\n\n val gson = \"com.google.code.gson:gson:${versions.gson}\"\n\n val dagger2 = \"com.google.dagger:dagger:${versions.dagger2}\"\n val dagger2_annotation = \"com.google.dagger:dagger-compiler:${versions.dagger2}\"\n\n val crayon = \"com.importre:crayon:${versions.crayon}\"\n}\n\nobject TestingDeps {\n data class Versions(val assertj: String = \"3.11.1\",\n val junit5: String = \"5.2.0\",\n val mockk: String = \"1.8.9\",\n val roboelectric: String = \"3.8\",\n val junit4: String = \"4.12\")\n\n val versions = Versions()\n\n val junit5_jupiter = \"org.junit.jupiter:junit-jupiter-api:${versions.junit5}\"\n val junit5_jupiter_runtime = \"org.junit.jupiter:junit-jupiter-engine:${versions.junit5}\"\n val junit5_jupiter_params = \"org.junit.jupiter:junit-jupiter-params:${versions.junit5}\"\n val junit4_legacy = \"junit:junit:${versions.junit4}\"\n val junit5_vintage = \"org.junit.vintage:junit-vintage-engine:${versions.junit5}\"\n\n val assertj = \"org.assertj:assertj-core:${versions.assertj}\"\n\n val mockk = \"io.mockk:mockk:${versions.mockk}\"\n\n val roboelectric = \"org.robolectric:robolectric:${versions.roboelectric}\"\n}"}}},{"rowIdx":2037,"cells":{"text":{"kind":"string","value":"# Data Access 数据访问\n\n本单元创建于2015-10-22,用于存储C#中与数据有关的章节内容。\n\n其中将涉及以下几个方面的内容:\n\n* **文件系统数据**\n* **XML**\n* **LINQ简介**\n* **应用LINQ**"}}},{"rowIdx":2038,"cells":{"text":{"kind":"string","value":"package org.jim.common.cluster;\n\nimport java.util.UUID;\n\nimport org.jim.common.ImPacket;\n/**\n * 成员变量group, userid, ip谁有值就发给谁,toAll为true则发给所有
\n * packet是不允许为null的\n * @author WChao \n * 2018年05月20日 下午3:10:29\n */\npublic class ImClusterVo implements java.io.Serializable {\n\tprivate static final long serialVersionUID = 6978027913776155664L;\n\t\n\tpublic static final String CLIENTID = UUID.randomUUID().toString();\n\n\tprivate ImPacket packet;\n\n\tprivate String clientId = CLIENTID;\n\t\n\tprivate String group;\n\n\tprivate String userid;\n\t\n\tprivate String token;\n\t\n\tprivate String ip;\n\t\n\t/**\n\t * ChannelContext'id\n\t */\n\tprivate String channelId;\n\t\n\tprivate boolean toAll = false;\n\n\tpublic ImPacket getPacket() {\n\t\treturn packet;\n\t}\n\n\tpublic void setPacket(ImPacket packet) {\n\t\tthis.packet = packet;\n\t}\n\n\tpublic String getGroup() {\n\t\treturn group;\n\t}\n\n\tpublic void setGroup(String group) {\n\t\tthis.group = group;\n\t}\n\n\tpublic String getUserid() {\n\t\treturn userid;\n\t}\n\n\tpublic void setUserid(String userid) {\n\t\tthis.userid = userid;\n\t}\n\n\tpublic String getIp() {\n\t\treturn ip;\n\t}\n\n\tpublic void setIp(String ip) {\n\t\tthis.ip = ip;\n\t}\n\n\t/**\n\t * \n\t * @author: WChao\n\t */\n\tpublic ImClusterVo() {\n\t}\n\t\n\tpublic ImClusterVo(ImPacket packet) {\n\t\tthis.packet = packet;\n\t}\n\n\t/**\n\t * @param args\n\t * @author: WChao\n\t */\n\tpublic static void main(String[] args) {\n\n\t}\n\n\tpublic boolean isToAll() {\n\t\treturn toAll;\n\t}\n\n\tpublic void setToAll(boolean toAll) {\n\t\tthis.toAll = toAll;\n\t}\n\n\tpublic String getClientId() {\n\t\treturn clientId;\n\t}\n\n\tpublic void setClientId(String clientId) {\n\t\tthis.clientId = clientId;\n\t}\n\n\tpublic String getChannelId() {\n\t\treturn channelId;\n\t}\n\n\tpublic void setChannelId(String channelId) {\n\t\tthis.channelId = channelId;\n\t}\n\n\tpublic String getToken() {\n\t\treturn token;\n\t}\n\n\tpublic void setToken(String token) {\n\t\tthis.token = token;\n\t}\n}\n"}}},{"rowIdx":2039,"cells":{"text":{"kind":"string","value":"---\ntitle: JPA Relation 2\nauthor: Njade\ndate: 2020-12-16 00:25:00 +0900\ncategories: [JPA]\ntags: [JPA]\n---\n\n이 게시글은 인프런의 [김영한님의 강의](https://www.inflearn.com/course/ORM-JPA-Basic)를 보고 정리한 것입니다.\n실전 예제의 팁 등을 제외한 코드는 첨부하지 않습니다. 강의를 봐주세요.\n\n---\n\n## 다양한 연관관계 맵핑\n---\n* 연관관계 맵핑시 고려사항 3가지\n* 다대일 [N:1]\n* 일대다 [1:N]\n* 일대일 [1:1]\n* 다대다 [N:N]\n\n---\n\n## 연관관계 맵핑시 고려사항 3가지\n---\n### 다중성\n* 다대일: @ManyToOne\n* 일대다: @OneToMany\n* 일대일: @OneToOne\n* 다대다: @ManyToMany > 실무에서 사용하지 말 것\n\n### 단방향, 양방향\n* 테이블: 외래 키 하나로 양쪽 조인이 가능한 방향이라는 개념이 없음\n* 객체: 참조용 필드가 있는 쪽만 참조 가능, 한쪽만 참조하면 단방향, 양쪽 참조면 양방향\n\n### 연관관계의 주인\n* 테이블은 외래 키가 하나\n* 객체는 참조가 2군데\n* 두 객체 중 테이블의 외래 키를 관리할 곳을 정해야 함.\n* 외래 키를 관리하는 참조가 있는 곳이 주인\n* 주인이 아닌 곳은 조회만 가능\n* 다대일, 일대다 등에서 앞에 나오는 것이 주인\n\n---\n\n## 다대일 [N:1]\n---\n* 다대일 단방향상황에서 다대일 양방향으로의 확장은 테이블에 영향을 주지 않고 코드상으로만 추가가 가능\n* 외래키가 있는 쪽이 연관관계의 주인\n\n---\n\n## 일대다 [1:N]\n---\n* 1이 주인\n* 일반적으로 권장하지 않음.\n* 테이블에서 생각하면 N쪽에 무조건 외래키가 들어감.\n* 이 경우 1쪽 객체가 바뀌면 자신의 테이블이 아닌 다른 테이블로 sql이 실행되어 쿼리가 한 번 더 나감.\n* 객체와 테이블의 관계를 명확하게 파악하지 않으면 코드와 sql이 맵핑되지 않아 해석상의 어려움이 생길 수 있음.\n* 객체지향적으로는 살짝 부적절하더라도 DB설계에 맞춰 다대일 관계로 설계로 바꾸는 것이 좋음.\n* @JoinColumn을 꼭 사용하여야 하며 이를 사용하지 않으면 조인 테이블을 사용하게 됨. (테이블이 하나 더 생김.) \n* 일대다 양방향은 공식적으로 존재하지 않지만 사용은 가능하다.\n```java\n@ManyToOne\n@JoinColumn(insertable = false, updatable = false)\n```\n* 위 두개의 어노테이션을 통해 읽기 전용 필드를 사용해서 양방향처럼 사용하는 방법이다.\n\n---\n\n## 일대일 [1:1]\n---\n* 일대일은 반대도 일대일\n* 주 테이블이나 대상 테이블 중에 외래 키 선택이 가능\n* 외래 키에 DB에 유니크 제약조건이 추가되어야 한다.\n* 다대일과 유사함.\n* 외래키가 있는 곳이 연관관계의 주인\n* 반대편은 mappedBy 적용.\n* 일대일이지만 대상 테이블에 외래키가 있는 단방향의 경우 JPA가 지원해주지 않음.\n* 양방향인 경우에는 대상 테이블에 외래키가 있으면 가능하지만 사실 일대일 주 테이블의 양방향과 동일.\n\n### 주 테이블에 외래키\n* 개발상에서는 주 테이블에 외래키를 가지고 있는 경우 JPA 맵핑이 편리하고 추가 쿼리를 실행할 필요가 없어 이점이 있을 수 있다.\n* 값이 없으면 외래키에 null이 들어가는 등의 단점이 있을 수 있다.\n\n### 대상 테이블에 외래 키\n* DB관점에서 일대다가 되는 경우 테이블 구조가 유지될 수 있다.\n* 프록시 기능의 한계로 지연 로딩으로 설정하여도 어차피 쿼리해봐야 결과를 알 수 있기 때문에 항상 즉시 로딩된다.\n\n---\n\n## 다대다 [N:M]\n---\n* 실무에서는 사용하지 말 것.\n* RDB에서 정규화된 테이블은 다대다가 표현이 안됨.\n* 연결 테이블을 사용해서 일대다, 다대일 관계로 표현해야 함.\n* 객체는 컬렉션을 사용해서 객체 2개로 다대다 관계가 가능.\n* @ManyToMany, @JoinTable로 지정이 가능하며 단방향, 양방향이 가능하다.\n* 양방향은 동일하게 mappedBy를 사용해야함.\n* 실무에서는 연결 테이블이 연결만 하고 끝나는 일이 없고 추가정보가 꼭 들어가기 때문에 사용하지 않는 것이 좋음.\n* 연결 테이블용 엔티티를 만드는 것이 좋다.\n* 연결 테이블도 PK는 의미없는 sequence를 사용하는 것이 유연해질 수 있다.\n\n---\n\n## 실전 예제\n---\n* JPA는 parent와 같은 형태의 셀프 맵핑도 가능함.\n* 실무에서 중간 테이블은 단순하지 않으므로 @ManyToMany를 사용하지 말 것.\n* @JoinColumn은 외래키를 맵핑할 때 사용\n * name: 매핑할 외래 키 이름\n * referencedColumnName: 외래 키가 참조하는 대상 테이블의 컬럼명\n * foreignKey(DDL): 외래키 제약조건을 직접 지정\n * 이외에는 @Column과 동일\n* @ManyToOne\n * optional: 기본값 true\n * fetch: 기본값 EAGER\n * cascaed: 영속성 전이 기능\n * targetEntity: 연관된 엔티티의 타입 정보를 설정, 거의 사용하지 않음. 컬렉션 제네릭으로 타입 정보 추론.\n * 스펙상 mappedBy가 없음. > 무조건 주인이 되어야 함.\n* @OneToMany\n * mappedBy: 주인 필드 선택\n * fetch: 기본값 LAZY\n * cascaed: 영속성 전이 기능\n * targetEntity: 연관된 엔티티의 타입 정보를 설정, 거의 사용하지 않음. 컬렉션 제네릭으로 타입 정보 추론.\n \n---\n\n# Reference\n---\n- [인프런](https://www.inflearn.com/course/ORM-JPA-Basic)\n"}}},{"rowIdx":2040,"cells":{"text":{"kind":"string","value":"from pyspark.sql.functions import col\n\n\"\"\"Toy join function to showcase spark functions.\"\"\"\n\ndef join_dataframes(left, right, columns_left, columns_right, join_type='inner'):\n if len(columns_left) == len(columns_right) and len(columns_left) > 0:\n cond = [col(left_col) == col(right_col) for (left_col, right_col) in zip(columns_left, columns_right)]\n return left.join(right, cond, join_type)\n raise Error('Columns parameters don\\'t match or empty')\n"}}},{"rowIdx":2041,"cells":{"text":{"kind":"string","value":"---\ntitle: Good News\ndate: 2017-03-11 14:45:00 Z\ntags:\n- shopify\n- e-commerce\n- design\n- fashion\n- clothing\n- apparel\n- shoes\n- trainers\nimage: \"/uploads/255-goodnews@2x.jpg\"\nstore-link: https://goodnews.london\ncredit: Not\ncredit-link: http://not-studio.com\n---\n\n"}}},{"rowIdx":2042,"cells":{"text":{"kind":"string","value":"#!ruby\n\nstart_num = ARGV[0].hex\nend_num = ARGV[1].hex\nstart_num.upto(end_num) do |n|\n puts sprintf('0x%04x', n)\nend\n"}}},{"rowIdx":2043,"cells":{"text":{"kind":"string","value":"package xyz.gillall.demoapp.ui.pixabay.videogallery\n\nimport android.os.Bundle\nimport android.view.LayoutInflater\nimport android.view.View\nimport android.view.ViewGroup\nimport androidx.databinding.DataBindingUtil\nimport androidx.fragment.app.Fragment\nimport androidx.navigation.NavController\nimport androidx.navigation.fragment.NavHostFragment\nimport org.koin.androidx.viewmodel.ext.android.getViewModel\nimport xyz.gillall.demoapp.R\nimport xyz.gillall.demoapp.databinding.FragmentVideoGalleryBinding\n\nclass VideoGalleryFragment : Fragment() {\n\n private lateinit var binding: FragmentVideoGalleryBinding\n private lateinit var navController: NavController\n private lateinit var viewModel: VideoGalleryViewModel\n\n override fun onCreateView(\n inflater: LayoutInflater, container: ViewGroup?,\n savedInstanceState: Bundle?\n ): View {\n viewModel = getViewModel()\n binding = DataBindingUtil\n .inflate(inflater, R.layout.fragment_video_gallery, container, false)\n binding.root.context\n binding.lifecycleOwner = this\n binding.viewModel = viewModel\n viewModel.updateByViewModel.observe(viewLifecycleOwner, {\n when (it.action) {\n \"update\" -> binding.viewModel = viewModel\n }\n })\n navController = NavHostFragment.findNavController(this)\n\n return binding.root\n }\n}"}}},{"rowIdx":2044,"cells":{"text":{"kind":"string","value":"_Closure = \\Closure::fromCallable($closure);\n // $this->_Closure = new \\Closure::($closure);\n $this->_Closure = $closure;\n }\n\n /**\n * Allows to call the delegate object directly.\n *\n * @param list ...$args variable numbers of arguments.\n *\n * @return mixed\n */\n public function __invoke(...$args)\n {\n return call_user_func_array($this->_Closure, $args);\n }\n}\n\n/**\n * defines a type for event arguments.\n */\nclass EventArgs\n{\n protected $_Sender;\n\n /**\n * construct.\n *\n * @param mixed $sender\n */\n public function __construct($sender = null)\n {\n $this->_Sender = $sender;\n }\n\n /**\n * property-read.\n *\n * @return object should contain the event emitting object.\n */\n final public function Sender()\n {\n return $this->_Sender;\n }\n}\n\n/**\n * a basic event type for the delegate.\n */\nclass Event\n{\n private $_Receivers = array();\n\n /**\n * Undocumented function\n *\n * @param Delegate $delegate\n *\n * @return Event\n */\n final public function Add(Delegate $delegate)\n {\n $this->_Receivers[] = $delegate;\n return $this;\n }\n\n /**\n * fires the event.\n *\n * @param EventArgs $args\n *\n * @return void\n */\n final public function Trigger(EventArgs $args)\n {\n foreach ($this->_Receivers as $delegate) {\n $delegate($args);\n }\n }\n}\n\n// declare anonymous function as delegate.\n$myDelegate = new Delegate(function(EventArgs $args) {\n echo 'anonymous function' . PHP_EOL;\n});\n\n// declare event, assign the delegate, trigger event.\n$myEvent = new Event();\n$myEvent->Add($myDelegate);\n\n/**\n * Defines a simple type that can handle events.\n */\nclass DemoEventHandler\n{\n public function onEvent(EventArgs $args)\n {\n echo 'class event handler' . PHP_EOL;\n }\n}\n\n// test event handler\n$controller = new DemoEventHandler();\n$myEvent->Add(new Delegate(array($controller, 'onEvent')));\n$myEvent->Trigger(new EventArgs($myEvent));\n"}}},{"rowIdx":2045,"cells":{"text":{"kind":"string","value":"class ChangeSpaceObjCol < ActiveRecord::Migration[6.0]\n def change\n rename_column :album_space_objs, :object_id, :space_obj_id\n end\nend\n"}}},{"rowIdx":2046,"cells":{"text":{"kind":"string","value":"module OodJob\n # A class that handles the communication with a resource manager for\n # submitting/statusing/holding/deleting jobs\n # @abstract\n class Adapter\n # The root exception class that all {Adapter} exceptions inherit from\n class Error < StandardError; end\n\n # The cluster used in submitting, querying status, and controlling jobs\n # @return [OodCluster::Cluster] cluster to communicate with\n attr_reader :cluster\n\n # @param cluster [OodCluster::Cluster] cluster that job is submitted to\n def initialize(cluster:, **_)\n @cluster = cluster\n end\n\n # Submit a job with the attributes defined in the job template instance\n # @abstract Subclass is expected to implement {#submit}\n # @raise [NotImplementedError] if subclass did not define {#submit}\n # @example Submit job template to cluster\n # solver_id = OodJob::Job.submit(script: solver_script)\n # #=> \"1234.server\"\n # @example Submit job that depends on previous job\n # post_id = OodJob::Job.submit(\n # script: post_script,\n # afterok: solver_id\n # )\n # #=> \"1235.server\"\n # @param script [Script] script object that describes the script and\n # attributes for the submitted job\n # @param after [#to_s, Array<#to_s>] this job may be scheduled for execution\n # at any point after dependent jobs have started execution\n # @param afterok [#to_s, Array<#to_s>] this job may be scheduled for\n # execution only after dependent jobs have terminated with no errors\n # @param afternotok [#to_s, Array<#to_s>] this job may be scheduled for\n # execution only after dependent jobs have terminated with errors\n # @param afterany [#to_s, Array<#to_s>] this job may be scheduled for\n # execution after dependent jobs have terminated\n # @return [String] the job id returned after successfully submitting a job\n def submit(script:, after: [], afterok: [], afternotok: [], afterany: [])\n raise NotImplementedError, \"subclass did not define #submit\"\n end\n\n # Retrieve job info from the resource manager\n # @abstract Subclass is expected to implement {#info}\n # @raise [NotImplementedError] if subclass did not define {#info}\n # @param id [#to_s] the id of the job, otherwise get list of all jobs\n # running on cluster\n # @return [Info, Array] information describing submitted job\n def info(id: '')\n raise NotImplementedError, \"subclass did not define #info\"\n end\n\n # Retrieve job status from resource manager\n # @note Optimized slightly over retrieving complete job information from server\n # @abstract Subclass is expected to implement {#status}\n # @raise [NotImplementedError] if subclass did not define {#status}\n # @param id [#to_s] the id of the job\n # @return [Status] status of job\n def status(id:)\n raise NotImplementedError, \"subclass did not define #status\"\n end\n\n # Put the submitted job on hold\n # @abstract Subclass is expected to implement {#hold}\n # @raise [NotImplementedError] if subclass did not define {#hold}\n # @param id [#to_s] the id of the job\n # @return [void]\n def hold(id:)\n raise NotImplementedError, \"subclass did not define #hold\"\n end\n\n # Release the job that is on hold\n # @abstract Subclass is expected to implement {#release}\n # @raise [NotImplementedError] if subclass did not define {#release}\n # @param id [#to_s] the id of the job\n # @return [void]\n def release(id:)\n raise NotImplementedError, \"subclass did not define #release\"\n end\n\n # Delete the submitted job\n # @abstract Subclass is expected to implement {#delete}\n # @raise [NotImplementedError] if subclass did not define {#delete}\n # @param id [#to_s] the id of the job\n # @return [void]\n def delete(id:)\n raise NotImplementedError, \"subclass did not define #delete\"\n end\n\n private\n # Reduce an array to unique objects with count\n # [\"a\", \"a\", \"b\"] #=> {\"a\" => 2, \"b\" => 1}\n def uniq_array(ary)\n ary.group_by {|v| v}.each_with_object({}) {|(k, v), h| h[k] = v.size}\n end\n end\nend\n"}}},{"rowIdx":2047,"cells":{"text":{"kind":"string","value":"#!/usr/bin/perl\n# Copyright (c) 2021 Tom Hancocks\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in all\n# copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\n# Trim function\nsub trim\n{\n\tmy $str = $_[0];\n\t$str =~ s/^\\s+|\\n+//g;\n\treturn $str;\n}\n\n# We need to extract certain information about the binary and the application\n# bundle that we are installing.\nmy ($bin_path) = @ARGV;\n\nif (!defined $bin_path) {\n\tdie(\"You must provide a mach-o binary.\");\n}\n\nmy $macos_path = trim(`dirname ${bin_path}`);\nmy $contents_path = trim(`dirname ${macos_path}`);\nmy $frameworks_path = trim(\"${contents_path}/Frameworks\");\n\n# Make sure the frameworks directory actually exists in the application bundle.\n`mkdir -p ${frameworks_path}`;\n\n# Setup a selection of functions that are responsible for moving files and\n# altering linking.\nsub install_name_tool\n{\n\tlocal ($path, $dylib_path) = ($_[0], $_[1]);\n\tlocal $dylib_name = trim(`basename ${dylib_path}`);\n\tlocal $dylib_link_path = \"\\@executable_path/../Frameworks/${dylib_name}\";\n\t`install_name_tool -change \"${dylib_path}\" \"${dylib_link_path}\" \"${path}\"`;\n}\n\nsub get_dylib_install_path\n{\n\tlocal ($dylib_name) = (trim(`basename $_[0]`));\n\treturn \"${frameworks_path}/${dylib_name}\";\n}\n\nsub copy_dylib\n{\n\tlocal ($dylib_path, $dylib_name) = ($_[0], get_dylib_install_path($_[0]));\n\t`cp -v ${dylib_path} ${dylib_install_path}`;\n\t`chmod 0755 ${dylib_install_path}`;\n\treturn $dylib_install_path;\n}\n\n# Setup a subroutine to handle the actual DYLIB installation. This is \n# unfortunately a recursive operation, as actual DYLIBs can reference other\n# DYLIBs.\nsub install_dylib\n{\n\tlocal $base = $_[0];\n\tlocal @result = split /\\n/, `otool -L ${base}`;\n\twhile (local $dylib = shift(@result)) {\n\t\tlocal $dylib_path = trim((split / /, $dylib)[0]);\n\t\tlocal $dylib_install_path = get_dylib_install_path($dylib_path);\n\n\t\tif ($dylib_install_path eq $base) {\n\t\t\tinstall_name_tool($dylib_install_path, $dylib_path);\n\t\t}\n\n\t\t# Check if the DYLIB is a user one (located in a brew install location)\n\t\telsif ((rindex $dylib_path, \"/usr/local\") == 0) {\n\t\t\tlocal $dylib_install_path = copy_dylib($dylib_path);\n\t\t\tprint(\"Installing DYLIB to ${dylib_install_path}\\n\");\n\t\t\tinstall_name_tool($base, $dylib_path);\n\t\t\tinstall_dylib($dylib_install_path);\n\t\t}\n\t}\n}\n\ninstall_dylib($bin_path);"}}},{"rowIdx":2048,"cells":{"text":{"kind":"string","value":"package m54tom55\n\nimport (\n\t\"testing\"\n\n\t\"github.com/gogo/protobuf/proto\"\n\t\"github.com/pkg/errors\"\n\t\"github.com/stackrox/rox/generated/storage\"\n\t\"github.com/stackrox/rox/pkg/testutils\"\n\t\"github.com/stretchr/testify/assert\"\n\t\"github.com/stretchr/testify/require\"\n\tbolt \"go.etcd.io/bbolt\"\n)\n\nvar (\n\t// Sections are not essential for the test\n\t// but are required for a policy to be valid.\n\tsections = []*storage.PolicySection{\n\t\t{\n\t\t\tPolicyGroups: []*storage.PolicyGroup{\n\t\t\t\t{\n\t\t\t\t\tFieldName: \"CVSS\",\n\t\t\t\t\tValues: []*storage.PolicyValue{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tValue: \">= 7.000000\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\n\texclusions = []*storage.Exclusion{\n\t\t{\n\t\t\tName: \"42\",\n\t\t},\n\t}\n\n\toriginalPolicies = []*storage.Policy{\n\t\t{\n\t\t\tId: \"0\",\n\t\t\tName: \"policy 0 with no whitelists\",\n\t\t\tPolicyVersion: oldVersion,\n\t\t\tPolicySections: sections,\n\t\t},\n\t\t{\n\t\t\tId: \"1\",\n\t\t\tName: \"policy 1 with a whitelist\",\n\t\t\tPolicyVersion: oldVersion,\n\t\t\tPolicySections: sections,\n\t\t\tWhitelists: exclusions,\n\t\t},\n\t\t{\n\t\t\tId: \"2\",\n\t\t\tName: \"policy 2 with both a whitelist and an exclusion\",\n\t\t\tPolicyVersion: oldVersion,\n\t\t\tPolicySections: sections,\n\t\t\tWhitelists: exclusions,\n\t\t\tExclusions: exclusions,\n\t\t},\n\t\t{\n\t\t\tId: \"3\",\n\t\t\tName: \"policy 3 with an exclusion but the old version\",\n\t\t\tPolicyVersion: oldVersion,\n\t\t\tPolicySections: sections,\n\t\t\tExclusions: exclusions,\n\t\t},\n\t\t{\n\t\t\tId: \"4\",\n\t\t\tName: \"policy 4 with an exclusion and the new version\",\n\t\t\tPolicyVersion: newVersion,\n\t\t\tPolicySections: sections,\n\t\t\tExclusions: exclusions,\n\t\t},\n\t\t{\n\t\t\tId: \"5\",\n\t\t\tName: \"policy 5 with no exclusion and and the new version\",\n\t\t\tPolicyVersion: newVersion,\n\t\t\tPolicySections: sections,\n\t\t},\n\t\t{\n\t\t\tId: \"6\",\n\t\t\tName: \"policy 6 with a whitelist and the new version\",\n\t\t\tPolicyVersion: newVersion,\n\t\t\tPolicySections: sections,\n\t\t\tWhitelists: exclusions,\n\t\t},\n\t}\n\n\texpectedPolicies = []*storage.Policy{\n\t\t{\n\t\t\tId: \"0\",\n\t\t\tName: \"policy 0 with no whitelists\",\n\t\t\tPolicyVersion: newVersion,\n\t\t\tPolicySections: sections,\n\t\t},\n\t\t{\n\t\t\tId: \"1\",\n\t\t\tName: \"policy 1 with a whitelist\",\n\t\t\tPolicyVersion: newVersion,\n\t\t\tPolicySections: sections,\n\t\t\tExclusions: exclusions,\n\t\t},\n\t\t{\n\t\t\tId: \"2\",\n\t\t\tName: \"policy 2 with both a whitelist and an exclusion\",\n\t\t\tPolicyVersion: newVersion,\n\t\t\tPolicySections: sections,\n\t\t\tExclusions: append(exclusions, exclusions...),\n\t\t},\n\t\t{\n\t\t\tId: \"3\",\n\t\t\tName: \"policy 3 with an exclusion but the old version\",\n\t\t\tPolicyVersion: newVersion,\n\t\t\tPolicySections: sections,\n\t\t\tExclusions: exclusions,\n\t\t},\n\t\t{\n\t\t\tId: \"4\",\n\t\t\tName: \"policy 4 with an exclusion and the new version\",\n\t\t\tPolicyVersion: newVersion,\n\t\t\tPolicySections: sections,\n\t\t\tExclusions: exclusions,\n\t\t},\n\t\t{\n\t\t\tId: \"5\",\n\t\t\tName: \"policy 5 with no exclusion and and the new version\",\n\t\t\tPolicyVersion: newVersion,\n\t\t\tPolicySections: sections,\n\t\t},\n\t\t{\n\t\t\tId: \"6\",\n\t\t\tName: \"policy 6 with a whitelist and the new version\",\n\t\t\tPolicyVersion: newVersion,\n\t\t\tPolicySections: sections,\n\t\t\tExclusions: exclusions,\n\t\t},\n\t}\n)\n\nfunc TestPolicyMigration(t *testing.T) {\n\tdb := testutils.DBForT(t)\n\n\terr := db.Update(func(tx *bolt.Tx) error {\n\t\tbucket, err := tx.CreateBucket(policyBucket)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tfor _, policy := range originalPolicies {\n\t\t\tbytes, err := proto.Marshal(policy)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif err := bucket.Put([]byte(policy.GetId()), bytes); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t})\n\trequire.NoError(t, err, \"Prepare test policy bucket\")\n\n\terr = migrateWhitelistsToExclusions(db)\n\trequire.NoError(t, err, \"Run migration\")\n\n\tvar migratedPolicies []*storage.Policy\n\terr = db.View(func(tx *bolt.Tx) error {\n\t\tbucket := tx.Bucket(policyBucket)\n\t\tif bucket == nil {\n\t\t\treturn errors.Errorf(\"bucket %q not found\", policyBucket)\n\t\t}\n\t\treturn bucket.ForEach(func(_, obj []byte) error {\n\t\t\tpolicy := &storage.Policy{}\n\t\t\tif err := proto.Unmarshal(obj, policy); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tmigratedPolicies = append(migratedPolicies, policy)\n\t\t\treturn nil\n\t\t})\n\t})\n\trequire.NoError(t, err, \"Read migrated policies from the bucket\")\n\n\tassert.ElementsMatch(t, expectedPolicies, migratedPolicies)\n}\n"}}},{"rowIdx":2049,"cells":{"text":{"kind":"string","value":"package activerecord\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"strings\"\n\n\t\"github.com/activegraph/activegraph/activesupport\"\n)\n\ntype ErrUnknownPrimaryKey struct {\n\tPrimaryKey string\n\tDescription string\n}\n\nfunc (e *ErrUnknownPrimaryKey) Error() string {\n\treturn fmt.Sprintf(\"Primary key is unknown, %s\", e.Description)\n}\n\ntype R struct {\n\trel *Relation\n\n\ttableName string\n\tprimaryKey string\n\tattrs attributesMap\n\tassocs associationsMap\n\tvalidators validatorsMap\n\treflection *Reflection\n\tconnections *connectionHandler\n}\n\n// TableName sets the table name explicitly.\n//\n//\tVertex := activerecord.New(\"vertex\", func(r *activerecord.R) {\n//\t\tr.TableName(\"vertices\")\n//\t})\nfunc (r *R) TableName(name string) {\n\tr.tableName = name\n}\n\nfunc (r *R) PrimaryKey(name string) {\n\tr.primaryKey = name\n}\n\nfunc (r *R) DefineAttribute(name string, t Type, validators ...AttributeValidator) {\n\tr.attrs[name] = attr{Name: name, Type: t}\n\tr.validators.include(name, typeValidator{t})\n\tr.validators.include(name, validators...)\n}\n\nfunc (r *R) Validates(name string, validator AttributeValidator) {\n\tif v, ok := validator.(activesupport.Initializer); ok {\n\t\terr := v.Initialize()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\t// activesupport.Err(err).Unwrap()\n\t}\n\tr.validators.include(name, validator)\n}\n\nfunc (r *R) ValidatesPresence(names ...string) {\n\tr.validators.extend(names, new(Presence))\n}\n\nfunc (r *R) BelongsTo(name string, init ...func(*BelongsTo)) {\n\tassoc := BelongsTo{targetName: name, owner: r.rel, reflection: r.reflection}\n\n\tswitch len(init) {\n\tcase 0:\n\tcase 1:\n\t\tinit[0](&assoc)\n\tdefault:\n\t\tpanic(activesupport.ErrMultipleVariadicArguments{Name: \"init\"})\n\t}\n\n\tr.attrs[assoc.AssociationForeignKey()] = attr{\n\t\tName: assoc.AssociationForeignKey(),\n\t\tType: Nil{new(Int64)},\n\t}\n\tr.assocs[name] = &assoc\n}\n\nfunc (r *R) HasMany(name string) {\n\t// TODO: Define library methods to pluralize words.\n\ttargetName := strings.TrimSuffix(name, \"s\")\n\n\t// Use plural name for the name of attribute, while target name\n\t// of the association should be in singular (to find a target relation\n\t// through the reflection.\n\tr.assocs[name] = &HasMany{\n\t\ttargetName: targetName, owner: r.rel, reflection: r.reflection,\n\t}\n}\n\nfunc (r *R) HasOne(name string) {\n\tr.assocs[name] = &HasOne{targetName: name, owner: r.rel, reflection: r.reflection}\n}\n\nfunc (r *R) init(ctx context.Context, tableName string) error {\n\tconn, err := r.connections.RetrieveConnection(primaryConnectionName)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdefinitions, err := conn.ColumnDefinitions(ctx, tableName)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfor _, column := range definitions {\n\t\tcolumnType := column.Type\n\t\tif !column.NotNull {\n\t\t\tcolumnType = Nil{columnType}\n\t\t}\n\t\tr.DefineAttribute(column.Name, columnType)\n\n\t\tif column.IsPrimaryKey {\n\t\t\tr.PrimaryKey(column.Name)\n\t\t}\n\t}\n\treturn nil\n}\n\ntype Relation struct {\n\tname string\n\ttableName string\n\t// TODO: add *Reflection property.\n\t// reflection *Reflection\n\n\tconn Conn\n\tconnections *connectionHandler\n\n\tscope *attributes\n\tquery *QueryBuilder\n\tctx context.Context\n\n\tassociations\n\tvalidations\n\tAttributeMethods\n}\n\nfunc New(name string, init ...func(*R)) *Relation {\n\tvar (\n\t\trel *Relation\n\t\terr error\n\t)\n\tswitch len(init) {\n\tcase 0:\n\t\trel, err = Initialize(name, nil)\n\tcase 1:\n\t\trel, err = Initialize(name, init[0])\n\tdefault:\n\t\tpanic(&activesupport.ErrMultipleVariadicArguments{Name: \"init\"})\n\t}\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn rel\n}\n\nfunc Initialize(name string, init func(*R)) (*Relation, error) {\n\trel := &Relation{name: name}\n\n\tr := R{\n\t\trel: rel,\n\t\tassocs: make(associationsMap),\n\t\tattrs: make(attributesMap),\n\t\tvalidators: make(validatorsMap),\n\t\treflection: globalReflection,\n\t\tconnections: globalConnectionHandler,\n\t}\n\n\terr := r.init(context.TODO(), name+\"s\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif init != nil {\n\t\tinit(&r)\n\t}\n\n\t// When the primary key was assigned to record builder, mark it explicitely\n\t// wrapping with PrimaryKey structure. Otherwise, fallback to the default primary\n\t// key implementation.\n\tif r.primaryKey != \"\" {\n\t\tattr, ok := r.attrs[r.primaryKey]\n\t\tif !ok {\n\t\t\treturn nil, &ErrUnknownPrimaryKey{r.primaryKey, \"not in attributes\"}\n\t\t}\n\t\tr.attrs[r.primaryKey] = PrimaryKey{Attribute: attr}\n\t}\n\tif r.tableName == \"\" {\n\t\tr.tableName = name + \"s\"\n\t}\n\n\t// The scope is empty by default.\n\tscope, err := newAttributes(name, r.attrs.copy(), nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tassocs := newAssociations(name, r.assocs.copy(), r.reflection)\n\tvalidations := newValidations(r.validators.copy())\n\n\t// Create the model schema, and register it within a reflection instance.\n\trel.tableName = r.tableName\n\trel.scope = scope\n\trel.associations = *assocs\n\trel.validations = *validations\n\trel.connections = r.connections\n\trel.query = &QueryBuilder{from: r.tableName}\n\trel.AttributeMethods = scope\n\tr.reflection.AddReflection(name, rel)\n\n\treturn rel, nil\n}\n\nfunc (rel *Relation) TableName() string {\n\treturn rel.tableName\n}\n\nfunc (rel *Relation) Name() string {\n\treturn rel.name\n}\n\nfunc (rel *Relation) Copy() *Relation {\n\tscope := rel.scope.copy()\n\n\treturn &Relation{\n\t\tname: rel.name,\n\t\ttableName: rel.tableName,\n\t\tconn: rel.Connection(),\n\t\tconnections: rel.connections,\n\t\tscope: rel.scope.copy(),\n\t\tquery: rel.query.copy(),\n\t\tctx: rel.ctx,\n\t\tassociations: *rel.associations.copy(),\n\t\tvalidations: *rel.validations.copy(),\n\t\tAttributeMethods: scope,\n\t}\n}\n\nfunc (rel *Relation) empty() *Relation {\n\trel.scope, _ = newAttributes(rel.name, nil, nil)\n\treturn rel\n}\n\n// IsEmpty returns true if there are no records.\nfunc (rel *Relation) IsEmpty() bool {\n\t// TODO: implement the method.\n\treturn false\n}\n\nfunc (rel *Relation) Context() context.Context {\n\tif rel.ctx == nil {\n\t\treturn context.Background()\n\t}\n\treturn rel.ctx\n}\n\nfunc (rel *Relation) WithContext(ctx context.Context) *Relation {\n\tnewrel := rel.Copy()\n\tnewrel.ctx = ctx\n\treturn newrel\n}\n\nfunc (rel *Relation) Connect(conn Conn) *Relation {\n\tnewrel := rel.Copy()\n\tnewrel.conn = conn\n\treturn newrel\n}\n\nfunc (rel *Relation) Connection() Conn {\n\tif rel.conn != nil {\n\t\treturn rel.conn\n\t}\n\n\tconn, err := rel.connections.RetrieveConnection(primaryConnectionName)\n\tif err != nil {\n\t\treturn &errConn{err}\n\t}\n\treturn conn\n}\n\nfunc (rel *Relation) New(params ...map[string]interface{}) Result {\n\tswitch len(params) {\n\tcase 0:\n\t\treturn Return(rel.Initialize(nil))\n\tcase 1:\n\t\treturn Return(rel.Initialize(params[0]))\n\tdefault:\n\t\treturn Err(&activesupport.ErrMultipleVariadicArguments{Name: \"params\"})\n\t}\n}\n\nfunc (rel *Relation) Initialize(params map[string]interface{}) (*ActiveRecord, error) {\n\tattributes := rel.scope.clear()\n\terr := attributes.AssignAttributes(params)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\trec := &ActiveRecord{\n\t\tname: rel.name,\n\t\ttableName: rel.tableName,\n\t\tconn: rel.Connection(),\n\t\tattributes: attributes,\n\t\tassociations: rel.associations.copy(),\n\t\tvalidations: *rel.validations.copy(),\n\t}\n\treturn rec.init(), nil\n}\n\nfunc (rel *Relation) Create(params map[string]interface{}) Result {\n\treturn Return(rel.Initialize(params)).Insert()\n}\n\nfunc (rel *Relation) ExtractRecord(h activesupport.Hash) (*ActiveRecord, error) {\n\tvar (\n\t\tattrNames = rel.scope.AttributeNames()\n\t\tcolumnNames = rel.scope.ColumnNames()\n\t)\n\n\tparams := make(activesupport.Hash, len(attrNames))\n\tfor i, colName := range columnNames {\n\t\tattrName := attrNames[i]\n\t\tattr := rel.scope.AttributeForInspect(attrName)\n\n\t\tattrValue, err := attr.AttributeType().Deserialize(h[colName])\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tparams[attrName] = attrValue\n\t}\n\n\treturn rel.Initialize(params)\n}\n\n// PrimaryKey returns the attribute name of the record's primary key.\nfunc (rel *Relation) PrimaryKey() string {\n\treturn rel.scope.PrimaryKey()\n}\n\nfunc (rel *Relation) All() CollectionResult {\n\treturn ReturnCollection(rel, nil)\n}\n\n// TODO: move to the Schema type all column-related methods.\nfunc (rel *Relation) ColumnNames() []string {\n\treturn rel.scope.ColumnNames()\n}\n\nfunc (rel *Relation) Each(fn func(*ActiveRecord) error) error {\n\tq := rel.query.copy()\n\tq.Select(rel.ColumnNames()...)\n\n\t// Include all join dependencies into the query with fully-qualified column\n\t// names, so each part of the request can be extracted individually.\n\tfor _, join := range rel.query.joinValues {\n\t\tq.Select(join.Relation.ColumnNames()...)\n\t}\n\n\tvar lasterr error\n\n\terr := rel.Connection().ExecQuery(rel.Context(), q.Operation(), func(h activesupport.Hash) bool {\n\t\trec, e := rel.ExtractRecord(h)\n\t\tif lasterr = e; e != nil {\n\t\t\treturn false\n\t\t}\n\n\t\tfor _, join := range rel.query.joinValues {\n\t\t\tarec, e := join.Relation.ExtractRecord(h)\n\t\t\tif lasterr = e; e != nil {\n\t\t\t\treturn false\n\t\t\t}\n\n\t\t\te = rec.AssignAssociation(join.Relation.Name(), arec)\n\t\t\tif lasterr = e; e != nil {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\n\t\tif lasterr = fn(rec); lasterr != nil {\n\t\t\treturn false\n\t\t}\n\t\treturn true\n\t})\n\n\tif lasterr != nil {\n\t\treturn lasterr\n\t}\n\treturn err\n}\n\nfunc (rel *Relation) Where(cond string, arg interface{}) *Relation {\n\tnewrel := rel.Copy()\n\n\t// When the condition is a regular column, pass it through the regular\n\t// column comparison instead of query chain predicates.\n\tif newrel.scope.HasAttribute(cond) {\n\t\t// newrel.scope.AssignAttribute(cond, arg)\n\t\tnewrel.query.Where(fmt.Sprintf(\"%s = ?\", cond), arg)\n\t} else {\n\t\tnewrel.query.Where(cond, arg)\n\t}\n\treturn newrel\n}\n\n// Select allows to specify a subset of fields to return.\n//\n// Method returns a new relation, where a set of attributes is limited by the\n// specified list.\n//\n//\tModel.Select(\"field\", \"other_field\")\n//\t// #\n//\n// Accessing attributes of a Record that do not have fields retrieved by a select\n// except id with return nil.\n//\n//\tmodel, _ := Model.Select(\"field\").Find(1)\n//\tmodel.Attribute(\"other_field\") // Returns nil\nfunc (rel *Relation) Select(attrNames ...string) *Relation {\n\tnewrel := rel.Copy()\n\n\tif !newrel.scope.HasAttributes(attrNames...) {\n\t\treturn newrel.empty()\n\t}\n\n\tattrMap := make(map[string]struct{}, len(attrNames))\n\tfor _, attrName := range attrNames {\n\t\tattrMap[attrName] = struct{}{}\n\t}\n\n\tfor _, attrName := range newrel.scope.AttributeNames() {\n\t\tif _, ok := attrMap[attrName]; !ok {\n\t\t\tnewrel.scope.ExceptAttribute(attrName)\n\t\t}\n\t}\n\treturn newrel\n}\n\nfunc (rel *Relation) Group(attrNames ...string) *Relation {\n\tnewrel := rel.Copy()\n\n\t// When the attribute is not part of the scope, return an empty relation.\n\tif !newrel.scope.HasAttributes(attrNames...) {\n\t\treturn newrel.empty()\n\t}\n\n\tnewrel.query.Group(attrNames...)\n\treturn newrel\n}\n\n// Limit specifies a limit for the number of records to retrieve.\n//\n//\tUser.Limit(10) // Generated SQL has 'LIMIT 10'\nfunc (rel *Relation) Limit(num int) *Relation {\n\tnewrel := rel.Copy()\n\tnewrel.query.Limit(num)\n\treturn newrel\n}\n\nfunc (rel *Relation) Joins(assocNames ...string) *Relation {\n\tnewrel := rel.Copy()\n\n\tfor _, assocName := range assocNames {\n\t\tassociation := newrel.ReflectOnAssociation(assocName)\n\t\tif association == nil {\n\t\t\treturn newrel.empty()\n\t\t}\n\n\t\tnewrel.query.Join(association.Relation.Copy(), association.Association)\n\t}\n\treturn newrel\n}\n\nfunc (rel *Relation) Find(id interface{}) Result {\n\tvar q QueryBuilder\n\tq.From(rel.TableName())\n\tq.Select(rel.scope.AttributeNames()...)\n\t// TODO: consider using unified approach.\n\tq.Where(fmt.Sprintf(\"%s = ?\", rel.PrimaryKey()), id)\n\n\tvar rows []activesupport.Hash\n\n\tif err := rel.Connection().ExecQuery(rel.Context(), q.Operation(), func(h activesupport.Hash) bool {\n\t\trows = append(rows, h)\n\t\treturn true\n\t}); err != nil {\n\t\treturn Err(err)\n\t}\n\n\tif len(rows) != 1 {\n\t\treturn Err(ErrRecordNotFound{PrimaryKey: rel.PrimaryKey(), ID: id})\n\t}\n\treturn rel.New(rows[0])\n}\n\n// FindBy returns a record matching the specified condition.\n//\n//\tperson := Person.FindBy(\"name\", \"Bill\")\n//\t// Ok(Some(#))\n//\n//\tperson := Person.FindBy(\"salary > ?\", 10000)\n//\t// Ok(Some(#))\nfunc (rel *Relation) FindBy(cond string, arg interface{}) Result {\n\treturn rel.Where(cond, arg).First()\n}\n\n// First find returns the first record.\nfunc (rel *Relation) First() Result {\n\trecords, err := rel.Limit(1).ToA()\n\tif err != nil {\n\t\treturn Err(err)\n\t}\n\tswitch len(records) {\n\tcase 0:\n\t\treturn Ok(None)\n\tdefault:\n\t\treturn Ok(Some(records[0]))\n\t}\n}\n\nfunc (rel *Relation) InsertAll(params ...map[string]interface{}) (\n\trr []*ActiveRecord, err error,\n) {\n\trr = make([]*ActiveRecord, 0, len(params))\n\tfor _, h := range params {\n\t\trec, err := rel.Initialize(h)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\trr = append(rr, rec)\n\t}\n\n\tif err = rel.connections.Transaction(rel.Context(), func() error {\n\t\tfor i, rec := range rr {\n\t\t\tif rr[i], err = rec.Insert(); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t}); err != nil {\n\t\treturn nil, err\n\t}\n\treturn rr, nil\n}\n\n// ToA converts Relation to array. The method access database to retrieve objects.\nfunc (rel *Relation) ToA() (Array, error) {\n\tvar rr Array\n\n\tif err := rel.Each(func(r *ActiveRecord) error {\n\t\trr = append(rr, r)\n\t\treturn nil\n\t}); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn rr, nil\n}\n\n// ToSQL returns sql statement for the relation.\n//\n//\tUser.Where(\"name\", \"Oscar\").ToSQL()\n//\t// SELECT * FROM \"users\" WHERE \"name\" = ?\nfunc (rel *Relation) ToSQL() string {\n\treturn rel.query.String()\n}\n\nfunc (rel *Relation) String() string {\n\tvar buf strings.Builder\n\tfmt.Fprintf(&buf, \"%s(\", strings.Title(rel.name))\n\n\tattrs := rel.AttributesForInspect()\n\tfor i, attr := range attrs {\n\t\tfmt.Fprintf(&buf, \"%s: %s\", attr.AttributeName(), attr.AttributeType())\n\t\tif i < len(attrs)-1 {\n\t\t\tfmt.Fprint(&buf, \", \")\n\t\t}\n\t}\n\n\tfmt.Fprintf(&buf, \")\")\n\treturn buf.String()\n}\n"}}},{"rowIdx":2050,"cells":{"text":{"kind":"string","value":"from compas.geometry.primitives.frame import Frame\n\n\nclass BeamStorage(object):\n\n def __init__(self, frame=None, y_count=5, y_spacing=140, z_spacing=140):\n # type: (Frame, int, float, float) -> None\n \"\"\"Frame should have X pointing along beam length and Z pointing to world Z\"\"\"\n self.frame = frame # type: (Frame) # Frame where the\n self.y_count = y_count\n self.y_spacing = y_spacing\n self.z_spacing = z_spacing\n\n def to_data(self):\n \"\"\"Simpliest way to get this class serialized.\n \"\"\"\n return self.data\n\n @classmethod\n def from_data(cls, data):\n \"\"\"Construct a Movement from structured data. Subclass must add their properity to\n the data properity.\n \"\"\"\n beamstorage = cls()\n beamstorage.data = data\n return beamstorage\n\n @property\n def data(self):\n data = {}\n data['frame'] = self.frame\n data['y_count'] = self.y_count\n data['y_spacing'] = self.y_spacing\n data['z_spacing'] = self.z_spacing\n return data\n\n @data.setter\n def data(self, data):\n self.frame = data.get('frame', Frame.worldXY())\n self.y_count = data.get('y_count', 5)\n self.y_spacing = data.get('y_spacing', 140)\n self.z_spacing = data.get('z_spacing', 140)\n\n def get_storage_frame(self, beam_seq, total_beam_count = 0):\n # type(int) -> Frame\n \"\"\"Get the storage frame of a particular beam based on the sequence number (zero start)\n The algorithm is a simple Y first and then Z. \n\n The returned frame have X pointing along the beam length and\n Z pointing to world Up. You can align the grasp face's face_frame such\n that the beam is stored in the same orientation with the gripping direction.\n and optionally compensate the depth of the beam by moving the beam up.\n \"\"\"\n # Reverse the order (since we pick form the top)\n if total_beam_count > 0 :\n beam_seq = total_beam_count - beam_seq - 1\n\n y = (beam_seq % self.y_count)\n z = beam_seq // self.y_count\n\n y_offset = y * self.y_spacing\n z_offset = z * self.z_spacing\n\n transform_vector = self.frame.yaxis.unitized().scaled(y_offset) + self.frame.zaxis.unitized().scaled(z_offset)\n return Frame(self.frame.point + transform_vector, self.frame.xaxis.copy(), self.frame.yaxis.copy())\n"}}},{"rowIdx":2051,"cells":{"text":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\nimport requests\nfrom lxml import etree\nimport SaveData\nimport random\n\nclass Get_album_and_aongs:\n '''\n 通过专辑号获取专辑信息和歌曲信息\n '''\n def __init__(self, album_id, proxy_pool):\n self.album_id = album_id\n self.proxy_pool = proxy_pool\n \n def get_album_and_songs(self):\n '''\n 该函数用于通过专辑号获取专辑信息\n 以及该专辑包含的所有歌曲信息\n 将获取的信息保存到数据库\n '''\n url = 'http://music.163.com/album?id=' + self.album_id\n headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:57.0) Gecko/20100101 Firefox/57.0'}\n ip = random.choice(self.proxy_pool)\n proxies = {'http': ip}\n try:\n r = requests.get(url, headers=headers, proxies=proxies, timeout=3) #请求一张专辑的歌曲列表页面\n if r.status_code == 404:\n return -1\n else:\n r.raise_for_status()\n html = etree.HTML(r.text)\n \n '''解析获取专辑信息,包括专辑号、专辑名、歌手号、歌手名、发行时间和发行单位'''\n album_info = {} \n album_info['album_id'] = self.album_id\n album_info['album_name'] = html.xpath(\"//h2[@class='f-ff2']/text()\")\n album_info['singer_id'] = html.xpath(\"//p[@class='intr']//a/@href\")[0].replace('/artist?id=', '')\n album_info['singer_name'] = html.xpath(\"//p[@class='intr']//a/text()\")\n album_info['release_time'] = html.xpath(\"//p[@class='intr']/text()\")[0]\n if len(html.xpath(\"//p[@class='intr']/text()\")) > 1:\n album_info['release_company'] = html.xpath(\"//p[@class='intr']/text()\")[1].strip()\n else: \n album_info['release_company'] = '无' #有些专辑没有标明发行单位,此类统统用‘无’表示\n \n '''解析获取一张专辑的所有歌曲信息''' \n songs_info = [] \n for i in range(len(html.xpath(\"//ul[@class='f-hide']/li\"))):\n '''解析获取一首歌的信息,包括歌曲号、歌曲名、所属专辑号、所属专辑名'''\n song_info = {} \n song_info['song_id'] = html.xpath(\"//ul[@class='f-hide']/li/a/@href\")[i].replace('/song?id=', '')\n song_info['song_name'] = html.xpath(\"//ul[@class='f-hide']/li/a/text()\")[i]\n song_info['album_id'] = album_info['album_id']\n song_info['album_name'] = album_info['album_name']\n songs_info.append(song_info)\n \n '''调用函数,保存专辑信息和歌曲信息到数据库'''\n SaveData.save_album_info(album_info)\n SaveData.save_songs_info(songs_info)\n \n print(\"专辑id为\"+ self.album_id +\"的信息获取完毕\")\n return 1\n except:\n print(\"专辑id为\"+ self.album_id +\"的信息获取失败\")\n print(\"正在重新获取\")\n return None\n "}}},{"rowIdx":2052,"cells":{"text":{"kind":"string","value":"/**\n * Copyright 2018 gd Author. All Rights Reserved.\n * Author: Chuck1024\n */\n\npackage discovery\n\nimport (\n\t\"github.com/chuck1024/gd/service\"\n)\n\nvar (\n\tdefaultConf = \"conf/conf.ini\"\n)\n\ntype DogDiscovery interface {\n\tStart() error\n\tClose()\n\tWatch(key, node string) error\n\tWatchMulti(nodes map[string]string) error\n\tAddNode(key string, info service.NodeInfo)\n\tDelNode(key string, addr string)\n\tGetNodeInfo(key string) (nodesInfo []service.NodeInfo)\n}\n"}}},{"rowIdx":2053,"cells":{"text":{"kind":"string","value":"import { PublicKey } from '@solana/web3.js';\nimport { getPayer, getRpcUrl} from '../utils';\nimport { Connection, NodeWallet, programs, actions } from '@metaplex/js';\n\n\n\n\nasync function getVaultInfo(vaultAddress) {\n const rpcUrl = await getRpcUrl();\n let connection = new Connection(rpcUrl, 'confirmed');\n const vault = await programs.vault.Vault.load(connection, vaultAddress);\n \n console.log(vault.data.authority);\n\n}\n\ngetVaultInfo(new PublicKey(\"AvLtCwsoqXe2jr2rQ1wwvXF8LD6g9PcR8Qz8ygy5ARmF\"))"}}},{"rowIdx":2054,"cells":{"text":{"kind":"string","value":"package net.apptronic.test.commons_sample_compat_app\n\nimport android.content.Intent\nimport android.os.Bundle\nimport net.apptronic.test.commons_sample_compat_app.about.AboutActivity\nimport net.apptronic.test.commons_sample_compat_app.data.UserData\nimport net.apptronic.test.commons_sample_compat_app.fragments.dialog.SampleDialog\nimport net.apptronic.test.commons_sample_compat_app.fragments.enterdata.EnterDataFragment\nimport net.apptronic.test.commons_sample_compat_app.fragments.showdata.KEY_USER_DATA\nimport net.apptronic.test.commons_sample_compat_app.fragments.showdata.ShowDataFragment\n\nclass RouterImpl(private val mainActivity: MainActivity) : Router {\n\n override fun openAbout() {\n mainActivity.startActivity(Intent(mainActivity, AboutActivity::class.java))\n }\n\n override fun openDialog() {\n mainActivity.supportFragmentManager.beginTransaction()\n .add(SampleDialog(), null)\n .commit()\n }\n\n override fun goToEnterData() {\n mainActivity.replaceFragmentWithAddToBackStack(EnterDataFragment())\n }\n\n override fun goToShowUserData(data: UserData) {\n mainActivity.replaceFragmentWithAddToBackStack(ShowDataFragment().apply {\n arguments = Bundle().apply {\n putSerializable(KEY_USER_DATA, data)\n }\n })\n }\n\n}"}}},{"rowIdx":2055,"cells":{"text":{"kind":"string","value":"import json\nfrom enum import Enum, auto\n\nfrom . import profile, oauth\n\n\nclass OutputFormat(Enum):\n json = auto()\n shell = auto()\n config = auto()\n\n\ndef output(fmt: OutputFormat, tokens: oauth.Tokens, **kwargs: str) -> None:\n if fmt == OutputFormat.json:\n print(json.dumps(tokens._asdict(), indent=4))\n elif fmt == OutputFormat.shell:\n print(f\"export ACCESS_TOKEN={tokens.access_token}\")\n print(f\"export ID_TOKEN={tokens.id_token}\")\n elif fmt == OutputFormat.config:\n profile.set_credentials(profile_name=kwargs[\"profile\"], credentials=tokens)\n else:\n raise ValueError(f\"Output format {format} not implemented\")\n"}}},{"rowIdx":2056,"cells":{"text":{"kind":"string","value":"/*\n * Copyright (C) 2016-2019 Lightbend Inc. \n */\n\npackage com.lightbend.lagom.javadsl.api\n\nimport org.scalatest.{ Inside, Matchers, WordSpec }\n\nclass ScalaSupportSpec extends WordSpec with Matchers with Inside {\n\n \"scala support\" should {\n \"resolve a function\" in {\n val method: ScalaServiceSupport.ScalaMethodCall[String] = testMethod _\n method.method.getDeclaringClass should ===(this.getClass)\n method.method.getName should ===(\"testMethod\")\n }\n }\n\n def testMethod(s: String): String = s\n\n}\n"}}},{"rowIdx":2057,"cells":{"text":{"kind":"string","value":"---\nlayout: default\ntitle: Ferramentas\ndescription: Ferramentas utilizadas pela nossa empresa!\n---\n\n## Ferramentas\n\n[Trello](https://trello.com)\n\n[Bug Track](https://www.bugtrack.net)\n\n[Git Hub](https://github.com)\n\n[BPMN.io](https://demo.bpmn.io/s/start)\n\n\n\n\n"}}},{"rowIdx":2058,"cells":{"text":{"kind":"string","value":"// Copyright Luc Yriarte 2018 \n// License: Apache-2.0\n\npackage main\n\nimport (\n\t\"github.com/hyperledger/fabric/core/chaincode/shim\"\n)\n\ntype Storable interface {\n\tPut(stub shim.ChaincodeStubInterface, key string) error\n\tGet(stub shim.ChaincodeStubInterface, key string) error\n}\n"}}},{"rowIdx":2059,"cells":{"text":{"kind":"string","value":"#!/bin/bash\ncd ../../\nbrew install boost --with-python\nbrew install boost-python ffmpeg xerces-c mono\nbrew cask install java\nschemas=\"$(pwd)/Schemas\"\necho \"export MALMO_XSD_PATH=$schemas\" >> ~/.bashrc\nsource ~/.bashrc\n"}}},{"rowIdx":2060,"cells":{"text":{"kind":"string","value":"package com.quickbirdstudios.surveykit.backend.presenter\n\nimport com.quickbirdstudios.surveykit.FinishReason\nimport com.quickbirdstudios.surveykit.result.StepResult\n\nsealed class NextAction {\n data class Next(val result: StepResult) : NextAction()\n data class Back(val result: StepResult) : NextAction()\n object Skip : NextAction()\n data class Close(val result: StepResult, val finishReason: FinishReason) : NextAction()\n}\n"}}},{"rowIdx":2061,"cells":{"text":{"kind":"string","value":"import 'dart:async';\n\nimport 'package:academy_app/data/repository/failures/firestore_failure.dart';\nimport 'package:academy_app/data/repository/user/i_user_repository.dart';\nimport 'package:academy_app/model/user_data/user_data.dart';\nimport 'package:bloc/bloc.dart';\nimport 'package:freezed_annotation/freezed_annotation.dart';\nimport 'package:injectable/injectable.dart';\n\npart 'get_users_state.dart';\npart 'get_users_cubit.freezed.dart';\n\n@injectable\nclass GetUsersCubit extends Cubit {\n final IUserRepo iUserRepo;\n GetUsersCubit(this.iUserRepo) : super(GetUsersState.initial());\n String query = '';\n List filterUsers = [];\n late StreamSubscription streamSubscription;\n Future getUsers() async {\n streamSubscription = iUserRepo.getUsers(query).listen(\n (failureOrSucces) {\n failureOrSucces.fold(\n (failure) => emit(\n GetUsersState.loadFailure(failure),\n ),\n (users) {\n filterUsers = users;\n return emit(\n GetUsersState.loadSuccess(users),\n );\n },\n );\n },\n );\n }\n\n void listChanged(List users, String queryChanged) {\n query = queryChanged;\n filterUsers = users.where((user) {\n final userName = user.userName!.getOrCrash().toLowerCase();\n final search = queryChanged.toLowerCase();\n return userName.contains(search);\n }).toList();\n }\n\n @override\n Future close() {\n streamSubscription.cancel();\n return super.close();\n }\n}\n"}}},{"rowIdx":2062,"cells":{"text":{"kind":"string","value":"def coord(path)\n e = 0\n ne = 0\n path = path.chars\n until path.empty? do\n s = path.shift\n case s\n when ?e\n e += 1\n when ?w\n e -= 1\n when ?n\n t = path.shift\n case t\n when ?e\n ne += 1\n when ?w\n e -= 1\n ne += 1\n end\n when ?s\n t = path.shift\n case t\n when ?e\n e += 1\n ne -= 1\n when ?w\n ne -= 1\n end\n end\n end\n [e, ne]\nend\n\nblacks = []\n\n$stdin.readlines.map(&:strip).each do |path|\n c = coord(path)\n if blacks.include?(c)\n blacks.delete(c)\n else\n blacks.push(c)\n end\nend\n\nputs blacks.length\n"}}},{"rowIdx":2063,"cells":{"text":{"kind":"string","value":"//**********************\n//Hosting eDrawings control in Windows Forms\n//Copyright(C) 2019 www.codestack.net\n//License: https://github.com/codestack-net-dev/solidworks-api-examples/blob/master/LICENSE\n//Product URL: https://www.codestack.net/edrawings-api/gettings-started/winforms/\n//**********************\n\nusing System;\nusing System.Windows.Forms;\nusing eDrawings.Interop.EModelViewControl;\n\nnamespace CodeStack.Examples.eDrawingsApi\n{\n public partial class EDrawingsUserControl : UserControl\n {\n public event Action EDrawingsControlLoaded;\n\n public EDrawingsUserControl()\n {\n InitializeComponent();\n }\n\n public void LoadEDrawings()\n {\n var host = new EDrawingsHost();\n host.ControlLoaded += OnControlLoaded;\n this.Controls.Add(host);\n host.Dock = DockStyle.Fill;\n }\n \n private void OnControlLoaded(EModelViewControl ctrl)\n {\n EDrawingsControlLoaded?.Invoke(ctrl);\n }\n }\n}\n"}}},{"rowIdx":2064,"cells":{"text":{"kind":"string","value":"export function lowercaseStaticParts(path: string): string {\n return path\n .split('/')\n .map((part) => {\n return part.startsWith(':') ? part : part.toLowerCase();\n })\n .join('/');\n}\n"}}},{"rowIdx":2065,"cells":{"text":{"kind":"string","value":"package com.gowtham.letschat.fragments\n\nimport android.os.Bundle\nimport android.view.LayoutInflater\nimport android.view.View\nimport android.view.ViewGroup\nimport com.google.android.material.bottomsheet.BottomSheetDialogFragment\nimport com.gowtham.letschat.databinding.FAttachmentBinding\nimport com.gowtham.letschat.databinding.FImageSrcSheetBinding\nimport com.gowtham.letschat.utils.BottomSheetEvent\nimport org.greenrobot.eventbus.EventBus\n\nclass FAttachment : BottomSheetDialogFragment() {\n\n private lateinit var binding: FAttachmentBinding\n\n companion object{\n fun newInstance(bundle : Bundle) : FAttachment{\n val fragment = FAttachment()\n fragment.arguments=bundle\n return fragment\n }\n }\n\n override fun onCreateView(\n inflater: LayoutInflater,\n container: ViewGroup?,\n savedInstanceState: Bundle?): View {\n binding = FAttachmentBinding.inflate(layoutInflater, container, false)\n return binding.root\n }\n\n override fun onViewCreated(view: View, savedInstanceState: Bundle?) {\n super.onViewCreated(view, savedInstanceState)\n\n binding.imgCamera.setOnClickListener {\n EventBus.getDefault().post(BottomSheetEvent(0))\n dismiss()\n }\n\n binding.imgGallery.setOnClickListener {\n EventBus.getDefault().post(BottomSheetEvent(1))\n dismiss()\n }\n\n binding.videoGallery.setOnClickListener {\n EventBus.getDefault().post(BottomSheetEvent(2))\n dismiss()\n }\n\n binding.videoCamera.setOnClickListener {\n EventBus.getDefault().post(BottomSheetEvent(3))\n dismiss()\n }\n\n }\n}"}}},{"rowIdx":2066,"cells":{"text":{"kind":"string","value":"# Conway's Game of Life\n\nA C implementation of Conway's Game of Life using ncurses.\n\n### Getting Started\n\n```sh\n# Substitute `gmake` if on *BSD\n$ make\n$ ./conway\n```\n\nPress 'q' or CTRL-C to exit. A full list of keybindings can be displayed by pressing '?'.\n\n`conway` can also read a starting position from a cells formatted text file.\n\n```sh\n$ ./conway patterns/glider.cells\n```\n"}}},{"rowIdx":2067,"cells":{"text":{"kind":"string","value":"module ElasticsearchDslBuilder\n # @abstact Exceptions raised by ElasticsearchDslBuilder inherit from Error\n class Error < StandardError; end\n\n # Exception raised when Queries::Query.to_hash attempts to build invalid query\n class InvalidQuery < Error; end\nend\n"}}},{"rowIdx":2068,"cells":{"text":{"kind":"string","value":"require \"alexa_string_tools/version\"\nrequire \"humanize\"\n\nmodule AlexaStringTools\n\n # we'll lazy load this mapping.\n @@alexa_string_mapping = nil\n\n def email_from_alexa\n string = convert_from_alexa_string_to_email(self)\n string.strip\n end\n\n private\n\n def convert_from_alexa_string_to_email(string)\n\n alexa_string_mapping.each do |from,to|\n string = string.gsub(from, to)\n end\n\n string.gsub(' ', '')\n end\n\n def alexa_string_mapping\n return @@alexa_string_mapping if @@alexa_string_mapping\n\n @@alexa_string_mapping = {}\n\n # we don’t have to pad these with spaces because the results are crunched together in the end.\n ('A'..'Z').to_a.each do |letter|\n @@alexa_string_mapping[\"#{letter}.\"] = letter\n @@alexa_string_mapping[\"#{letter.downcase}.\"] = letter.downcase\n end\n\n # it’s okay to pad these with spaces, because they can’t be at the beginning or end anyway.\n @@alexa_string_mapping.merge!({\n ' at ' => '@',\n ' dot ' => '.',\n ' period ' => '.',\n ' underscore ' => '_',\n ' plus ' => '+',\n })\n\n # spoken numbers are always converted into integers.\n (0..10000).to_a.reverse.each do |number|\n number_as_string = number.humanize\n number_as_string = number_as_string.gsub(/,/, '')\n number_as_string = number_as_string.gsub(/-/, ' ')\n @@alexa_string_mapping[number_as_string] = number.to_s\n end\n\n @@alexa_string_mapping\n end\nend\n\nString.class_eval do\n include AlexaStringTools\nend\n\n# trigger the loading of the string mapping automatically at load time.\n\"\".email_from_alexa\n"}}},{"rowIdx":2069,"cells":{"text":{"kind":"string","value":"package com.xaron.equilinoxmodded.framework.blueprintgen.components;\n\nimport java.io.IOException;\n\nimport com.xaron.equilinoxmodded.framework.CsvWriter;\nimport com.xaron.equilinoxmodded.framework.blueprintgen.components.deathai.DeathAIGen;\n\nimport food.FoodSectionType;\n\npublic class FoodComponentGen extends ComponentGen {\n\n\tpublic class FoodSection {\n\t\t\n\t\tprivate int name;\n\t private int foodPoints;\n\t private FoodSectionType type;\n\t private int portions = 0;\n\t private DeathAIGen deathAi;\n\t\t\n\t\tpublic FoodSection(int name, int foodPoints, FoodSectionType type, DeathAIGen deathAi) {\n\t\t\tthis.name = name;\n\t\t\tthis.foodPoints = foodPoints;\n\t\t\tthis.type = type;\n\t\t\tthis.deathAi = deathAi;\n\t\t}\n\t\t\n\t\tpublic FoodSection(int name, int foodPoints, int portions) {\n\t\t\tthis.name = name;\n\t\t\tthis.foodPoints = foodPoints;\n\t\t\tthis.type = FoodSectionType.TO_SHARE;\n\t\t\tthis.portions = portions;\n\t\t}\n\t}\n\t\n\tprivate FoodSection[] foodSections;\n\t\n\tpublic FoodComponentGen(FoodSection[] foodSections) {\n\t\tsuper(\"FOOD\");\n\t\tthis.foodSections = foodSections;\n\t}\n\t\n\t@Override\n\tpublic void writeComponent(CsvWriter writer) throws IOException {\n\t\tsuper.writeComponent(writer);\n\t\twriter.writeInt(foodSections.length);\n\t\t\n\t\tfor (int i = 0; i < foodSections.length; i++) {\n\t\t\twriter.writeInt(foodSections[i].name);\n\t\t\twriter.writeInt(foodSections[i].foodPoints);\n\t\t\twriter.writeString(foodSections[i].type.name());\n\t\t\tif (foodSections[i].type == FoodSectionType.TO_SHARE)\n\t\t\t\twriter.writeLabelInt(\"portions\", foodSections[i].portions);\n\t\t\telse if (foodSections[i].type == FoodSectionType.WHOLE)\n\t\t\t\tfoodSections[i].deathAi.write(writer);\n\t\t\telse if (foodSections[i].type == FoodSectionType.ROOT_VEG)\n\t\t\t\tfoodSections[i].deathAi.write(writer);\n\t\t}\n\t}\n\n}\n"}}},{"rowIdx":2070,"cells":{"text":{"kind":"string","value":"// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file\n// for details. All rights reserved. Use of this source code is governed by a\n// BSD-style license that can be found in the LICENSE file.\n\nlibrary which.test.candidate_paths;\n\nimport 'package:unittest/unittest.dart';\n\nimport 'util.dart';\n\nmain() {\n group('getCandidatePaths', () {\n test('posix', () {\n var candidatePaths = getPosixCandidatePaths('z', '/x/y:/a/b/c', '/foo/bar');\n expect(candidatePaths, ['/x/y/z', '/a/b/c/z']);\n });\n\n test('windows', () {\n var candidatePaths = getWindowsCandidatePaths('z', r'C:\\x\\y;C:\\a\\b\\c', '.EXE;.BAT', r'C:\\foo\\bar');\n expect(candidatePaths, [\n r'C:\\foo\\bar\\z.EXE',\n r'C:\\foo\\bar\\z.BAT',\n r'C:\\x\\y\\z.EXE',\n r'C:\\x\\y\\z.BAT',\n r'C:\\a\\b\\c\\z.EXE',\n r'C:\\a\\b\\c\\z.BAT']);\n });\n });\n}\n"}}},{"rowIdx":2071,"cells":{"text":{"kind":"string","value":"package it.sephiroth.android.library.kotlin_extensions.io.reactivex\n\nimport io.reactivex.disposables.Disposable\n\nfun Disposable.addTo(autoDisposable: AutoDisposable): Disposable {\n autoDisposable.add(this)\n return this\n}"}}},{"rowIdx":2072,"cells":{"text":{"kind":"string","value":"#pragma once\n#ifndef UTILS_HPP\n#define UTILS_HPP\n\nnamespace cave {\n\n\t/**Vector de tres componentes.\n*\n*/\n\tstruct caveVec3f {\n\t\tfloat x;\n\t\tfloat y;\n\t\tfloat z;\n\n\t\tcaveVec3f(float x, float y, float z) {\n\t\t\t\tthis->x = x;\n\t\t\t\tthis->y = y;\n\t\t\t\tthis->z = z;\n\t\t}\n\n\t\tcaveVec3f() = default;\n\t};\n\n\t/**Cuaternion.\n*\n*/\n\tstruct caveQuat {\n\t\tfloat x;\n\t\tfloat y;\n\t\tfloat z;\n\t\tfloat w;\n\n\t\tcaveQuat(float x, float y, float z, float w) {\n\t\t\tthis->w = w;\n\t\t\tthis->x = x;\n\t\t\tthis->y = y;\n\t\t\tthis->z = z;\n\t\t}\n\n\t\tcaveQuat() = default;\n\t};\n\n\t/**Color rgba.\n*\n*/\n\tstruct caveColour {\n\t\tfloat r;\n\t\tfloat g;\n\t\tfloat b;\n\t\tfloat alpha;\n\n\t\tcaveColour(float r=1.0f, float g=1.0f, float b=1.0f, float alpha=1.0f) {\n\t\t\tthis->r = r;\n\t\t\tthis->g = g;\n\t\t\tthis->b = b;\n\t\t\tthis->alpha = alpha;\n\t\t}\n\n\t\tcaveColour() = default;\n\t};\n\n\n}\n\n\n#endif"}}},{"rowIdx":2073,"cells":{"text":{"kind":"string","value":"## 웹\n\n안녕하세요. ! MONKEY.D 입니다 :-) \n\n웹 스크래핑 하는 프레임을 간단하게 적어볼거에요.\n\n요번에는 저만이 참고하는 용으로 작성할 거여서 양해 부탁드립니다. \n\n```python\n#셀레니움 기본 프레임\nfrom selenium import webdriver\nbrowser = webdriver.Chrome()\nbrowser.maximize_window()\n\n\nurl = \"url넣을 주소\"\nbrowser.get(url) #사이트로 이동.\n```\n\n```python\ndriver.find_element_by_xpath('xpath주소!').click()\n```\n\n```python\n#뷰티플숩 기본 프레임\nimport requests\nfrom bs4 import BeautifulSoup\nurl = \"가져올 url주소\"\n변수 = requests.get(url)\n변수.raise_for_status() #변수.raise_for_status() 만약 홈페이지 보안상의 이유나 모종의 이유로 스크래핑이 불가능한 경우에 오류를 내는 함수식입니다.\nsoup=Beautifulsoup(변수.text, \"lxml\")\n```\n\n***정규식***\n\n우리가 어떤 정보를 찾을 때 그래도 대부분의 형식이 정해져있겠죠? 낙엽을 긁개로 쓸어올 때 댕겨와서 긁지 막 던져서 그걸 주워오지는 않지 않습니까. 이런 정규식들을 알아보도록 하겠습니다.\n\n먼저 정규식을 쓰기 위해선 준비가 되어있어야겠죠?\n\n```python\nimport re\n변수=re.compile(\"xx.xx\")\nor\n변수=re.compile(\"^xxx\")\nor\n변수=re.compile(\"xxxx$\")\n\n```\n\n이런식으로 내가 먼저 문자를 어떤 방식으로 찾을 것인지를 정해주어어야합니다.\n\n\n\n**\".\"**이 들어가는 경우에는 **\".\"**한글자를 제외하고 일치하는 문자를 다 찾습니다. 예를 들어 변수=re.compile(\"ca.e\")\n\n라고 한다면 \"cafe\",\"case\",\"care\"등과 같은 단어들을 요구합니다.\n\n\n\n**\"^\"**이 들어가는 경우에는 **' ~로 시작하는'** 의미를 가집니다. 예를 들어 변수=re.compile(\"^de\")\n\n라고 한다면 \"destination\",\"deep\"등과 같은 단어들을 요구합니다.\n\n****\n\n**\"$\"**이 들어가는 경우에는 '~로 끝나는' 의미를 가집니다. 예를 들어 변수=re.compile(\"se$\")\n\n라고 한다면 \"case\",\"base\"등과 같은 단어들을 요구합니다.\n\n\n\n이렇게 먼저 원하는 문자의 형태를 정해준 다음에 주어진 문자를 입력해서 요구한 단어와 일치하는지를 확인해야 합니다.\n\n\n\n그 문자들을 입력하는 형태는 다음과 같습니다. \n\n```python\nm=p.match(\"비교할 문자열\") \nor\nm=p.search(\"비교할 문자열\") \nor\nm=p.findall(\"비교할 문자열\") \n```\n\n**m=p.match(\"비교할 문자열\")** \n\n : 주어진 문자열의 처음부터 일치하는지 확인합니다. 그런데 match함수는 비교할 문자열의 처음부터 일치하는지를 확인하기 때문에 첫부분만 일치하면 뒤의 어떤말이 와도 맞다고 판단합니다.\n\n\n\n**m=p.search(\"비교할 문자열\")**\n\n : 주어진 문자열 중에 일치하는 게 있는지 확인, 즉 중간에 단어가 껴있어도 있기만 한다면 요구하는 단어에 맞다고 판단합니다.\n\n\n\n**m=p.findall(\"비교할 문자열\")**\n\n : 일치하는 모든 것을 \"리스트\" 형태로 반환합니다. 말 그대로 리스트로 바꿔주기 때문에 보통은 lst변수를 사용합니다.\n\n\n\n참조 블로그 : https://blog.naver.com/paragonyun/222205019430"}}},{"rowIdx":2074,"cells":{"text":{"kind":"string","value":"/*global Raphael, d3, $, define */\n/*!\n * Diff的兼容性定义\n */\n;(function (name, definition) {\n if (typeof define === 'function') { // Module\n define(definition);\n } else { // Assign to common namespaces or simply the global object (window)\n this[name] = definition(function (id) {\n return this[id];\n });\n }\n})('Diff', function (require) {\n var DataV = require('DataV');\n\n /**\n * 构造函数\n * @param {Object} node 表示在html的哪个容器中绘制该组件\n * @param {Object} options 为用户自定义的组件的属性,比如画布大小\n */\n var Diff = DataV.extend(DataV.Chart, {\n type: \"Diff\",\n initialize: function (node, options) {\n this.node = this.checkContainer(node);\n\n //图的大小设置\n this.defaults.width = 900;\n this.defaults.height = 800;\n\n //设置用户指定的属性\n this.setOptions(options);\n\n //创建画布\n this.createCanvas();\n }\n });\n\n /**\n * 创建画布\n */\n Diff.prototype.createCanvas = function () {\n this.canvas = new Raphael(this.node, this.defaults.width, this.defaults.height);\n };\n\n /**\n * 绘制弦图\n */\n Diff.prototype.render = function () {\n this.layout();\n };\n\n // 计算顺序的相似度\n var diffMap = function (list1, list2) {\n var map = [];\n var hit = 0;\n var lastIndex = -1;\n for (var i = 0; i < list1.length; i++) {\n var index = _.indexOf(list2, list1[i]);\n if (index === -1) {\n continue;\n } else {\n if (index > lastIndex) {\n lastIndex = index;\n map.push([i, index]);\n }\n hit++;\n }\n }\n console.log(map);\n console.log(map.length / list1.length);\n console.log(hit / list1.length);\n return map;\n };\n\n /**\n *对原始数据进行处理\n * @param {Array} table 将要被绘制成饼图的二维表数据\n */\n Diff.prototype.setSource = function (table1, table2) {\n this.rawData = [table1, table2];\n this.diffMap = diffMap(table1, table2);\n };\n\n /**\n *创建chord布局\n */\n Diff.prototype.layout = function () {\n var that = this;\n var canvas = that.canvas;\n\n var paddingLeft = 10;\n var paddingTop = 10;\n var height = 20;\n var distance = 50;\n var width = (this.defaults.width - 2 * paddingLeft - distance) / 2;\n\n for (var j = 0, k = this.rawData.length; j < k; j++) {\n var maped = _.pluck(this.diffMap, j);\n for (var i = 0, l = this.rawData[j].length; i < l; i++) {\n canvas.rect(paddingLeft + j * (width + distance), paddingTop + height * i, width, height).attr({fill: _.indexOf(maped, i) !== -1 ? \"#00ff00\" : \"#ff0000\"});\n canvas.text(paddingLeft + j * (width + distance), paddingTop + height * i + height / 2, this.rawData[j][i]).attr({'text-anchor': 'start'});\n }\n }\n for (var i = 0, l = this.diffMap.length; i < l; i++) {\n var line = this.diffMap[i];\n canvas.path(\"M\" + (paddingLeft + width) + ' ' + (paddingTop + height * line[0] + height / 2) + \"L\" + (paddingLeft + width + distance) + \" \" + (paddingTop + height * line[1] + height / 2)).attr({stroke: '#00ff00'});\n }\n };\n\n return Diff;\n});"}}},{"rowIdx":2075,"cells":{"text":{"kind":"string","value":"var options = function(type, height, numbers , color){\n return { \n chart: {\n height: height,\n width: '100%',\n type: type,\n sparkline: {\n enabled: true\n },\n toolbar: {\n show: false,\n },\n },\n grid: {\n show: false,\n padding: {\n top: 0,\n right: 0,\n bottom: 0,\n left: 0 \n }\n },\n dataLabels: {\n enabled: false\n },\n legend: {\n show: false,\n },\n series: [\n {\n name: \"serie1\",\n data: numbers\n }\n ], \n fill: {\n colors: [color],\n },\n stroke:{\n colors: [color],\n width: 3\n }, \n yaxis: {\n show: false, \n }, \n xaxis: {\n show: false,\n labels: {\n show: false,\n }, \n axisBorder: {\n show: false, \n }, \n tooltip: {\n enabled: false,\n }\n },\n \n };\n}\n \n\n var analytics_1 = document.getElementsByClassName(\"analytics_1\");\n \n if (analytics_1 != null && typeof(analytics_1) != 'undefined') {\n var chart = new ApexCharts(analytics_1[0], options(\"area\" , '51px' , numArr(10,99) , '#4fd1c5')); \n var chart_1 = new ApexCharts(analytics_1[1], options(\"area\" , '51px' , numArr(10,99) , '#4c51bf')); \n chart.render(); \n chart_1.render(); \n }\n\n\n\n \n "}}},{"rowIdx":2076,"cells":{"text":{"kind":"string","value":"using Random\nusing StaticArrays\nusing Test\n\nusing CLIMA\nusing CLIMA.Atmos\nusing CLIMA.GenericCallbacks\nusing CLIMA.ODESolvers\nusing CLIMA.Mesh.Filters\nusing CLIMA.MoistThermodynamics\nusing CLIMA.PlanetParameters\nusing CLIMA.VariableTemplates\n\n# ------------------------ Description ------------------------- #\n# 1) Dry Rising Bubble (circular potential temperature perturbation)\n# 2) Boundaries - `All Walls` : NoFluxBC (Impermeable Walls)\n# Laterally periodic\n# 3) Domain - 2500m[horizontal] x 2500m[horizontal] x 2500m[vertical]\n# 4) Timeend - 1000s\n# 5) Mesh Aspect Ratio (Effective resolution) 1:1\n# 7) Overrides defaults for\n# `forcecpu`\n# `solver_type`\n# `sources`\n# `C_smag`\n# 8) Default settings can be found in `src/Driver/Configurations.jl`\n# ------------------------ Description ------------------------- #\n\nfunction init_risingbubble!(bl, state, aux, (x,y,z), t)\n FT = eltype(state)\n R_gas::FT = R_d\n c_p::FT = cp_d\n c_v::FT = cv_d\n γ::FT = c_p / c_v\n p0::FT = MSLP\n\n xc::FT = 1250\n yc::FT = 1250\n zc::FT = 1000\n r = sqrt((x-xc)^2+(y-yc)^2+(z-zc)^2)\n rc::FT = 500\n θ_ref::FT = 300\n Δθ::FT = 0\n\n if r <= rc\n Δθ = FT(5) * cospi(r/rc/2)\n end\n\n #Perturbed state:\n θ = θ_ref + Δθ # potential temperature\n π_exner = FT(1) - grav / (c_p * θ) * z # exner pressure\n ρ = p0 / (R_gas * θ) * (π_exner)^ (c_v / R_gas) # density\n P = p0 * (R_gas * (ρ * θ) / p0) ^(c_p/c_v) # pressure (absolute)\n T = P / (ρ * R_gas) # temperature\n ρu = SVector(FT(0),FT(0),FT(0))\n\n #State (prognostic) variable assignment\n e_kin = FT(0)\n e_pot = grav * z\n ρe_tot = ρ * total_energy(e_kin, e_pot, T)\n state.ρ = ρ\n state.ρu = ρu\n state.ρe = ρe_tot\n state.moisture.ρq_tot = FT(0)\nend\n\nfunction config_risingbubble(FT, N, resolution, xmax, ymax, zmax)\n\n # Boundary conditions\n bc = NoFluxBC()\n\n # Choose explicit solver\n ode_solver = CLIMA.ExplicitSolverType(solver_method=LSRK144NiegemannDiehlBusch)\n\n # Set up the model\n C_smag = FT(0.23)\n model = AtmosModel{FT}(AtmosLESConfiguration;\n turbulence=SmagorinskyLilly{FT}(C_smag),\n source=(Gravity(),),\n init_state=init_risingbubble!)\n\n # Problem configuration\n config = CLIMA.Atmos_LES_Configuration(\"DryRisingBubble\",\n N, resolution, xmax, ymax, zmax,\n init_risingbubble!,\n solver_type=ode_solver,\n model=model)\n return config\nend\n\nfunction main()\n CLIMA.init()\n\n # Working precision\n FT = Float64\n # DG polynomial order\n N = 4\n # Domain resolution and size\n Δh = FT(50)\n Δv = FT(50)\n resolution = (Δh, Δh, Δv)\n # Domain extents\n xmax = 2500\n ymax = 2500\n zmax = 2500\n # Simulation time\n t0 = FT(0)\n timeend = FT(1000)\n # Courant number\n CFL = FT(0.8)\n\n driver_config = config_risingbubble(FT, N, resolution, xmax, ymax, zmax)\n solver_config = CLIMA.setup_solver(t0, timeend, driver_config, forcecpu=true, Courant_number=CFL)\n\n # User defined filter (TMAR positivity preserving filter)\n cbtmarfilter = GenericCallbacks.EveryXSimulationSteps(1) do (init=false)\n Filters.apply!(solver_config.Q, 6, solver_config.dg.grid, TMARFilter())\n nothing\n end\n\n # Invoke solver (calls solve! function for time-integrator)\n result = CLIMA.invoke!(solver_config;\n user_callbacks=(cbtmarfilter,),\n check_euclidean_distance=true)\n\n @test isapprox(result,FT(1); atol=1.5e-3)\nend\n\nmain()\n"}}},{"rowIdx":2077,"cells":{"text":{"kind":"string","value":"namespace Evolution.Textkernel\n{\n using Evolution.Textkernel.Models;\n using System.Threading.Tasks;\n\n /// Interface for Textkernel's CV Extract parser.\n public interface ITextkernelParser\n {\n /// Send the file bytes to the service and get back the deserialised result.\n /// The CV file to parse.\n /// Optional name of the file.\n /// The deserialised result.\n Task Parse(byte[] file, string filename = null);\n }\n}"}}},{"rowIdx":2078,"cells":{"text":{"kind":"string","value":"(ns metro.components.web.health\n (:require [clojure.data.json :as json]\n [metro.components.server.pedestal :as server]\n [metro.components.db.postgres :as postgres]))\n\n(defn health [request]\n {:status 200\n :headers {\"Content-Type\" \"application/json\"}\n :body (json/write-str {:server @server/status\n :database @postgres/status})})"}}},{"rowIdx":2079,"cells":{"text":{"kind":"string","value":"import React, { Component } from 'react'\nimport ConcertList from '../components/ConcertList'\nimport DeleteArtist from '../components/DeleteArtist'\n\nclass CurrentArtist extends Component {\n\n componentDidMount() {\n this.props.getConcerts(this.props.currentArtist.id)\n }\n\n componentDidUpdate(prevProps) {\n if(this.props.currentArtist !== prevProps.currentArtist) {\n this.props.getConcerts(this.props.currentArtist.id)\n }\n }\n\n\n\n render() {\n let view\n\n return (\n
\n\n

\n
\n
\n

{this.props.currentArtist.name}


\n

\n
\n
\n
\n \n
\n
\n
\n )\n }\n}\nexport default CurrentArtist\n"}}},{"rowIdx":2080,"cells":{"text":{"kind":"string","value":"middleware('auth', ['except' => ['/home', '/lala']]);\n $this->middleware('auth');\n }\n\n public function lista() {\n\n \t/*$produtos = DB::select('SELECT * FROM produtos');\n\n if(view()->exists('produto.listagem')) { \n return view('produto/listagem')->with('produtos', $produtos);\n return view('listagem', ['produtos' => $produtos]);\n return view('listagem')->withProdutos($produtos);//magic methods \n }\n else {\n return view('welcome');\n }*/\n\n $produtos = Produto::all();\n return view('produto/listagem')->with('produtos', $produtos);\n }\n \n //?id=1\n public function mostra(Request $request) {\n \n /*$id = $request->route('id');\n $produto = DB::select('SELECT * FROM produtos WHERE id = ?', [$id]);*/\n \n if($request->has('id')) {//verifica se um parâmetro foi informado\n $id = $request->input('id');\n //$produto = DB::select('SELECT * FROM produtos WHERE id = ?', [$id]);\n $produto = Produto::find($id);\n }\n else {\n return \"Informe um id.\";\n } \n \n if(empty($produto)) {\n return \"Este produto não existe.\";\n }\n return view('produto/detalhes')->with('p', $produto);\n }\n\n public function novo() {\n return view('produto/formulario');\n }\n\n \n public function adiciona(ProdutoRequest $req) {\n\n /*\n $nome = $req->input('nome');\n $desc = $req->input('descricao');\n $valor = $req->input('valor');\n $qtd = $req->input('quantidade');\n\n DB::insert('INSERT INTO produtos (nome, quantidade, valor, descricao) VALUES (?, ?, ?, ?)',\n array($nome, $qtd, $valor, $desc));\n\n //return redirect('/produtos')->withInput();//envia tudo\n //return redirect('/produtos')->withInput($req->only('nome'));\n return redirect()->action('ProdutoController@lista')->withInput();*/\n\n //outra forma de fazer\n /*$params = $req->all();\n $produto = new Produto($params);\n $produto->save();*/\n\n //mais uma forma de fazer\n\n Produto::create($req->all());\n\n return redirect()->action('ProdutoController@lista')->withInput();\n }\n\n //passando com a barra /1\n public function remove($id) {\n \n $produto = Produto::find($id);\n $produto->delete();\n\n return redirect()->action('ProdutoController@lista'); \n }\n\n //poderia ter usado o método mostrar, mas para deixar um exemplo de como fazer das maneiras vou usar esse\n public function editar($id) {\n \n $produto = Produto::find($id);\n\n return view('produto/atualiza')->with('p', $produto);\n }\n\n public function atualiza($id, Request $req) {\n \n $produto = Produto::findOrFail($id);\n $params = $req->all();\n $produto->fill($params)->save();\n\n return redirect()->action('ProdutoController@lista');\n }\n}\n\n\n/* exemplo\n public function __construct(Request $request) {\n $this->request = $request;\n }\n\n public function checkText() {\n $txt = $this->request->has('txt'); \n return $txt;\n }\n*/"}}},{"rowIdx":2081,"cells":{"text":{"kind":"string","value":"package org.zstack.header.identity;\r\n\r\nimport org.zstack.header.query.ExpandedQueries;\r\nimport org.zstack.header.query.ExpandedQuery;\r\nimport org.zstack.header.search.Inventory;\r\n\r\nimport java.sql.Timestamp;\r\nimport java.util.ArrayList;\r\nimport java.util.Collection;\r\nimport java.util.List;\r\n\r\n@Inventory(mappingVOClass = UserGroupUserRefVO.class)\r\n@ExpandedQueries({\r\n @ExpandedQuery(expandedField = \"user\", inventoryClass = UserInventory.class,\r\n foreignKey = \"userUuid\", expandedInventoryKey = \"uuid\"),\r\n @ExpandedQuery(expandedField = \"group\", inventoryClass = UserGroupInventory.class,\r\n foreignKey = \"groupUuid\", expandedInventoryKey = \"uuid\")\r\n})\r\npublic class UserGroupUserRefInventory {\r\n private String userUuid;\r\n private String groupUuid;\r\n private Timestamp createDate;\r\n private Timestamp lastOpDate;\r\n \r\n public static UserGroupUserRefInventory valueOf(UserGroupUserRefVO vo) {\r\n UserGroupUserRefInventory inv = new UserGroupUserRefInventory();\r\n inv.setUserUuid(vo.getUserUuid());\r\n inv.setGroupUuid(vo.getGroupUuid());\r\n inv.setCreateDate(vo.getCreateDate());\r\n inv.setLastOpDate(vo.getLastOpDate());\r\n return inv;\r\n }\r\n\r\n public static List valueOf(Collection vos) {\r\n List invs = new ArrayList();\r\n for (UserGroupUserRefVO vo : vos) {\r\n invs.add(valueOf(vo));\r\n }\r\n return invs;\r\n }\r\n \r\n public String getUserUuid() {\r\n return userUuid;\r\n }\r\n\r\n public void setUserUuid(String userUuid) {\r\n this.userUuid = userUuid;\r\n }\r\n\r\n public String getGroupUuid() {\r\n return groupUuid;\r\n }\r\n\r\n public void setGroupUuid(String groupUuid) {\r\n this.groupUuid = groupUuid;\r\n }\r\n\r\n public Timestamp getCreateDate() {\r\n return createDate;\r\n }\r\n\r\n public void setCreateDate(Timestamp createDate) {\r\n this.createDate = createDate;\r\n }\r\n\r\n public Timestamp getLastOpDate() {\r\n return lastOpDate;\r\n }\r\n\r\n public void setLastOpDate(Timestamp lastOpDate) {\r\n this.lastOpDate = lastOpDate;\r\n }\r\n}\r\n"}}},{"rowIdx":2082,"cells":{"text":{"kind":"string","value":"rootProject.name = \"PortalClosers\"\n\ninclude(\":core\")\nproject(\":core\").projectDir = File(\"game/core\")\n\ninclude(\":android\")\nproject(\":android\").projectDir = File(\"game/android\")\n\ninclude(\":desktop\")\nproject(\":desktop\").projectDir = File(\"game/desktop\")\n\ninclude(\":headless\")\nproject(\":headless\").projectDir = File(\"game/headless\")\n\nincludeBuild(\"engine/gradle-plugins\")"}}},{"rowIdx":2083,"cells":{"text":{"kind":"string","value":"package v1\n\nimport (\n\t\"github.com/Gavazn/Gavazn/internal/category\"\n\t\"github.com/Gavazn/Gavazn/internal/comment\"\n\t\"github.com/Gavazn/Gavazn/internal/post\"\n\t\"github.com/Gavazn/Gavazn/internal/user\"\n\t\"github.com/labstack/echo\"\n\t\"go.mongodb.org/mongo-driver/bson\"\n)\n\n/**\n * @api {get} /api/v2/dashboard get statistics\n * @apiVersion 1.0.0\n * @apiName getStatistics\n * @apiGroup Dashboard\n *\n * @apiSuccess {Number} total_posts\n * @apiSuccess {Number} total_categories\n * @apiSuccess {Number} total_comments\n * @apiSuccess {Number} total_users\n *\n * @apiError {String} error api error message\n */\nfunc getStatistics(ctx echo.Context)error{\n\treturn ctx.JSON(200, echo.Map{\n\t\t\"total_posts\": post.Count(bson.M{}),\n\t\t\"total_categories\": category.Count(bson.M{}),\n\t\t\"total_comments\": comment.Count(bson.M{}),\n\t\t\"total_users\": user.Count(bson.M{}),\n\t})\n}"}}},{"rowIdx":2084,"cells":{"text":{"kind":"string","value":"\nfunction addServicesList(name, file) {\n angular.bootstrap(document.getElementById(\"divWrap\"), ['useApp']);\n var list = RepeatObj.addList(name, file, function () {\n showService(name);\n function showService(name) {\n console.log(name + ' initialize(); complete');\n RepeatObj.useList.title = name;\n RepeatObj.useList.setActions({\n entry: {\n Action: 'Done'\n }\n });\n RepeatObj.useList.initialize(name, true);\n }\n });\n return (list);\n}\nfunction ToolbarObj (tempid, key) {\n var listobj = ListDataObj(tempid);\n listobj.DataKey = key;\n listobj.addReferences = addReferences;\n listobj.listobj_processData = listobj.processData;\n listobj.processDatax = function (data) {\n listobj.listobj_processData(data);\n if ( typeof (listobj.addReferences) === 'undefined') { } else {\n listobj.addReferences();\n delete (listobj.addReferences);\n }\n }\n return (listobj);\n}\nfunction addListObj(tempid, jsonfilename, readyfunc) {\n var funcname = 'RepeatObj.addList()';\n var listobj = null;\n if ((listobj = ToolbarObj(tempid, RepeatObj.getDataKey(jsonfilename))) == null) {\n console.log(funcname + '(); Error in ' + funcname + '; unable to create ListObj');\n } else {\n console.log(funcname + '(); NEW [' + tempid + '] ListObj[' + listobj.DataKey + ']');\n }\n RepeatObj.addListObj(listobj, tempid, jsonfilename, readyfunc);\n return (listobj);\n}\nfunction addServices() {\n //restoreTemp(addServicesList);\n return (addServicesList('Service', '/data/Service.json'));\n}\n\n\n"}}},{"rowIdx":2085,"cells":{"text":{"kind":"string","value":"import React from 'react'\nimport { graphql } from 'gatsby'\n\nimport Layout from '../components/Layout'\nimport { useSiteMetadata } from '../hooks'\nimport WebmentionReplies from '../components/Webmention/WebmentionFeed'\nimport { Helmet } from 'react-helmet/es/Helmet'\n\nconst PageTemplate = ({ data }) => {\n const { title: siteTitle } = useSiteMetadata()\n const { html: pageBody, frontmatter, fields } = data.markdownRemark\n\n return (\n \n \n \n \n
\n
{frontmatter.title}
\n
\n \n )\n}\n\nexport default PageTemplate\n\nexport const pageQuery = graphql`\n query PageBySlug($slug: String!) {\n site {\n siteMetadata {\n title\n author {\n name\n }\n }\n }\n markdownRemark(fields: { slug: { eq: $slug } }) {\n id\n excerpt\n html\n fields {\n slug\n }\n frontmatter {\n title\n date(formatString: \"MMMM DD, YYYY\")\n categories\n tags\n description\n }\n }\n }\n`"}}},{"rowIdx":2086,"cells":{"text":{"kind":"string","value":"from rest_framework.permissions import BasePermission\n\n\nclass HasValidToken(BasePermission):\n \"\"\"Has Valid Token\n\n Checks if the request has a valid token,\n even if the user is not authenticated\n \"\"\"\n\n def has_permission(self, request, view):\n return bool(getattr(request, \"auth\", False))\n"}}},{"rowIdx":2087,"cells":{"text":{"kind":"string","value":"package org.example.quiz.service\n\nimport cats.effect.{ContextShift, IO}\nimport org.example.quiz.dao.QuestionAnswerDao\nimport org.example.quiz.entities._\n\nclass QuizService(dao: QuestionAnswerDao,\n categoryService: CategoryService)\n (implicit cs: ContextShift[IO]) {\n\n private val numberOfQuestions = 10\n\n def generate(categoryId: Long): IO[Option[QuizEntity]] =\n categoryService.get(categoryId).flatMap {\n case Some(category) =>\n pickQuestions(category, numberOfQuestions).map(qs =>\n Some(QuizEntity(qs)))\n case None => IO(None)\n }\n\n private def pickQuestions(category: CategoryEntity,\n n: Int): IO[List[QuestionEntity]] = {\n val randomQAs = IO.fromFuture(IO(dao.pickByCategoryId(category.id, n = n)))\n randomQAs.map { qas =>\n qas.map { case (q, as) => QuestionEntity.fromRecord(q, as) }.toList\n }\n }\n\n def score(givenAnswers: List[GivenAnswerEntity]): IO[ScoreEntity] = {\n val questionIds = givenAnswers.map(_.questionId)\n IO.fromFuture(IO(dao.getCorrectQuestionAnswers(questionIds))).map { correctAnswers =>\n val goodAnswers = givenAnswers.filter { answer =>\n correctAnswers.exists { case (q, a) =>\n q == answer.questionId && a == answer.answerId\n }\n }\n val badAnswers = givenAnswers.diff(goodAnswers)\n val score = 1.0 * goodAnswers.size / givenAnswers.size\n ScoreEntity(score, correct = goodAnswers, wrong = badAnswers)\n }\n }\n}\n"}}},{"rowIdx":2088,"cells":{"text":{"kind":"string","value":"package aws\n\nimport (\n\t\"fmt\"\n\t\"net/url\"\n\t\"regexp\"\n\t\"strings\"\n\n\t\"github.com/aws/aws-sdk-go/aws\"\n\t\"github.com/aws/aws-sdk-go/aws/awserr\"\n\t\"github.com/aws/aws-sdk-go/service/iam\"\n\n\t\"github.com/hashicorp/terraform/helper/schema\"\n)\n\nfunc resourceAwsIamRolePolicy() *schema.Resource {\n\treturn &schema.Resource{\n\t\t// PutRolePolicy API is idempotent, so these can be the same.\n\t\tCreate: resourceAwsIamRolePolicyPut,\n\t\tUpdate: resourceAwsIamRolePolicyPut,\n\n\t\tRead: resourceAwsIamRolePolicyRead,\n\t\tDelete: resourceAwsIamRolePolicyDelete,\n\t\tImporter: &schema.ResourceImporter{\n\t\t\tState: schema.ImportStatePassthrough,\n\t\t},\n\n\t\tSchema: map[string]*schema.Schema{\n\t\t\t\"policy\": &schema.Schema{\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tRequired: true,\n\t\t\t},\n\t\t\t\"name\": &schema.Schema{\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tRequired: true,\n\t\t\t\tForceNew: true,\n\t\t\t\tValidateFunc: func(v interface{}, k string) (ws []string, errors []error) {\n\t\t\t\t\t// https://github.com/boto/botocore/blob/2485f5c/botocore/data/iam/2010-05-08/service-2.json#L8291-L8296\n\t\t\t\t\tvalue := v.(string)\n\t\t\t\t\tif len(value) > 128 {\n\t\t\t\t\t\terrors = append(errors, fmt.Errorf(\n\t\t\t\t\t\t\t\"%q cannot be longer than 128 characters\", k))\n\t\t\t\t\t}\n\t\t\t\t\tif !regexp.MustCompile(\"^[\\\\w+=,.@-]+$\").MatchString(value) {\n\t\t\t\t\t\terrors = append(errors, fmt.Errorf(\n\t\t\t\t\t\t\t\"%q must match [\\\\w+=,.@-]\", k))\n\t\t\t\t\t}\n\t\t\t\t\treturn\n\t\t\t\t},\n\t\t\t},\n\t\t\t\"role\": &schema.Schema{\n\t\t\t\tType: schema.TypeString,\n\t\t\t\tRequired: true,\n\t\t\t\tForceNew: true,\n\t\t\t},\n\t\t},\n\t}\n}\n\nfunc resourceAwsIamRolePolicyPut(d *schema.ResourceData, meta interface{}) error {\n\tiamconn := meta.(*AWSClient).iamconn\n\n\trequest := &iam.PutRolePolicyInput{\n\t\tRoleName: aws.String(d.Get(\"role\").(string)),\n\t\tPolicyName: aws.String(d.Get(\"name\").(string)),\n\t\tPolicyDocument: aws.String(d.Get(\"policy\").(string)),\n\t}\n\n\tif _, err := iamconn.PutRolePolicy(request); err != nil {\n\t\treturn fmt.Errorf(\"Error putting IAM role policy %s: %s\", *request.PolicyName, err)\n\t}\n\n\td.SetId(fmt.Sprintf(\"%s:%s\", *request.RoleName, *request.PolicyName))\n\treturn nil\n}\n\nfunc resourceAwsIamRolePolicyRead(d *schema.ResourceData, meta interface{}) error {\n\tiamconn := meta.(*AWSClient).iamconn\n\n\trole, name, err := resourceAwsIamRolePolicyParseId(d.Id())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\trequest := &iam.GetRolePolicyInput{\n\t\tPolicyName: aws.String(name),\n\t\tRoleName: aws.String(role),\n\t}\n\n\tgetResp, err := iamconn.GetRolePolicy(request)\n\tif err != nil {\n\t\tif iamerr, ok := err.(awserr.Error); ok && iamerr.Code() == \"NoSuchEntity\" { // XXX test me\n\t\t\td.SetId(\"\")\n\t\t\treturn nil\n\t\t}\n\t\treturn fmt.Errorf(\"Error reading IAM policy %s from role %s: %s\", name, role, err)\n\t}\n\n\tif getResp.PolicyDocument == nil {\n\t\treturn fmt.Errorf(\"GetRolePolicy returned a nil policy document\")\n\t}\n\n\tpolicy, err := url.QueryUnescape(*getResp.PolicyDocument)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif err := d.Set(\"policy\", policy); err != nil {\n\t\treturn err\n\t}\n\tif err := d.Set(\"name\", name); err != nil {\n\t\treturn err\n\t}\n\treturn d.Set(\"role\", role)\n}\n\nfunc resourceAwsIamRolePolicyDelete(d *schema.ResourceData, meta interface{}) error {\n\tiamconn := meta.(*AWSClient).iamconn\n\n\trole, name, err := resourceAwsIamRolePolicyParseId(d.Id())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\trequest := &iam.DeleteRolePolicyInput{\n\t\tPolicyName: aws.String(name),\n\t\tRoleName: aws.String(role),\n\t}\n\n\tif _, err := iamconn.DeleteRolePolicy(request); err != nil {\n\t\treturn fmt.Errorf(\"Error deleting IAM role policy %s: %s\", d.Id(), err)\n\t}\n\treturn nil\n}\n\nfunc resourceAwsIamRolePolicyParseId(id string) (roleName, policyName string, err error) {\n\tparts := strings.SplitN(id, \":\", 2)\n\tif len(parts) != 2 {\n\t\terr = fmt.Errorf(\"role_policy id must be of the for :\")\n\t\treturn\n\t}\n\n\troleName = parts[0]\n\tpolicyName = parts[1]\n\treturn\n}\n"}}},{"rowIdx":2089,"cells":{"text":{"kind":"string","value":"function Invoke-AmoebaMLPipeline {\n [CmdletBinding()]\n param(\n [Parameter(Mandatory=$True,ValueFromPipeline=$True)]\n [ref]$Pipeline,\n [Parameter(Mandatory)]\n [Type]$DataSet,\n [Parameter(Mandatory)]\n [Type]$Predictor\n \n )\n Write-Verbose (\"{0}: {1}\" -f $MyInvocation.MyCommand, \"Training Data\");\n $Method = [Microsoft.ML.LearningPipeline].GetMethod(\"Train\").MakeGenericMethod([Type]$DataSet,[Type]$Predictor)\n try {\n return $Method.Invoke($Pipeline.Value, $null)\n }\n catch {\n write-host ( @(\n $_\n \n $_.exception.InnerException.StackTrace\n $_.exception\n $_.exception.InnerException\n $_.exception.InnerException.InnerException\n $_.exception.InnerException.InnerException.InnerException\n $_.exception.InnerException.InnerException.InnerException.InnerException\n ) | out-string )\n }\n}"}}},{"rowIdx":2090,"cells":{"text":{"kind":"string","value":"namespace :doc do\n \n desc \"Generate the documentation\"\n task :yard do\n puts \"Generating YARD documentation\"\n system(File.join(\"..\", \"build\", \"run\"), \"doc:yardoc\")\n end\n\n\n desc \"Create the API.md file\"\n task :api do\n require 'erb'\n require 'sinatra'\n require 'jsonmodel'\n require_relative '../backend/app/lib/rest.rb'\n require_relative '../backend/app/lib/username.rb'\n require_relative '../backend/app/model/backend_enum_source.rb'\n require_relative '../backend/app/lib/logging.rb'\n require_relative '../backend/app/lib/streaming_import.rb'\n require_relative '../backend/app/lib/component_transfer.rb'\n require_relative '../backend/app/lib/reports/report_helper.rb'\n\n\n class ArchivesSpaceService < Sinatra::Base\n \n def self.helpers\n nil\n end\n \n include RESTHelpers\n\n end\n \n @time = Time.new\n\n JSONModel::init(:enum_source => BackendEnumSource)\n\n require_relative '../backend/app/lib/export'\n\n Dir.glob(File.dirname(__FILE__) + '/../backend/app/controllers/*.rb') {|file| require file unless file =~ /system/}\n\n @endpoints = ArchivesSpaceService::Endpoint.all.sort{|a,b| a[:uri] <=> b[:uri]}\n @examples = JSON.parse( IO.read File.dirname(__FILE__) + \"/../endpoint_examples.json\" )\n\n\n erb = ERB.new(File.read('API.erb'), nil, '<>')\n\n File.open('../API.md', 'w') do |f|\n f.write erb.result(binding)\n end\n\n end\n \n desc 'Rename the YARD index file to avoid problems with Jekyll'\n task :rename_index do\n Dir.chdir('../') do\n files = Dir.glob('doc/**/*')\n files.each do |f|\n if File::file?(f)\n content = File.read(f)\n content.gsub!('\"_index.html\"', '\"alpha_index.html\"')\n content.gsub!('/_index.html', '/alpha_index.html')\n File.open(f, \"w\") do |io|\n io.write content\n end\n end\n end\n `mv doc/_index.html doc/alpha_index.html`\n end\n end\n \n desc 'This generates all documentation and publishes it to the doc folder'\n task :gen do\n Rake::Task[\"doc:api\"].invoke\n Rake::Task[\"doc:yard\"].invoke\n # Rake::Task[\"doc:yard-txt\"].invoke\n Rake::Task[\"doc:rename_index\"].invoke\n end\n \n \nend\n"}}},{"rowIdx":2091,"cells":{"text":{"kind":"string","value":"//index.js\n//获取应用实例\nPage({\n onShareAppMessage(): object {\n return {\n title: 'Oak Weui Avatar 头像',\n path: '/pages/avatar/avatar',\n }\n },\n})\n"}}},{"rowIdx":2092,"cells":{"text":{"kind":"string","value":"module model_module\n\n use amrex_fort_module, only : rt => amrex_real\n implicit none\n\ncontains\n\n subroutine get_model_size(ymin, ymax, dy, lo, hi)\n \n use amrex_fort_module, only : rt => amrex_real\n real(rt) , intent(in) :: ymin, ymax, dy\n integer, intent(out) :: lo, hi\n \n integer :: npts\n\n ! number of points in the domain\n npts = (ymax - ymin)/dy + 1\n \n ! we'll do some ghost cells, for the boundary conditions\n ! by design, the base of the model will be at zone 0\n lo = -4\n hi = npts + 4\n\n end subroutine get_model_size\n\n subroutine get_model(ymin, ymax, dy, &\n pres_base, dens_base, do_isentropic, &\n xn_model, &\n r_model, rho_model, T_model, e_model, p_model, &\n lo, hi)\n\n use eos_module, only : eos\n use eos_type_module, only : eos_t, eos_input_rp\n use network, only : nspec\n use meth_params_module, only: const_grav\n\n use amrex_fort_module, only : rt => amrex_real\n integer, intent(in) :: lo, hi\n real(rt) , intent(in) :: ymin, ymax, dy\n real(rt) , intent(in) :: pres_base, dens_base\n logical, intent(in) :: do_isentropic\n real(rt) , intent(in) :: xn_model(nspec)\n\n real(rt) , intent(out) :: r_model(lo:hi)\n real(rt) , intent(out) :: rho_model(lo:hi)\n real(rt) , intent(out) :: T_model(lo:hi)\n real(rt) , intent(out) :: e_model(lo:hi)\n real(rt) , intent(out) :: p_model(lo:hi)\n\n real(rt) :: H, gamma_const\n \n integer :: j\n\n type (eos_t) :: eos_state\n\n ! compute the pressure scale height (for an isothermal, ideal-gas\n ! atmosphere)\n H = pres_base / dens_base / abs(const_grav)\n\n ! create the constant if we are isentropic\n eos_state % rho = dens_base\n eos_state % p = pres_base\n eos_state % xn(:) = xn_model(:)\n\n ! initial guess\n eos_state % T = 1000.0e0_rt\n\n call eos(eos_input_rp, eos_state)\n\n gamma_const = pres_base/(dens_base * eos_state % e) + 1.0e0_rt\n\n\n rho_model(0) = dens_base\n p_model(0) = pres_base\n\n r_model(0) = ymin + 0.5e0_rt*dy\n\n ! integrate up from the base\n do j = 1, hi\n\n r_model(j) = ymin + (dble(j)+0.5e0_rt)*dy\n\n if (do_isentropic) then\n rho_model(j) = dens_base*(const_grav*dens_base*(gamma_const - 1.0)* &\n (r_model(j)-r_model(0))/ &\n (gamma_const*pres_base) + 1.e0_rt)**(1.e0_rt/(gamma_const - 1.e0_rt))\n else\n rho_model(j) = dens_base * exp(-(r_model(j)-r_model(0))/H)\n endif\n\n p_model(j) = p_model(j-1) - &\n dy * 0.5e0_rt * (rho_model(j)+rho_model(j-1)) * abs(const_grav)\n \n enddo\n\n ! integrate down from the base\n do j = -1, lo, -1\n \n r_model(j) = ymin + (dble(j)+0.5e0_rt)*dy\n\n if (do_isentropic) then\n rho_model(j) = dens_base*(const_grav*dens_base*(gamma_const - 1.0)* &\n (r_model(j)-r_model(0))/ &\n (gamma_const*pres_base) + 1.e0_rt)**(1.e0_rt/(gamma_const - 1.e0_rt))\n else\n rho_model(j) = dens_base * exp(-(r_model(j)-r_model(0))/H)\n endif\n\n p_model(j) = p_model(j+1) + &\n dy * 0.5e0_rt * (rho_model(j)+rho_model(j+1)) * abs(const_grav)\n \n enddo\n \n ! thermodynamics\n do j = lo, hi\n eos_state % rho = rho_model(j)\n eos_state % p = p_model(j)\n eos_state % xn(:) = xn_model(:)\n\n ! initial guess\n eos_state % T = 1000.0e0_rt\n\n call eos(eos_input_rp, eos_state)\n\n e_model(j) = eos_state % e\n T_model(j) = eos_state % T\n end do\n\n end subroutine get_model\n\nend module model_module\n"}}},{"rowIdx":2093,"cells":{"text":{"kind":"string","value":"export default {\n control: {\n opacity: 1,\n backgroundColor: \"#242424\"\n },\n\n button: {\n backgroundColor: \"#332A00\"\n },\n\n buttonLabel: {\n color: \"#FBD89B\"\n },\n\n disabledButtonLabel: {\n color: \"#4F493D\",\n fontSize: 20\n },\n\n cancelButton: {\n backgroundColor: \"#5B0000\",\n },\n\n cancelButtonLabel: {\n color: \"#A49A8C\"\n },\n\n title: {\n color: \"#BDC5CE\"\n },\n\n message: {\n color: \"#7F7F7F\"\n }\n};\n"}}},{"rowIdx":2094,"cells":{"text":{"kind":"string","value":"// JVM_TARGET: 1.8\n// KOTLIN_CONFIGURATION_FLAGS: +JVM.JVM8_TARGET_WITH_DEFAULTS\n\ninterface Test {\n fun test(): String {\n return \"OK\"\n }\n\n fun testAbstract(): String\n}\n\n// TESTED_OBJECT_KIND: function\n// TESTED_OBJECTS: Test, test\n// FLAGS: ACC_PUBLIC\n\n// TESTED_OBJECT_KIND: function\n// TESTED_OBJECTS: Test, testAbstract\n// FLAGS: ACC_PUBLIC, ACC_ABSTRACT\n"}}},{"rowIdx":2095,"cells":{"text":{"kind":"string","value":"require \"active_support/dependencies\"\nmodule ActiveMailer\n # Our host application root path\n # We set this when the engine is initialized\n mattr_accessor :app_root\n\n # Yield self on setup for nice config blocks\n def self.setup\n yield self\n end\nend\n\nrequire \"active_mailer/engine\"\n\nActiveRecord::ConnectionAdapters::SchemaStatements.module_eval do\n def create_active_mailer_table(*args, &block)\n block_with_default_columns = Proc.new do |t|\n t.integer :sender_id\n t.timestamp :sent_at\n t.string :subject\n block.call(t)\n end\n \n create_table(*args, &block_with_default_columns)\n end \nend \n"}}},{"rowIdx":2096,"cells":{"text":{"kind":"string","value":"getPdo();\n// dd($pdo);\n return view('admin.index');\n }//\n public function info(){\n return view('admin.info');\n }\n public function element(){\n return view('admin.element');\n }\n\n// public function map()\n// {\n// $loca=Real::all();\n//// dd($loca);\n//\n// return view('admin.map')->with('data',$loca);\n// }\n\n public function pass(){\n if ($input = Input::all()){\n $rules=[\n 'password'=>'required|between:6,20|confirmed',\n ];\n $message=[\n 'password.required'=>'新密码不能为空!',\n 'password.between'=>'新密码长度必须在6到20位之间!',\n 'password.confirmed'=>'新密码与确认密码不匹配!'\n\n ];\n $validator= Validator::make($input,$rules,$message);\n if ($validator->passes()){\n $user=User::where('user_name','=',session('user.user_name'))->first();\n $_password = Crypt::decrypt($user->user_password);\n// dd($_password);\n if ($input['password_o']==$_password){\n $user->user_password = Crypt::encrypt($input['password']);\n $user->update();\n// dd($user->user_password);\n\n//\n return back()->withErrors(['errors'=>'密码修改成功啦!']);\n }\n else{\n return back()->withErrors(['errors'=>'原密码错误!']);\n// return back();\n }\n }\n else{\n// return back()->withErrors($validator);\n// dd($validator->errors()->all());\n return back()\n ->withErrors($validator);\n }\n }\n else{\n return view('admin.pass');\n }\n }\n}\n\n\n"}}},{"rowIdx":2098,"cells":{"text":{"kind":"string","value":"package my.sample\n\nclass A\n\nfun A.check() {}\n\nfun test() {\n val a = A()\n a.check()\n A().check()\n}"}}},{"rowIdx":2099,"cells":{"text":{"kind":"string","value":"fn = normpath(joinpath(dirname(@__FILE__),\"center_sizes.jld2\"))\nd = JLD2.jldopen(fn,\"r\")\n\ntmp = d[\"ctrs\"]\nctrs = [SVector(q...) for q in tmp]\nrads = d[\"rads\"]\n\ntree = CD.Octree(ctrs, rads)\n\n# extract all the triangles that (potentially) intersect octant (+,+,+)\npred(i) = all(ctrs[i].+rads[i] .> 0)\nbb = SVector(0.5, 0.5, 0.5), 0.5\nids = collect(CD.searchtree(pred, tree, bb))\n@test length(ids) == 178\n\n\nN = 100\nusing DelimitedFiles\nbuf = readdlm(joinpath(@__DIR__,\"assets\",\"ctrs.csv\"))\nctrs = vec(collect(reinterpret(SVector{3,Float64}, buf')))\nrads = vec(readdlm(joinpath(@__DIR__,\"assets\",\"rads.csv\")))\n\ntree = CD.Octree(ctrs, rads)\npred(i) = all(ctrs[i] .+ rads[i] .> 0)\nbb = @SVector[0.5, 0.5, 0.5], 0.5\nids = collect(CD.searchtree(pred, tree, bb))\n\n@show ids\nids2 = findall(i -> all(ctrs[i].+rads[i] .> 0), 1:N)\n@test length(ids2) == length(ids)\n@test sort(ids2) == sort(ids)\n@test ids == [26, 46, 54, 93, 34, 94, 75, 23, 86, 57, 44, 40, 67, 73, 77, 80]\n"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":20,"numItemsPerPage":100,"numTotalItems":43696,"offset":2000,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1NzE2MzkzNCwic3ViIjoiL2RhdGFzZXRzL1ppaGFvLUxpL0NvZGUiLCJleHAiOjE3NTcxNjc1MzQsImlzcyI6Imh0dHBzOi8vaHVnZ2luZ2ZhY2UuY28ifQ.MFQT-rQHhz6maHY385Jc3E65rz4Zalv07SjK8mRA3oPEDeCN-lR2xk_1ecxVJv5-FrZbDHXOKDwRdXdipXb_Bg","displayUrls":true},"discussionsStats":{"closed":0,"open":0,"total":0},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
text
stringlengths
27
775k
@file:Suppress("NOTHING_TO_INLINE") package com.soywiz.kds import java.util.* actual class WeakMap<K : Any, V> { val wm = WeakHashMap<K, V>() actual operator fun contains(key: K): Boolean = wm.containsKey(key) actual operator fun set(key: K, value: V) = run { if (key is String) error("Can't use String as WeakMap keys") wm[key] = value } actual operator fun get(key: K): V? = wm[key] } ///////////////// actual class FastIdentityMap<K, V>(dummy: Boolean) { val map = IdentityHashMap<K, V>() } actual fun <K, V> FastIdentityMap(): FastIdentityMap<K, V> = FastIdentityMap(true) actual val <K, V> FastIdentityMap<K, V>.size: Int get() = this.map.size actual fun <K, V> FastIdentityMap<K, V>.keys(): List<K> = this.map.keys.toList() actual operator fun <K, V> FastIdentityMap<K, V>.get(key: K): V? = this.map[key] actual operator fun <K, V> FastIdentityMap<K, V>.set(key: K, value: V): Unit = run { this.map[key] = value } actual operator fun <K, V> FastIdentityMap<K, V>.contains(key: K): Boolean = key in this.map actual fun <K, V> FastIdentityMap<K, V>.remove(key: K): Unit = run { this.map.remove(key) } actual fun <K, V> FastIdentityMap<K, V>.clear() = this.map.clear() actual inline fun <K, V> FastIdentityMap<K, V>.fastKeyForEach(callback: (key: K) -> Unit): Unit { for (key in this.keys()) { callback(key) } }
using DigitalRune.Geometry; using DigitalRune.Graphics.Scene3D; using DigitalRune.Mathematics; using DigitalRune.Mathematics.Algebra; using NUnit.Framework; namespace DigitalRune.Graphics.Tests { [TestFixture] public class CameraInstanceTest { [Test] public void PoseTest() { CameraInstance cameraInstance = new CameraInstance(new Camera(new PerspectiveProjection())); Assert.IsNotNull(cameraInstance.PoseWorld); Assert.AreEqual(Vector3F.Zero, cameraInstance.PoseWorld.Position); Assert.AreEqual(Matrix33F.Identity, cameraInstance.PoseWorld.Orientation); // Set new Pose Vector3F position = new Vector3F(1, 2, 3); QuaternionF orientation = QuaternionF.CreateRotation(new Vector3F(3, 4, 5), 0.123f); cameraInstance.PoseWorld = new Pose(position, orientation); Assert.AreEqual(position, cameraInstance.PoseWorld.Position); Assert.AreEqual(orientation.ToRotationMatrix33(), cameraInstance.PoseWorld.Orientation); Assert.IsTrue(Matrix44F.AreNumericallyEqual(cameraInstance.PoseWorld.ToMatrix44F(), cameraInstance.ViewInverse)); Assert.IsTrue(Matrix44F.AreNumericallyEqual(cameraInstance.PoseWorld.Inverse.ToMatrix44F(), cameraInstance.View)); // Set Position and Orientation position = new Vector3F(5, 6, 7); orientation = QuaternionF.CreateRotation(new Vector3F(1, -1, 6), -0.123f); cameraInstance.PoseWorld = new Pose(position, orientation); Assert.AreEqual(position, cameraInstance.PoseWorld.Position); Assert.AreEqual(orientation.ToRotationMatrix33(), cameraInstance.PoseWorld.Orientation); Assert.IsTrue(Matrix44F.AreNumericallyEqual(cameraInstance.PoseWorld.Inverse.ToMatrix44F(), cameraInstance.View)); Assert.IsTrue(Matrix44F.AreNumericallyEqual(cameraInstance.PoseWorld.ToMatrix44F(), cameraInstance.ViewInverse)); } [Test] public void ViewTest() { CameraInstance cameraInstance = new CameraInstance(new Camera(new PerspectiveProjection())); Assert.AreEqual(Matrix44F.Identity, cameraInstance.View); Assert.AreEqual(Matrix44F.Identity, cameraInstance.ViewInverse); Vector3F position = new Vector3F(1, 2, 3); Vector3F target = new Vector3F(2, 5, 4); Vector3F upVector = new Vector3F(1, 1, 1); Matrix44F view = Matrix44F.CreateLookAt(position, target, upVector); cameraInstance.View = view; Assert.AreEqual(view, cameraInstance.View); Assert.AreEqual(view.Inverse, cameraInstance.ViewInverse); Vector3F originOfCamera = cameraInstance.PoseWorld.Position; originOfCamera = cameraInstance.View.TransformPosition(originOfCamera); Assert.IsTrue(Vector3F.AreNumericallyEqual(Vector3F.Zero, originOfCamera)); Vector4F positionView = new Vector4F(0, 0, -1, 1); Vector4F positionView2; // Transform a point from view space to world space. Vector4F positionWorld = cameraInstance.PoseWorld * positionView; Vector4F positionWorld2 = cameraInstance.ViewInverse * positionView; Assert.IsTrue(Vector4F.AreNumericallyEqual(positionWorld, positionWorld2)); // Transform a point from world space to view space. positionView = cameraInstance.PoseWorld.Inverse * positionWorld; positionView2 = cameraInstance.View * positionWorld; Assert.IsTrue(Vector4F.AreNumericallyEqual(positionView, positionView2)); cameraInstance.View = Matrix44F.Identity; Assert.AreEqual(Vector3F.Zero, cameraInstance.PoseWorld.Position); Assert.AreEqual(Matrix33F.Identity, cameraInstance.PoseWorld.Orientation); } [Test] public void InverseViewTest() { CameraInstance cameraInstance = new CameraInstance(new Camera(new PerspectiveProjection())); Assert.AreEqual(Matrix44F.Identity, cameraInstance.View); Assert.AreEqual(Matrix44F.Identity, cameraInstance.ViewInverse); Vector3F position = new Vector3F(1, 2, 3); Vector3F target = new Vector3F(2, 5, 4); Vector3F upVector = new Vector3F(1, 1, 1); Matrix44F view = Matrix44F.CreateLookAt(position, target, upVector); cameraInstance.ViewInverse = view.Inverse; Assert.IsTrue(Matrix44F.AreNumericallyEqual(view, cameraInstance.View)); Assert.IsTrue(Matrix44F.AreNumericallyEqual(view.Inverse, cameraInstance.ViewInverse)); Assert.IsTrue(Matrix44F.AreNumericallyEqual(view.Inverse, cameraInstance.PoseWorld.ToMatrix44F())); } [Test] public void LookAtTest() { CameraInstance cameraInstance = new CameraInstance(new Camera(new PerspectiveProjection())); Vector3F position = new Vector3F(1, 2, 3); Vector3F target = new Vector3F(2, 5, 4); Vector3F upVector = new Vector3F(1, 1, 1); cameraInstance.PoseWorld = new Pose(new Vector3F(1, 2, 3)); Matrix44F expected = Matrix44F.CreateLookAt(position, target, upVector); cameraInstance.LookAt(target, upVector); Assert.That(Matrix44F.AreNumericallyEqual(expected, cameraInstance.View)); position = new Vector3F(-2, 3, -7.5f); expected = Matrix44F.CreateLookAt(position, target, upVector); cameraInstance.LookAt(position, target, upVector); Assert.That(Vector3F.AreNumericallyEqual(position, cameraInstance.PoseWorld.Position)); Assert.That(Matrix44F.AreNumericallyEqual(expected, cameraInstance.View)); } [Test] public void PoseChangedTest() { bool poseChanged = false; CameraInstance cameraInstance = new CameraInstance(new Camera(new PerspectiveProjection())); cameraInstance.PoseChanged += (sender, eventArgs) => poseChanged = true; cameraInstance.PoseWorld = new Pose(new Vector3F(1, 2, 3)); Assert.IsTrue(poseChanged); } [Test] public void ShapeChangedTest() { bool shapeChanged = false; CameraInstance cameraInstance = new CameraInstance(new Camera(new PerspectiveProjection())); cameraInstance.BoundingShapeChanged += (sender, eventArgs) => shapeChanged = true; cameraInstance.Camera.Projection.Far = 9; Assert.IsTrue(shapeChanged); } [Test] public void SetProjectionTest() { Matrix44F projectionMatrix = Matrix44F.CreateOrthographicOffCenter(1, 4, 2, 5, 6, 11); OrthographicProjection orthographicProjection = new OrthographicProjection(); orthographicProjection.Set(projectionMatrix); CameraInstance cameraInstance = new CameraInstance(new Camera(orthographicProjection)); Assert.AreEqual(Vector3F.Zero, cameraInstance.PoseWorld.Position); Assert.AreEqual(Matrix33F.Identity, cameraInstance.PoseWorld.Orientation); Assert.That(Numeric.AreEqual(3, cameraInstance.Camera.Projection.Width)); Assert.That(Numeric.AreEqual(3, cameraInstance.Camera.Projection.Height)); Assert.That(Numeric.AreEqual(1f, cameraInstance.Camera.Projection.AspectRatio)); Assert.That(Numeric.AreEqual(6, cameraInstance.Camera.Projection.Near)); Assert.That(Numeric.AreEqual(11, cameraInstance.Camera.Projection.Far)); Assert.That(Numeric.AreEqual(1, cameraInstance.Camera.Projection.Left)); Assert.That(Numeric.AreEqual(4, cameraInstance.Camera.Projection.Right)); Assert.That(Numeric.AreEqual(2, cameraInstance.Camera.Projection.Bottom)); Assert.That(Numeric.AreEqual(5, cameraInstance.Camera.Projection.Top)); Assert.That(Numeric.AreEqual(5, cameraInstance.Camera.Projection.Depth)); Assert.That(Matrix44F.AreNumericallyEqual(orthographicProjection, cameraInstance.Camera.Projection)); Assert.That(Matrix44F.AreNumericallyEqual(orthographicProjection.Inverse, cameraInstance.Camera.Projection.Inverse)); Assert.IsNotNull(cameraInstance.BoundingShape); PerspectiveProjection perspectiveProjection = new PerspectiveProjection(); perspectiveProjection.Inverse = Matrix44F.CreatePerspectiveOffCenter(1, 5, 2, 5, 1, 10).Inverse; cameraInstance = new CameraInstance(new Camera(perspectiveProjection)); Assert.AreEqual(Vector3F.Zero, cameraInstance.PoseWorld.Position); Assert.AreEqual(Matrix33F.Identity, cameraInstance.PoseWorld.Orientation); Assert.That(Numeric.AreEqual(MathHelper.ToRadians(33.690067f), cameraInstance.Camera.Projection.FieldOfViewX)); Assert.That(Numeric.AreEqual(MathHelper.ToRadians(15.255119f), cameraInstance.Camera.Projection.FieldOfViewY)); Assert.That(Numeric.AreEqual(4, cameraInstance.Camera.Projection.Width)); Assert.That(Numeric.AreEqual(3, cameraInstance.Camera.Projection.Height)); Assert.That(Numeric.AreEqual(4.0f / 3.0f, cameraInstance.Camera.Projection.AspectRatio)); Assert.That(Numeric.AreEqual(1, cameraInstance.Camera.Projection.Left)); Assert.That(Numeric.AreEqual(5, cameraInstance.Camera.Projection.Right)); Assert.That(Numeric.AreEqual(2, cameraInstance.Camera.Projection.Bottom)); Assert.That(Numeric.AreEqual(5, cameraInstance.Camera.Projection.Top)); Assert.That(Numeric.AreEqual(1, cameraInstance.Camera.Projection.Near)); Assert.That(Numeric.AreEqual(10, cameraInstance.Camera.Projection.Far)); Assert.That(Numeric.AreEqual(9, cameraInstance.Camera.Projection.Depth)); Assert.IsNotNull(cameraInstance.BoundingShape); } } }
import Sequelize from 'sequelize' import dbConfig from '../config/dbConfig' // TODO: add logger const sequelize = new Sequelize(dbConfig.database, dbConfig.username, dbConfig.password, { host: dbConfig.host, port: dbConfig.port, dialect: dbConfig.dialect, dialectOptions: dbConfig.dialectOptions, operatorsAliases: false, pool: { max: 5, min: 0, idle: 1, }, }) const UserQuestion = require('./models/UserQuestion')(sequelize); const User = require('./models/User')(sequelize); const Topic = require('./models/Topic')(sequelize); const TestType = require('./models/TestType')(sequelize); const Tech = require('./models/Tech')(sequelize); const Source = require('./models/Source')(sequelize); const QuestionStatus = require('./models/QuestionStatus')(sequelize); const Question = require('./models/Question')(sequelize); const Answer = require('./models/Answer')(sequelize); Question.hasMany(Source, {foreignKey: 'question_id'}); Source.belongsTo(Question, {foreignKey: 'question_id'}); Question.hasMany(Answer, {foreignKey: 'question_id'}); Answer.belongsTo(Question, {foreignKey: 'question_id'}); Tech.belongsToMany(Topic, {foreignKey: 'tech_id'}); Topic.belongsToMany(Question, {foreignKey: 'topic_id'}); UserQuestion.hasOne(QuestionStatus, {foreignKey: 'status_id'}); User.belongsToMany(Question, {through: UserQuestion}); Question.belongsToMany(User, {through: UserQuestion}); sequelize.sync(); export default { UserQuestion, User, Topic, TestType, Tech, Source, QuestionStatus, Question, Answer, sync: sequelize.sync.bind(this), close: () => sequelize.connectionManager.close(), };
<?php class SXE extends SimpleXmlElement { public function count() { echo "Called Count!\n"; return parent::count(); } } $str = '<xml><c>asdf</c><c>ghjk</c></xml>'; $sxe = new SXE($str); var_dump(count($sxe)); ?> ==Done==
import 'dart:convert'; import 'package:archive/archive.dart'; import 'package:flutter_trading_volume/models/supported_pairs.dart'; import 'package:flutter_trading_volume/models/trades/binance_trade.dart'; import 'package:flutter_trading_volume/models/trades/bitfinex_trade.dart'; import 'package:flutter_trading_volume/models/trades/bitmex_trade.dart'; import 'package:flutter_trading_volume/models/trades/bitstamp_trade.dart'; import 'package:flutter_trading_volume/models/trades/bybit_trade.dart'; import 'package:flutter_trading_volume/models/trades/coinbase_trade.dart'; import 'package:flutter_trading_volume/models/trades/ftx_trade.dart'; import 'package:flutter_trading_volume/models/trades/kraken_trade.dart'; import 'package:flutter_trading_volume/models/trades/okex_trade.dart'; import 'package:flutter_trading_volume/utils/constants.dart'; import 'package:flutter_trading_volume/websockets/bitstamp_socket.dart'; import 'package:flutter_trading_volume/websockets/callbacks/exchange_callbacks.dart'; import 'package:flutter_trading_volume/websockets/coinbase_socket.dart'; import 'package:flutter_trading_volume/websockets/huobi_socket.dart'; import 'package:flutter_trading_volume/websockets/okex_socket.dart'; import '../binance_socket.dart'; import '../bitfinex_socket.dart'; import '../bitmex_socket.dart'; import '../bybit_socket.dart'; import '../ftx_socket.dart'; import '../kraken_socket.dart'; class ExchangeManager { SupportedPairs _currentPair; //Sockets BinanceSocket _binanceSocket; FtxSocket _ftxSocket; ByBitSocket _byBitSocket; BitmexSocket _bitmexSocket; BitfinexSocket _bitfinexSocket; KrakenSocket _krakenSocket; BitstampSocket _bitstampSocket; CoinbaseSocket _coinbaseSocket; HuobiSocket _huobiSocket; OkExSocket _okExSocket; //Callbacks ExchangeCallbacks _exchangeCallbacks; ExchangeManager(SupportedPairs pair, ExchangeCallbacks callbacks) { this._exchangeCallbacks = callbacks; this._currentPair = pair; _initExchanges(); } void _initExchanges() { _binanceSocket = new BinanceSocket(pair: _currentPair); _ftxSocket = new FtxSocket(pair: _currentPair); _byBitSocket = new ByBitSocket(pair: _currentPair); _bitmexSocket = new BitmexSocket(pair: _currentPair); _bitfinexSocket = new BitfinexSocket(pair: _currentPair); _krakenSocket = new KrakenSocket(pair: _currentPair); _bitstampSocket = new BitstampSocket(pair: _currentPair); _coinbaseSocket = new CoinbaseSocket(pair: _currentPair); _huobiSocket = new HuobiSocket(pair: _currentPair); _okExSocket = new OkExSocket(pair: _currentPair); } void updatePairs(SupportedPairs pair) { this._currentPair = pair; _initExchanges(); } void _listenForDataUpdate() { _binanceSocket.socket.stream.listen((event) { final trade = BinanceTrade.fromJson(event.toString()); _exchangeCallbacks.onTrade(trade, BINANCE_PRICE_ID); }); _ftxSocket.socket.stream.listen((event) { final trades = FtxTrade.fromJson(event.toString()); if(trades != null && trades.isNotEmpty) { trades.forEach((trade) { _exchangeCallbacks.onTrade(trade, FTX_PRICE_ID); }); } }); _byBitSocket.socket.stream.listen((event) { final trade = ByBitTrade.fromJson(event.toString()); _exchangeCallbacks.onTrade(trade, BYBIT_PRICE_ID); }); _bitmexSocket.socket.stream.listen((event) { final trade = BitmexTrade.fromJson(event.toString()); _exchangeCallbacks.onTrade(trade, BITMEX_PRICE_ID); }); _bitfinexSocket.socket.stream.listen((event) { final trades = BitfinexTrade.fromJson(event.toString()); if(trades != null && trades.isNotEmpty) { trades.forEach((trade) { _exchangeCallbacks.onTrade(trade, BITFINEX_PRICE_ID); }); } }); _krakenSocket.socket.stream.listen((event) { final trades = KrakenTrade.fromJson(event.toString()); if(trades != null && trades.isNotEmpty) { trades.forEach((trade) { _exchangeCallbacks.onTrade(trade, KRAKEN_PRICE_ID); }); } }); _bitstampSocket.socket.stream.listen((event) { final trade = BitstampTrade.fromJson(event.toString()); _exchangeCallbacks.onTrade(trade, BITSTAMP_PRICE_ID); }); _coinbaseSocket.socket.stream.listen((event) { final trade = CoinbaseTrade.fromJson(event.toString()); _exchangeCallbacks.onTrade(trade, COINBASE_PRICE_ID); }); _okExSocket.socket.stream.listen((event) { final inflater = Inflate(event); final trades = OkExTrade.fromJson(utf8.decode(inflater.getBytes())); if(trades != null && trades.isNotEmpty) { trades.forEach((trade) { _exchangeCallbacks.onTrade(trade, OKEX_PRICE_ID); }); } }); //TODO: connection doesn't work, why?... _huobiSocket.socket.stream.listen((event) { //print(event); //final trade = CoinbaseTrade.fromJson(event.toString()); //_exchangeCallbacks.onTrade(trade, COINBASE_PRICE_ID); }); } void connectToSocket() { if (_binanceSocket.socket == null/* && (_currentExchange == SupportedExchange.ALL || _currentExchange == SupportedExchange.BINANCE)*/) { _binanceSocket.connect(); } if (_ftxSocket.socket == null/* && (_currentExchange == SupportedExchange.ALL || _currentExchange == SupportedExchange.FTX)*/) { _ftxSocket.connect(); } if(_byBitSocket.socket == null && _currentPair == SupportedPairs.BTC_USDT){ //TODO: Currently we don't support other pairs for ByBit _byBitSocket.connect(); } if(_bitmexSocket.socket == null && _currentPair == SupportedPairs.BTC_USDT){ //TODO: Currently we don't support other pairs for BitMEX _bitmexSocket.connect(); } if(_bitfinexSocket.socket == null ){ _bitfinexSocket.connect(); } if(_krakenSocket.socket == null ){ _krakenSocket.connect(); } if(_bitstampSocket.socket == null ){ _bitstampSocket.connect(); } if(_coinbaseSocket.socket == null ){ _coinbaseSocket.connect(); } if(_huobiSocket.socket == null ){ _huobiSocket.connect(); } if(_okExSocket.socket == null ){ _okExSocket.connect(); } _listenForDataUpdate(); } void closeConnection() { _binanceSocket.closeConnection(); _ftxSocket.closeConnection(); _byBitSocket.closeConnection(); _bitmexSocket.closeConnection(); _bitfinexSocket.closeConnection(); _krakenSocket.closeConnection(); _bitstampSocket.closeConnection(); _coinbaseSocket.closeConnection(); _huobiSocket.closeConnection(); _okExSocket.closeConnection(); } }
\ local utils = require("leftry.utils") local function quasiquote_eval(invariant, car, output) if utils.hasmetatable(car, list) then if car:car() == symbol("quasiquote-eval") then local cdr = car:cdr() assert(list.__len(cdr) == 1, "quasiquote_eval only accepts one parameter.") return compiler.expize(invariant, cdr:car(), output) end return list.cast(car, function(value) return quasiquote_eval(invariant, value, output) end) end if lua_ast[getmetatable(car)] then return car:gsub(list, function(value) return quasiquote_eval(invariant, value, output) end) end return car end local function compile_quasiquote_eval(invariant, cdr, output) local cadr = cdr:car() local exp = compiler.expize(invariant, quasiquote_eval(invariant, cadr, output), output) if utils.hasmetatable(exp, lua_name) then function exp:repr() return lua_functioncall.new(lua_name("lua_nameize"), lua_args.new(lua_explist({exp}))) end else function exp:repr() return exp end end return exp end local function escape_lua(invariant, data) if lua_ast[getmetatable(data)] then return data:repr() end if utils.hasmetatable(data, list) then if data:car() == symbol("quasiquote-eval") then return data end data = list.cast(data, function(value) return escape_lua(invariant, value) end) end return data end local function compile_quasiquote(invariant, cdr, output) assert(list.__len(cdr) == 1, "quasiquote only accepts one parameter.") local cadr = cdr:car() return quasiquote_eval(invariant, escape_lua(invariant, cadr), output) end local function read_quasiquote(invariant, position) local rest, values = read(invariant, position + 1) if rest then values[1] = list(symbol("quasiquote"), values[1]) return rest, values end end local function read_quasiquote_eval(invariant, position) local rest, values = read(invariant, position + 1) if rest then values[1] = list(symbol("quasiquote-eval"), values[1]) return rest, values end end { read = { [","] = {read_quasiquote_eval}, ["`"] = {read_quasiquote} }, lua = { ["quasiquote"] = {expize=compile_quasiquote, statize=compile_quasiquote}, ["quasiquote-eval"] = {expize=compile_quasiquote_eval, statize=compile_quasiquote_eval} } }
from django.shortcuts import render from django.db.models import Max, Count from leads.models import Team, Distance from leads.serializers import TeamSerializer, TeamActvSerializer, DistanceSerializer from rest_framework import generics class TeamListAPI(generics.ListAPIView): queryset = Team.objects.all() serializer_class = TeamSerializer class TeamRetriveAPI(generics.RetrieveAPIView): queryset = Team.objects.all() serializer_class = TeamSerializer class TeamRetriveUpdateAPI(generics.RetrieveUpdateAPIView): queryset = Team.objects.all() serializer_class = TeamActvSerializer class DistanceCreateAPI(generics.CreateAPIView): queryset = Distance.objects.all() serializer_class = DistanceSerializer class DistanceRetriveUpdateDestroyAPI(generics.RetrieveUpdateDestroyAPIView): queryset = Distance.objects.all() serializer_class = DistanceSerializer
unorm = require('unorm'); // Function to display Unicode codepoints of a string. function codepoints(string) { return string.split('').map(function(chr) { var codepoint = chr.charCodeAt(0); return (codepoint >= 33 && codepoint <= 126) ? JSON.stringify(chr) : 'U+' + codepoint.toString(16).toUpperCase(); }).join(' '); } // Scientific Ångström symbol is converted to Scandinavian letter Å. angstrom = '\u212B'; console.log('- Example 1 -'); console.log(codepoints(angstrom)); console.log(codepoints(unorm.nfc(angstrom))); // German ä and ü decomposed into a and u with Combining Diaeresis character. letters = '\u00E4\u00FC' console.log('- Example 2 -'); console.log(codepoints(letters)); console.log(codepoints(unorm.nfd(letters))); // String optimized for compatibility, ie. CO₂ becomes CO2. scientific = 'CO\u2082 and E=mc\u00B2' console.log('- Example 3 -'); console.log(scientific) console.log(unorm.nfkc(scientific)); // NOTE: Rest of the example requires XRegExp: npm install xregexp // Remove combining characters / marks from Swedish name, ie. ö becomes o. // This is useful for indexing and searching internationalized text. XRegExp = require('xregexp'); name = '\u00C5ngstr\u00F6m'; console.log('- Example 4 -'); console.log(unorm.nfkd(name)); console.log(unorm.nfkd(name).replace(XRegExp('\\p{M}', 'g'), ''));
ActiveDirectory-Password-Change =============================== ![Screenshot](https://raw.github.com/janikvonrotz/ActiveDirectory-Password-Change/master/doc/screenshot.png) * Install dependencies with [bower](https://github.com/bower/bower) * Run a bower update in the project root * Add an ActiveDirectory user which has the right to reset a user's password * Set variables in `index.php` * `$ldapuser` * `$ldappwd` * `$ldaphost` * `$SecKey` * Move the project on a webserver * Support for php ldap module must be enabled * Use a SSL certified connection when publishing the site to the internet * Open the password change website like this `https://site.yourdomain.com/index.php?sec=[your secure key from $SecKey]`
<?php session_start(); error_reporting(0); if(isset($_SESSION['email'])) { $email = $_SESSION['email']; $id=$_GET['id']; $errors = array(); $db = mysqli_connect('localhost:3307', 'root', '', 'foodshala'); $query="INSERT INTO cart(email, dishid) VALUES('$email','$id')"; mysqli_query($db, $query); echo "done"; echo "<script>"; echo "alert('Added to the cart Successfully!');"; echo "location='shop.php';"; echo "</script>"; } else{ header('location: userlogin.php'); } ?>
# Alignments These directories hold Turtle-based alignments between Brick and other ontologies, as well as the auxiliary files needed to generate those alignments where necessary. Alignments should be distributed as Turtle files with a specific naming schema. For an ontology `X`, the alignment file should be `Brick-X-alignment.ttl`.
/* Copyright © LiquidWeb Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package cmd import ( "bytes" "fmt" "html/template" "io/ioutil" "os" "path/filepath" "strings" "time" "github.com/spf13/cobra" "gopkg.in/yaml.v2" "github.com/liquidweb/liquidweb-cli/instance" "github.com/liquidweb/liquidweb-cli/utils" ) var planCmd = &cobra.Command{ Use: "plan", Short: "Process YAML plan file", Long: `Process YAML plan file. Examples: 'lw plan --file plan.yaml --var envname=dev' Any value in the plan can optionally utilitize variables in Golang's template style. To access environment variables use .Env.VARNAME (i.e. .Env.USER ) Example plan file to create a cloud server: --- cloud: server: create: - type: "SS.VPS" template: "UBUNTU_1804_UNMANAGED" zone: 40460 hostname: "db1.somedomain.com" ips: 1 public-ssh-key: "public ssh key string here " config-id: 88 - type: "SS.VPS" template: "UBUNTU_1804_UNMANAGED" zone: 40460 hostname: "web1.{{- .Var.envname -}}.somedomain.com" ips: 1 public-ssh-key: "public ssh key string here " config-id: 88 `, Run: func(cmd *cobra.Command, args []string) { planFile, _ := cmd.Flags().GetString("file") varSliceFlag, err := cmd.Flags().GetStringSlice("var") if err != nil { lwCliInst.Die(err) } _, err = os.Stat(planFile) if err != nil { if os.IsNotExist(err) { lwCliInst.Die(fmt.Errorf("Plan file \"%s\" does not exist.\n", planFile)) } else { lwCliInst.Die(err) } } planYaml, err := ioutil.ReadFile(filepath.Clean(planFile)) if err != nil { lwCliInst.Die(err) } planYaml, err = processTemplate(varSliceFlag, planYaml) if err != nil { lwCliInst.Die(err) } var plan instance.Plan err = yaml.Unmarshal(planYaml, &plan) if err != nil { lwCliInst.Die(fmt.Errorf("Error parsing YAML file: %s\n", err)) } if err := lwCliInst.ProcessPlan(&plan); err != nil { lwCliInst.Die(err) } }, } func envToMap() map[string]string { envMap := make(map[string]string) for _, v := range os.Environ() { split_v := strings.Split(v, "=") envMap[split_v[0]] = split_v[1] } return envMap } func varsToMap(vars []string) map[string]string { varMap := make(map[string]string) for _, v := range vars { s := strings.Split(v, "=") varMap[s[0]] = s[1] } return varMap } func processTemplate(varSliceFlag []string, planYaml []byte) ([]byte, error) { type TemplateVars struct { Var map[string]string Env map[string]string } tmplVars := &TemplateVars{ Var: varsToMap(varSliceFlag), Env: envToMap(), } var tmplBytes bytes.Buffer tmpl, err := template.New("plan.yaml").Funcs(template.FuncMap{ "generatePassword": func(length int) string { return utils.RandomString(length) }, "now": time.Now, "hex": func(number int64) string { return fmt.Sprintf("%X", number) }, }). Parse(string(planYaml)) if err != nil { return nil, err } err = tmpl.Execute(&tmplBytes, tmplVars) if err != nil { return nil, err } return tmplBytes.Bytes(), nil } func init() { rootCmd.AddCommand(planCmd) planCmd.Flags().String("file", "", "YAML file used to define a plan") planCmd.Flags().StringSlice("var", nil, "define variable name") if err := planCmd.MarkFlagRequired("file"); err != nil { lwCliInst.Die(err) } }
package br.com.alura.technews.retrofit.service import br.com.alura.technews.model.Noticia import retrofit2.Call import retrofit2.http.* interface NoticiaService { @GET("noticias") fun buscaTodas(): Call<List<Noticia>> @POST("noticias") fun salva(@Body noticia: Noticia): Call<Noticia> @PUT("noticias/{id}") fun edita(@Path("id") id: Long, @Body noticia: Noticia) : Call<Noticia> @DELETE("noticias/{id}") fun remove(@Path("id") id: Long): Call<Void> }
#[macro_use] extern crate c2rust_xcheck_derive; extern crate c2rust_xcheck_runtime; extern crate c2rust_bitfields; use c2rust_bitfields::BitfieldStruct; use c2rust_xcheck_runtime::hash::jodyhash::JodyHasher; use c2rust_xcheck_runtime::hash::simple::SimpleHasher; use c2rust_xcheck_runtime::hash::CrossCheckHash as XCH; #[test] fn test_bitfields() { #[repr(C)] #[derive(BitfieldStruct, CrossCheckHash, Default)] struct Foo { #[bitfield(name = "a", ty = "u32", bits = "0..=6")] #[bitfield(name = "b", ty = "u32", bits = "7..=17")] a_b: [u8; 3], #[bitfield(padding)] _pad: [u8; 1], } // These tests should match the ones in struct10.c let mut x = Foo::default(); x.set_a(42); x.set_b(1337); let x_hash = XCH::cross_check_hash::<JodyHasher, SimpleHasher>(&x); assert_eq!(x_hash, Some(0x24e75f75c47e329a)); let x = Foo { a_b: [0xAA, 0x55, 0xAA], _pad: [0x55], }; let x_hash = XCH::cross_check_hash::<JodyHasher, SimpleHasher>(&x); assert_eq!(x_hash, Some(0x24e75fad2461b12c)); let x = Foo { a_b: [0x55, 0xAA, 0x55], _pad: [0xAA], }; let x_hash = XCH::cross_check_hash::<JodyHasher, SimpleHasher>(&x); assert_eq!(x_hash, Some(0xc3e72e2d630778ed)); let x = Foo { a_b: [0x78, 0x56, 0x34], _pad: [0x12], }; let x_hash = XCH::cross_check_hash::<JodyHasher, SimpleHasher>(&x); assert_eq!(x_hash, Some(0xb6e8a1efb3617525)); }
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT license. using System; namespace FASTER.core { /// <summary> /// Configuration settings for serializing objects /// </summary> /// <typeparam name="Key"></typeparam> /// <typeparam name="Value"></typeparam> public class SerializerSettings<Key, Value> { /// <summary> /// Key serializer /// </summary> public Func<IObjectSerializer<Key>> keySerializer; /// <summary> /// Value serializer /// </summary> public Func<IObjectSerializer<Value>> valueSerializer; } /// <summary> /// Interface for variable length in-place objects /// modeled as structs, in FASTER /// </summary> /// <typeparam name="T"></typeparam> public interface IVariableLengthStruct<T> { /// <summary> /// Actual length of object /// </summary> /// <param name="t"></param> /// <returns></returns> int GetLength(ref T t); /// <summary> /// Average length of objects, make sure this includes the object /// header needed to compute the actual object length /// </summary> /// <returns></returns> int GetAverageLength(); /// <summary> /// Initial length, when populating for RMW from given input /// </summary> /// <typeparam name="Input"></typeparam> /// <param name="input"></param> /// <returns></returns> int GetInitialLength<Input>(ref Input input); } /// <summary> /// Length specification for fixed size (normal) structs /// </summary> /// <typeparam name="T"></typeparam> public readonly struct FixedLengthStruct<T> : IVariableLengthStruct<T> { private static readonly int size = Utility.GetSize(default(T)); /// <summary> /// Get average length /// </summary> /// <returns></returns> public int GetAverageLength() => size; /// <summary> /// Get initial length /// </summary> /// <typeparam name="Input"></typeparam> /// <param name="input"></param> /// <returns></returns> public int GetInitialLength<Input>(ref Input input) => size; /// <summary> /// Get length /// </summary> /// <param name="t"></param> /// <returns></returns> public int GetLength(ref T t) => size; } /// <summary> /// Settings for variable length keys and values /// </summary> /// <typeparam name="Key"></typeparam> /// <typeparam name="Value"></typeparam> public class VariableLengthStructSettings<Key, Value> { /// <summary> /// Key length /// </summary> public IVariableLengthStruct<Key> keyLength; /// <summary> /// Value length /// </summary> public IVariableLengthStruct<Value> valueLength; } /// <summary> /// Configuration settings for hybrid log /// </summary> public class LogSettings { /// <summary> /// Device used for main hybrid log /// </summary> public IDevice LogDevice = new NullDevice(); /// <summary> /// Device used for serialized heap objects in hybrid log /// </summary> public IDevice ObjectLogDevice = new NullDevice(); /// <summary> /// Size of a segment (group of pages), in bits /// </summary> public int PageSizeBits = 25; /// <summary> /// Size of a segment (group of pages), in bits /// </summary> public int SegmentSizeBits = 30; /// <summary> /// Total size of in-memory part of log, in bits /// </summary> public int MemorySizeBits = 34; /// <summary> /// Fraction of log marked as mutable (in-place updates) /// </summary> public double MutableFraction = 0.9; /// <summary> /// Copy reads to tail of log /// </summary> public bool CopyReadsToTail = false; /// <summary> /// Settings for optional read cache /// Overrides the "copy reads to tail" setting /// </summary> public ReadCacheSettings ReadCacheSettings = null; } /// <summary> /// Configuration settings for hybrid log /// </summary> public class ReadCacheSettings { /// <summary> /// Size of a segment (group of pages), in bits /// </summary> public int PageSizeBits = 25; /// <summary> /// Total size of in-memory part of log, in bits /// </summary> public int MemorySizeBits = 34; /// <summary> /// Fraction of log head (in memory) used for second chance /// copy to tail. This is (1 - MutableFraction) for the /// underlying log /// </summary> public double SecondChanceFraction = 0.1; } }
from datetime import datetime def get_quote() : date = str(datetime.now().date()) quotes = open("quotes.txt",'r',encoding="ISO-8859-1") line = quotes.readline() while line is not "": if line.split(' ', 1)[0] == date: quotation = line.split(' ',1)[1] print_quote(quotation) quotes.close() return None line = quotes.readline() quotes.close() print("Getting todays quote. press Ctrl+c to cancel...") import requests from bs4 import BeautifulSoup try: page = requests.get("https://theysaidso.com/quote-of-the-day/", timeout=5) # page.status_code soup = BeautifulSoup(page.content, 'html.parser') carousel = soup.find_all(id="myCarousel") lead = carousel[0].find_all('div', class_="lead") text = lead[0].find('span') text = text.get_text() author = lead[0].find_all('span')[1] author = author.get_text() quotation = text+" -"+author print_quote(quotation) try: # write the quote to file. with open("quotes.txt",'a') as quotes: quotes.write(date + ' ' + text + ' -' + author + "\n") except Exception as e: print(e) except: print("Cannot fetch todays quote. Please check your network connection.") def print_quote(quotation): print("\n>>>Todays quote : \n "+ quotation +"\n") if __name__ == "__main__": get_quote()
# frozen_string_literal: true module Resolvers module ErrorTracking class SentryErrorsResolver < BaseResolver def resolve(**args) args[:cursor] = args.delete(:after) project = object.project result = ::ErrorTracking::ListIssuesService.new( project, context[:current_user], args ).execute next_cursor = result[:pagination]&.dig('next', 'cursor') previous_cursor = result[:pagination]&.dig('previous', 'cursor') issues = result[:issues] # ReactiveCache is still fetching data return if issues.nil? Gitlab::Graphql::ExternallyPaginatedArray.new(previous_cursor, next_cursor, *issues) end end end end
// /////////////////////////////////////////////////////////////////// // // class: seg // // /////////////////////////////////////////////////////////////////// #ifndef pniseg_h #define pniseg_h // /////////////////////////////////////////////////////////////////// #include "pnimath.h" #include "pnivec3.h" // /////////////////////////////////////////////////////////////////// namespace pni { namespace math { // /////////////////////////////////////////////////////////////////// /** The seg is a representation of a line with a beginning and end point. It is stored as the origin (vec3) of the segment, the normalized direction (vec3) the segment points in, and the length (float). @note This internal representation differs from another common representation which has a non-normalized direction and a length which can be treated as a paramemtric value ranging from 0 to 1. */ class PNIMATHAPI seg { public: typedef Trait TraitType; typedef TraitType::ValueType ValueType; typedef ValueType& RefType; typedef const ValueType& ConstRefType; typedef seg ThisType; enum InitState { NoInit }; seg (); seg ( InitState ); seg ( const vec3& posIn, const vec3& dirIn, ValueType lengthIn ); seg ( const vec3& posIn, const vec3& endIn ); seg ( const vec3& posIn, ValueType headingIn, ValueType pitchIn, ValueType lengthIn ); seg ( const seg& orig ); ~seg (); // set methods seg& operator = ( const seg& orig ); void set ( const vec3& posIn, const vec3& dirIn, ValueType lengthIn ); void set ( const vec3& posIn, const vec3& endIn ); void set ( const vec3& posIn, ValueType headingIn, ValueType pitchIn, ValueType lengthIn ); // get methods void get ( vec3& posOut, vec3& dirOut, ValueType& lengthOut ) const; void get ( vec3& begOut, vec3& endOut ) const; void getPos ( vec3& posOut ) const; void getDir ( vec3& dirOut ) const; const vec3& getPos () const; const vec3& getDir () const; void setLength ( ValueType val ); ValueType getLength () const; // equality methods bool operator == ( const ThisType& vecIn ) const; bool equal ( const ThisType& vecIn, ValueType fuzz = TraitType::fuzzVal ) const; // get the closest point to pt in the primative bool closestPtIn ( vec3& dest, ValueType xval, ValueType yval, ValueType zval ) const; bool closestPtIn ( vec3& dest, const vec3& pt ) const; // Point contains tests. int contains ( ValueType x, ValueType y, ValueType z ) const; int contains ( const vec3& pt ) const; int contains ( const seg& segIn ) const; // extendBy () //mtcl: mtcl_begin_ignore bool extendBy ( ValueType x, ValueType y, ValueType c ); bool extendBy ( const vec3& pt ); //mtcl: mtcl_end_ignore /// Find intersection of two segs, or none. bool isect ( ThisType const& rhs, vec3& dst ) const; // simple point interp method // this interpolates along the segment given the range [0,1] vec3 lerp ( ValueType value ) const; void lerp ( vec3& dest, ValueType value ) const; // xform methods void xform ( const seg& seg, const matrix4& mat ); void xform4 ( const seg& seg, const matrix4& mat ); protected: vec3 pos; vec3 dir; /// Always normalized ValueType length; /// Actual length, not 0 <= length <= 1 private: }; ///////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////// PNIMATHINLINE seg:: seg () : length ( TraitType::zeroVal ) { } PNIMATHINLINE seg:: seg ( InitState ) : pos ( vec3::NoInit ), dir ( vec3::NoInit ) { } PNIMATHINLINE seg:: seg ( const vec3& posIn, const vec3& dirIn, ValueType lengthIn ) : pos ( posIn ), dir ( dirIn ), length ( lengthIn ) { } PNIMATHINLINE seg:: seg ( const vec3& posIn, const vec3& endIn ) { set ( posIn, endIn ); } PNIMATHINLINE seg:: seg ( const vec3& posIn, ValueType headingIn, ValueType pitchIn, ValueType lengthIn ) { set ( posIn, headingIn, pitchIn, lengthIn ); } PNIMATHINLINE seg:: seg ( const seg& orig ) : pos ( orig.pos ), dir ( orig.dir ), length ( orig.length ) { } PNIMATHINLINE seg:: ~seg () { } ///////////////////////////////////////////////////////////////////// // set methods PNIMATHINLINE seg& seg:: operator = ( const seg& orig ) { pos = orig.pos; dir = orig.dir; length = orig.length; return *this; } PNIMATHINLINE void seg:: set ( const vec3& posIn, const vec3& dirIn, ValueType lengthIn ) { pos = posIn; dir = dirIn; length = lengthIn; } ///////////////////////////////////////////////////////////////////// // get methods PNIMATHINLINE void seg:: get ( vec3& posOut, vec3& dirOut, ValueType& lengthOut ) const { posOut = pos; dirOut = dir; lengthOut = length; } PNIMATHINLINE void seg:: get ( vec3& begOut, vec3& endOut ) const { begOut = pos; endOut = dir; endOut *= length; endOut += begOut; } PNIMATHINLINE void seg:: getPos ( vec3& posOut ) const { posOut = pos; } PNIMATHINLINE void seg:: getDir ( vec3& dirOut ) const { dirOut = dir; } PNIMATHINLINE const vec3& seg:: getPos () const { return pos; } PNIMATHINLINE const vec3& seg:: getDir () const { return dir; } PNIMATHINLINE void seg:: setLength ( ValueType val ) { length = val; } PNIMATHINLINE seg::ValueType seg:: getLength () const { return length; } ///////////////////////////////////////////////////////////////////// } // end namespace math } // end namespace pni ///////////////////////////////////////////////////////////////////// #endif // pniseg_h
#!/bin/bash sudo apxs -a -i -c mod_waf.c sudo service apache2 restart sudo chown -R www-data.www-data /home/dexter/git_working/waf_apache_module/waf sudo chmod -R 774 /home/dexter/git_working/waf_apache_module/waf sudo chmod 777 /usr/lib/apache2/modules/mod_waf.so
; VL 2014 -- VL Verilog Toolkit, 2014 Edition ; Copyright (C) 2008-2015 Centaur Technology ; ; Contact: ; Centaur Technology Formal Verification Group ; 7600-C N. Capital of Texas Highway, Suite 300, Austin, TX 78731, USA. ; http://www.centtech.com/ ; ; License: (An MIT/X11-style license) ; ; Permission is hereby granted, free of charge, to any person obtaining a ; copy of this software and associated documentation files (the "Software"), ; to deal in the Software without restriction, including without limitation ; the rights to use, copy, modify, merge, publish, distribute, sublicense, ; and/or sell copies of the Software, and to permit persons to whom the ; Software is furnished to do so, subject to the following conditions: ; ; The above copyright notice and this permission notice shall be included in ; all copies or substantial portions of the Software. ; ; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR ; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, ; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE ; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER ; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING ; FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER ; DEALINGS IN THE SOFTWARE. ; ; Original author: Jared Davis <[email protected]> (in-package "VL2014") (include-book "std/strings/cat" :dir :system) (include-book "std/util/defval" :dir :system) (include-book "centaur/fty/fixequiv" :dir :system) (include-book "centaur/fty/basetypes" :dir :system) (local (include-book "misc/assert" :dir :system)) (local (include-book "arithmetic")) (local (std::add-default-post-define-hook :fix)) (defsection url-encoding :parents (utilities) :short "Functions for % encoding strings for use in URLs, as described in <a href='http://tools.ietf.org/html/rfc3986'>RFC 3986</a>." :long "<p>Per RFC 3986, the only unreserved characters are ALPHA, DIGIT, -, ., _, and ~. We implement some functions to percent-encode other characters in character lists and strings.</p>") (local (xdoc::set-default-parents url-encoding)) (define vl-url-encode-char ((x characterp)) :short "URL encode a single character. (slow, logically nice version)." :returns (encoding character-listp "Encoded version of X, in proper order.") :long "<p>See @(see vl-fast-url-encode-char) for an faster, array-lookup alternative.</p>" (let ((x (char-fix x))) (if (or (and (char<= #\A x) (char<= x #\Z)) (and (char<= #\a x) (char<= x #\z)) (and (char<= #\0 x) (char<= x #\9)) (member x '(#\- #\_ #\. #\~))) (list x) (let* ((hex-code (explode-atom (char-code x) 16)) (hex-code (if (eql (len hex-code) 1) (cons #\0 hex-code) hex-code))) (cons #\% hex-code)))) /// (local (progn (assert! (equal (implode (vl-url-encode-char #\a)) "a")) (assert! (equal (implode (vl-url-encode-char #\Space)) "%20")) (assert! (equal (implode (vl-url-encode-char (code-char 0))) "%00"))))) (define vl-make-url-encode-array ((n natp)) :parents (*vl-url-encode-array*) :guard (<= n 255) :hooks nil (if (zp n) (list (cons n (vl-url-encode-char (code-char n)))) (cons (cons n (vl-url-encode-char (code-char n))) (vl-make-url-encode-array (- n 1))))) (defval *vl-url-encode-array* :short "Array binding character codes to the pre-computed URL encodings." :showval t (compress1 'vl-url-encode-array (cons '(:header :dimensions (256) :maximum-length 257 :name vl-url-encode-array) (vl-make-url-encode-array 255)))) (define vl-fast-url-encode-char ((x :type character)) :short "URL encode a single character. (fast, array-based version)" :inline t :enabled t :verify-guards nil :hooks nil (mbe :logic (vl-url-encode-char x) :exec (aref1 'vl-url-encode-array *vl-url-encode-array* (char-code x))) /// (local (in-theory (disable aref1))) (local (defun test (n) (and (equal (aref1 'vl-url-encode-array *vl-url-encode-array* n) (vl-url-encode-char (code-char n))) (if (zp n) t (test (- n 1)))))) (local (defthm l0 (implies (and (test n) (natp n) (natp i) (<= i n)) (equal (aref1 'vl-url-encode-array *vl-url-encode-array* i) (vl-url-encode-char (code-char i)))))) (local (defthm l1 (implies (and (natp i) (<= i 255)) (equal (aref1 'vl-url-encode-array *vl-url-encode-array* i) (vl-url-encode-char (code-char i)))) :hints(("Goal" :use ((:instance l0 (n 255))))))) (local (defthm l2 (implies (characterp x) (equal (aref1 'vl-url-encode-array *vl-url-encode-array* (char-code x)) (vl-url-encode-char x))))) (verify-guards vl-fast-url-encode-char$inline)) (define vl-url-encode-chars-aux ((chars character-listp) acc) :short "URL encode a list of characters onto an accumulator in reverse order." :returns (encoded character-listp :hyp (character-listp acc)) :verbosep t (if (atom chars) acc (vl-url-encode-chars-aux (cdr chars) (revappend (vl-fast-url-encode-char (car chars)) acc))) /// (defthm true-listp-of-vl-url-encode-chars-aux (equal (true-listp (vl-url-encode-chars-aux x acc)) (true-listp acc)))) (define vl-url-encode-chars ((x character-listp)) :short "Simple way to URL encode a list of characters." :returns (encoded character-listp) :inline t ; This could be optimized with nreverse, but since the printer only uses the ; aux function anyway, I haven't bothered. (reverse (vl-url-encode-chars-aux x nil)) /// (defthm true-listp-of-vl-url-encode-chars (true-listp (vl-url-encode-chars x)) :rule-classes :type-prescription)) (define vl-url-encode-string-aux :short "Efficiently way to URL encode a string, in reverse order, without exploding it." ((x stringp) (n natp) (xl (eql xl (length x))) acc) :guard (<= n xl) :long "<p>This has such a nice logical definition that we just leave it enabled.</p>" :enabled t ; Removed after v7-2 by Matt K. since logically, the definition is ; non-recursive: ; :measure (nfix (- (nfix xl) (nfix n))) :verify-guards nil :hooks nil (mbe :logic (vl-url-encode-chars-aux (nthcdr n (explode x)) acc) :exec (b* (((when (mbe :logic (zp (- (nfix xl) (nfix n))) :exec (eql n xl))) acc) (char (char x n)) (encoding (vl-fast-url-encode-char char)) (acc (revappend encoding acc))) (vl-url-encode-string-aux x (+ 1 (lnfix n)) xl acc))) /// (local (in-theory (enable vl-url-encode-string-aux vl-url-encode-chars-aux))) (verify-guards vl-url-encode-string-aux)) (define vl-url-encode-string :short "Simple way to URL encode a string." ((x stringp :type string)) :returns (encoded stringp :rule-classes :type-prescription) :split-types t :inline t (let ((x (mbe :logic (str-fix x) :exec x))) (str::rchars-to-string (vl-url-encode-string-aux x 0 (length x) nil))) /// (local (assert! (let ((x "foo123$%20 blah !==[]{}7&*^!@&*^&*)($")) (equal (vl-url-encode-string x) (implode (vl-url-encode-chars (explode x))))))))
<?php namespace LAuth\Plugins\UEditor; use Illuminate\Http\Request; class Uploadvideo extends Uploadfile implements Contracts\UEditor { public function __construct(Request $request) { parent::__construct($request); $this->config = [ "path" => config('ueditor.videoPathFormat'), "size" => config('ueditor.videoMaxSize'), "allow" => config('ueditor.videoAllowFiles'), 'name' => config('ueditor.videoFieldName'), ]; } }
--- title: ComponentCounts searchTitle: Lua Spell ComponentCounts weight: 1 hidden: true menuTitle: ComponentCounts --- ## ComponentCounts ```lua Spell:ComponentCounts(number i); -- number ```
var browserify = require('browserify'), watchify = require('watchify'), gulp = require('gulp'), merge = require('merge-stream'), file = require('gulp-file'), jsonEditor = require("gulp-json-editor"), del = require('del'), source = require('vinyl-source-stream'), fs = require('fs'), sourceFile = './frame.js', destFolder = './build/', destFile = 'frame.bundle.js'; gulp.task('clean', ['version'], function () { return del(destFolder); }); gulp.task('version', function () { let version = fs.readFileSync('../../../version.txt', 'utf8'); let versionMetadata = fs.readFileSync('../../../version-metadata.txt'); let fullVersion = version; if (versionMetadata && versionMetadata != '') fullVersion = version + '-' + versionMetadata; var versionJson = `{ "base": "${version}", "metadata": "${versionMetadata}", "full": "${fullVersion}" }`; let manifestPipe = gulp.src("./manifest.json") .pipe(jsonEditor({ 'version': version })) .pipe(gulp.dest('.')) let versionPipe = file('version.json', versionJson, {src: true}) .pipe(gulp.dest('.')); return merge(manifestPipe, versionPipe); }); gulp.task('copy', ['version', 'clean'], function () { return gulp.src([ './*.json', './*.png', './*.js', '!gulpfile.js', './*.html', 'semantic/dist/semantic.min.css' ]).pipe(gulp.dest(destFolder)); }); gulp.task('browserify', ['version', 'clean', 'copy'], function() { return browserify(sourceFile) .bundle() .pipe(source(destFile)) .pipe(gulp.dest(destFolder)); }); gulp.task('default', ['version', 'clean', 'copy', 'browserify']);
using System; using System.Collections.Generic; using System.Linq; using System.Net; using System.Text; using System.Threading.Tasks; using HtmlAgilityPack; namespace BRUParserTable { public class BRUParser { WebClient webClient = new WebClient(); string Return; public async Task<string> GetTable(int number) { string page = webClient.DownloadString($"http://vuz2.bru.by/rate/{number}/"); HtmlDocument doc = new HtmlDocument(); doc.LoadHtml(page); var query = from table in doc.DocumentNode.SelectNodes("//table").Cast<HtmlNode>() from row in table.SelectNodes("tr").Cast<HtmlNode>() from cell in row.SelectNodes("th|td").Cast<HtmlNode>() select new { Table = table.Id, CellText = cell.InnerText }; foreach (var cell in query) { Return += string.Format("{0}: {1}", cell.Table, cell.CellText); } return Return; } } }
--- order: 1 title: scala_基础 date: 2021-02-08 09:57:38 permalink: /pages/b8ef2f/ categories: - 语言 - scala tags: - null comment: true --- # 第一章 基础 ## 常用类型 scala中常用类型如下: - Byte - Char - Short - Int - Long - Float - Double - Booean 这些类型都是类,所以在scala中不需要包装类型,在基本类型和包装类型之间的转换工作是scala编译器的事。 除此之外,还有 `Null` 类型,指代 `null` 或者空引用。 `Nothing` 是所有其它类型的子类型,包括空值。 `Any` 是其它类型的父类型, `AnyRef` 是其它引用类型的父类型。 在scala中,我们使用方法而不是强制类型转换,来做数值类型之间的转换。例如: ```scala 99.44.toInt // 99 99.toChar // 'c' ``` 和Java一样, `toString` 将任意对象转换成字符串,要将包含了数字的字符串转换成数字,使用 `toInt` 或者 `toDouble` 。 ## 算术和操作符重载 和Java相比,Scala并没有提供 `++` 和 `--` 操作符,我们需要使用 `+=1` 或者 `-=1` ; 对于常规的 `BigInt` 和 `BigDecimal` 对象,我们可以使用常规的方法使用那些数学操作符: ```scala val x:BigInt = 12121212 x*x*x ``` 在java中,我们需要使用 `x.multiply(x).multiply(x)` 。 ## 调用函数和方法 相比Java,在scala中使用数学函数更简单,我们不需要从某个类的调用它的静态方法。 ```scala import scala.math._ sqrt(2) pow(2,4) ``` 在使用以scala开头的包时,我们可以省略scala的前缀。例如 `import math._` 等价于 `import scala.math._` 。 Scala中没有静态方法,但是提供了单例对象。 不带参数的方法通常不使用圆括号,一般来讲,没有参数并不改变当前对象的方法都不带圆括号。 scala中允许使用数字 `*` 字符串,从而实现复制字符串的功能。 ## apply方法 在scala中,我们通常使用类似函数调用的语法。例如: ```scala "Hello"(4) // o ``` 你可以将这种用法当作 `()` 操作符的重载形式,它背后原理是实现一个名为 `apply` 的方法。所以 `"Hello"(4)` 相当于以下代码: ```scala "Hello".apply(4) ```
# GogoKit - viagogo API Client Library for PHP [![Package Version](https://img.shields.io/packagist/v/viagogo/gogokit.svg?style=flat)][version] [![Total Downloads](https://img.shields.io/packagist/dt/viagogo/gogokit.svg?style=flat)][downloads] [![Code Climate](https://img.shields.io/codeclimate/github/viagogo/gogokit.php.svg?style=flat)][codeclimate] [version]: https://packagist.org/packages/viagogo/gogokit [downloads]: https://packagist.org/packages/viagogo/gogokit [codeclimate]: https://codeclimate.com/github/viagogo/gogokit.php [apidocs]: http://developer.viagogo.net GogoKit is a lightweight, viagogo API client library for PHP. Our [developer site][apidocs] documents all of the viagogo APIs. ## Installation [composer]: https://getcomposer.org Install via [Composer][composer]. ``` $ composer require viagogo/gogokit ``` ## Usage [apidocsgettingstarted]: http://developer.viagogo.net/#getting-started See our [developer site][apidocsgettingstarted] for more examples. ```php // All methods require authentication. To get your viagogo OAuth credentials, // See TODO: docs url $configuration = new Viagogo\Core\ViagogoConfiguration(); $configuration->clientId = 'CLIENT_ID'; $configuration->clientSecret = 'CLIENT_SECRET'; $viagogoClient = new Viagogo\ViagogoClient($configuration); $viagogoClient->setToken($viagogoClient->getOAuthClient()->getClientAccessToken()); // Get an event by id $event = $viagogoClient->getEventClient()->getEvent(676615); // Get a list of results that match your search query $searchResults = $viagogoClient->getSearchClient()->getSearch("FC Barcelona tickets"); ``` ## Supported Platforms * PHP 5.5 or higher ## How to contribute All submissions are welcome. Fork the repository, read the rest of this README file and make some changes. Once you're done with your changes send a pull request. Thanks! ## Need Help? Found a bug? [submitanissue]: https://github.com/viagogo/gogokit.php/issues Just [submit a issue][submitanissue] if you need any help. And, of course, feel free to submit pull requests with bug fixes or changes.
<html> <head> welcome to simplexue </head> <body> <?php if($_POST['user'] && $_POST['pass']) { $conn = mysql_connect("********", "*****", "********"); mysql_select_db("phpformysql") or die("Could not select database"); if ($conn->connect_error) { die("Connection failed: " . mysql_error($conn)); } $user = $_POST[user]; $pass = md5($_POST[pass]); $sql = "select pw from php where user='$user'"; $query = mysql_query($sql); if (!$query) { printf("Error: %s\n", mysql_error($conn)); exit(); } $row = mysql_fetch_array($query, MYSQL_ASSOC); //echo $row["pw"]; if (($row[pw]) && (!strcasecmp($pass, $row[pw]))) { echo "<p>Logged in! Key:************** </p>"; } else { echo("<p>Log in failure!</p>"); } } ?> <form method=post action=index.php> <input type=text name=user value="Username"> <input type=password name=pass value="Password"> <input type=submit> </form> </body> <a href="index.txt"> </html>
import { CloudFrontToS3 } from "@aws-solutions-constructs/aws-cloudfront-s3"; import { aws_certificatemanager, aws_route53, aws_route53_targets, aws_s3_deployment, } from "aws-cdk-lib"; import { Construct } from "constructs"; interface Props { hostedZone: aws_route53.IPublicHostedZone; certificate: aws_certificatemanager.ICertificate; } export class AppDistributionConstruct extends Construct { readonly hostedZone: aws_route53.IPublicHostedZone; constructor(scope: Construct, id: string, props: Props) { super(scope, id); const { hostedZone, certificate } = props; const appDistributionDomainName = `app.${hostedZone.zoneName}`; const { s3Bucket: appBucket, cloudFrontWebDistribution: appDistribution } = new CloudFrontToS3(this, "AppDistribution", { insertHttpSecurityHeaders: false, cloudFrontDistributionProps: { certificate, domainNames: [appDistributionDomainName], }, }); new aws_s3_deployment.BucketDeployment(this, "AppDeployment", { destinationBucket: appBucket!, distribution: appDistribution!, sources: [aws_s3_deployment.Source.asset("sources/app/build")], }); new aws_route53.ARecord(this, "AppRecord", { zone: hostedZone, recordName: `app.${hostedZone.zoneName}`, target: aws_route53.RecordTarget.fromAlias( new aws_route53_targets.CloudFrontTarget(appDistribution) ), }); } }
package uk.co.appsbystudio.geoshare.friends.manager interface FriendsManagerPresenter { fun friends() fun viewpagerItem(item: Int) fun search() fun invalidSession() fun stop() }
<?php namespace App\Http\Controllers; use Illuminate\Http\Request; class ResultController extends Controller { public function result(Request $request){ exec('cd '.app_path().'..\\public', $output, $ret_code); exec('python script.py'. ' '.$request->input('dep').' '.$request->input('arr'). ' '.$request->input('debut').' '.$request->input('fin'). ' '.$request->input('sel'), $output, $ret_code); $f=fopen("b.txt","r"); $res=fgets($f); return view('result',['res'=>$res]); } }
//Autogenerated by SSDCPortal.EntityGenerator using SSDCPortal.Constants; using System; using System.Collections.Generic; using System.ComponentModel; namespace SSDCPortal.Shared.DataInterfaces { public interface IMessage { Int32 Id { get; set; } String UserName { get; set; } String Text { get; set; } DateTime When { get; set; } Guid UserID { get; set; } IApplicationUser Sender { get; set; } } }
-- @testpoint:opengauss关键字command_function_code(非保留),作为角色名 --关键字不带引号-成功 drop role if exists command_function_code; create role command_function_code with password 'gauss@123' valid until '2020-12-31'; --关键字带双引号-成功 drop role if exists "command_function_code"; create role "command_function_code" with password 'gauss@123' valid until '2020-12-31'; --关键字带单引号-合理报错 drop role if exists 'command_function_code'; create role 'command_function_code' with password 'gauss@123' valid until '2020-12-31'; --关键字带反引号-合理报错 drop role if exists `command_function_code`; create role `command_function_code` with password 'gauss@123' valid until '2020-12-31';
package ru.otus.otuskotlin.marketplace.backend.repo.dynamo import ru.otus.otuskotlin.marketplace.backend.repo.test.* import java.util.* /* Никогда так не делайте как в этом тесте. Здесь используется боевая база данных dynamoDB с тестовыми таблицами. В итоге, таблицы создаются после каждой сборки. Если их не подчищать, может набежать довольно крупная сумма за использование dynamoDB в AWS. Тестирование необходимо выполнять с помощью тестового Docker-образа dynamoDb: https://hub.docker.com/r/amazon/dynamodb-local */ //class RepoAdDynamoCreateTest: RepoAdCreateTest() { // override val repo = RepoAdDynamo(initObjects = initObjects, table = "test-${UUID.randomUUID()}") //} // //class RepoAdDynamoReadTest: RepoAdReadTest() { // override val repo = RepoAdDynamo(initObjects = initObjects, table = "test-${UUID.randomUUID()}") //} // //class RepoAdDynamoUpdateTest: RepoAdUpdateTest() { // override val repo = RepoAdDynamo(initObjects = initObjects, table = "test-${UUID.randomUUID()}") //} // //class RepoAdDynamoDeleteTest: RepoAdDeleteTest() { // override val repo = RepoAdDynamo(initObjects = initObjects, table = "test-${UUID.randomUUID()}") //} // //// Не проходит с глобальными индексами, индекс не успевает обновиться //class RepoAdDynamoSearchTest: RepoAdSearchTest() { // override val repo = RepoAdDynamo(initObjects = initObjects, table = "test-${UUID.randomUUID()}") //}
package com.foryouandme.data.repository.auth.network.request import com.squareup.moshi.Json data class LoginRequest<T>(@Json(name = "user") val user: T) data class PhoneLoginRequest( @Json(name = "phone_number") val phoneNumber: String, @Json(name = "verification_code") val verificationCode: String ) data class PinLoginRequest( @Json(name = "email") val pin: String, )
package br.charles.repository; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.PagingAndSortingRepository; import org.springframework.data.repository.query.Param; import org.springframework.stereotype.Repository; import br.charles.model.Contato; @Repository public interface ContatoRepository extends PagingAndSortingRepository<Contato, String> { public Page<Contato> findAll(Pageable pageable); @Query("SELECT p FROM Contato p " + "WHERE lower(nome) like %:busca% ") public Page<Contato> busca(@Param("busca") String busca, Pageable pageable); }
import 'core/room.dart'; import 'options.dart'; /// Main entry point to connect to a room. /// {@category Room} class LiveKitClient { static const version = '1.0.0'; /// Convenience method for connecting to a LiveKit server. /// Returns a [Room] upon a successful connect or throws when it fails. /// Alternatively, it is possible to instantiate [Room] and call [Room.connect] directly. static Future<Room> connect( String url, String token, { ConnectOptions? connectOptions, RoomOptions? roomOptions, }) async { final room = Room(); try { await room.connect( url, token, connectOptions: connectOptions, roomOptions: roomOptions, ); return room; } catch (error) { await room.dispose(); rethrow; } } }
/* * Copyright 2018 Nazmul Idris. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ object GradlePlugins { data class Versions(val gradle: String = "3.3.0", val kotlin: String = "1.3.20", val junit5: String = "1.2.0.0") val versions = Versions() val gradle = "com.android.tools.build:gradle:${versions.gradle}" val kotlin = "org.jetbrains.kotlin:kotlin-gradle-plugin:${versions.kotlin}" val junit5 = "de.mannodermaus.gradle.plugins:android-junit5:${versions.junit5}" } object Versions { val compile_sdk = 28 val target_sdk = 26 val min_sdk = 16 } object Deps { data class Versions(val arch_comp: String = "2.0.0", val design: String = "1.0.0", val gson: String = "2.8.5", val gms: String = "16.0.0", val places: String = "1.0.0", val dagger2: String = "2.17", val junit5: String = "5.2.0", val crayon: String = "0.1.0") val versions = Versions() val kotlin_stdlib_jdk8 = "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${GradlePlugins.versions.kotlin}" val arch_comp = "androidx.lifecycle:lifecycle-extensions:${versions.arch_comp}" val arch_comp_annotation = "androidx.lifecycle:lifecycle-compiler:${versions.arch_comp}" val material_design = "com.google.android.material:material:${versions.design}" val vector_drawable = "androidx.vectordrawable:vectordrawable:${versions.design}" val recycler_view = "androidx.recyclerview:recyclerview:${versions.design}" val gms_places = "com.google.android.libraries.places:places-compat:${versions.places}" val gms_location = "com.google.android.gms:play-services-location:${versions.gms}" val gson = "com.google.code.gson:gson:${versions.gson}" val dagger2 = "com.google.dagger:dagger:${versions.dagger2}" val dagger2_annotation = "com.google.dagger:dagger-compiler:${versions.dagger2}" val crayon = "com.importre:crayon:${versions.crayon}" } object TestingDeps { data class Versions(val assertj: String = "3.11.1", val junit5: String = "5.2.0", val mockk: String = "1.8.9", val roboelectric: String = "3.8", val junit4: String = "4.12") val versions = Versions() val junit5_jupiter = "org.junit.jupiter:junit-jupiter-api:${versions.junit5}" val junit5_jupiter_runtime = "org.junit.jupiter:junit-jupiter-engine:${versions.junit5}" val junit5_jupiter_params = "org.junit.jupiter:junit-jupiter-params:${versions.junit5}" val junit4_legacy = "junit:junit:${versions.junit4}" val junit5_vintage = "org.junit.vintage:junit-vintage-engine:${versions.junit5}" val assertj = "org.assertj:assertj-core:${versions.assertj}" val mockk = "io.mockk:mockk:${versions.mockk}" val roboelectric = "org.robolectric:robolectric:${versions.roboelectric}" }
# Data Access 数据访问 本单元创建于2015-10-22,用于存储C#中与数据有关的章节内容。 其中将涉及以下几个方面的内容: * **文件系统数据** * **XML** * **LINQ简介** * **应用LINQ**
package org.jim.common.cluster; import java.util.UUID; import org.jim.common.ImPacket; /** * 成员变量group, userid, ip谁有值就发给谁,toAll为true则发给所有<br> * packet是不允许为null的 * @author WChao * 2018年05月20日 下午3:10:29 */ public class ImClusterVo implements java.io.Serializable { private static final long serialVersionUID = 6978027913776155664L; public static final String CLIENTID = UUID.randomUUID().toString(); private ImPacket packet; private String clientId = CLIENTID; private String group; private String userid; private String token; private String ip; /** * ChannelContext'id */ private String channelId; private boolean toAll = false; public ImPacket getPacket() { return packet; } public void setPacket(ImPacket packet) { this.packet = packet; } public String getGroup() { return group; } public void setGroup(String group) { this.group = group; } public String getUserid() { return userid; } public void setUserid(String userid) { this.userid = userid; } public String getIp() { return ip; } public void setIp(String ip) { this.ip = ip; } /** * * @author: WChao */ public ImClusterVo() { } public ImClusterVo(ImPacket packet) { this.packet = packet; } /** * @param args * @author: WChao */ public static void main(String[] args) { } public boolean isToAll() { return toAll; } public void setToAll(boolean toAll) { this.toAll = toAll; } public String getClientId() { return clientId; } public void setClientId(String clientId) { this.clientId = clientId; } public String getChannelId() { return channelId; } public void setChannelId(String channelId) { this.channelId = channelId; } public String getToken() { return token; } public void setToken(String token) { this.token = token; } }
--- title: JPA Relation 2 author: Njade date: 2020-12-16 00:25:00 +0900 categories: [JPA] tags: [JPA] --- 이 게시글은 인프런의 [김영한님의 강의](https://www.inflearn.com/course/ORM-JPA-Basic)를 보고 정리한 것입니다. 실전 예제의 팁 등을 제외한 코드는 첨부하지 않습니다. 강의를 봐주세요. --- ## 다양한 연관관계 맵핑 --- * 연관관계 맵핑시 고려사항 3가지 * 다대일 [N:1] * 일대다 [1:N] * 일대일 [1:1] * 다대다 [N:N] --- ## 연관관계 맵핑시 고려사항 3가지 --- ### 다중성 * 다대일: @ManyToOne * 일대다: @OneToMany * 일대일: @OneToOne * 다대다: @ManyToMany > 실무에서 사용하지 말 것 ### 단방향, 양방향 * 테이블: 외래 키 하나로 양쪽 조인이 가능한 방향이라는 개념이 없음 * 객체: 참조용 필드가 있는 쪽만 참조 가능, 한쪽만 참조하면 단방향, 양쪽 참조면 양방향 ### 연관관계의 주인 * 테이블은 외래 키가 하나 * 객체는 참조가 2군데 * 두 객체 중 테이블의 외래 키를 관리할 곳을 정해야 함. * 외래 키를 관리하는 참조가 있는 곳이 주인 * 주인이 아닌 곳은 조회만 가능 * 다대일, 일대다 등에서 앞에 나오는 것이 주인 --- ## 다대일 [N:1] --- * 다대일 단방향상황에서 다대일 양방향으로의 확장은 테이블에 영향을 주지 않고 코드상으로만 추가가 가능 * 외래키가 있는 쪽이 연관관계의 주인 --- ## 일대다 [1:N] --- * 1이 주인 * 일반적으로 권장하지 않음. * 테이블에서 생각하면 N쪽에 무조건 외래키가 들어감. * 이 경우 1쪽 객체가 바뀌면 자신의 테이블이 아닌 다른 테이블로 sql이 실행되어 쿼리가 한 번 더 나감. * 객체와 테이블의 관계를 명확하게 파악하지 않으면 코드와 sql이 맵핑되지 않아 해석상의 어려움이 생길 수 있음. * 객체지향적으로는 살짝 부적절하더라도 DB설계에 맞춰 다대일 관계로 설계로 바꾸는 것이 좋음. * @JoinColumn을 꼭 사용하여야 하며 이를 사용하지 않으면 조인 테이블을 사용하게 됨. (테이블이 하나 더 생김.) * 일대다 양방향은 공식적으로 존재하지 않지만 사용은 가능하다. ```java @ManyToOne @JoinColumn(insertable = false, updatable = false) ``` * 위 두개의 어노테이션을 통해 읽기 전용 필드를 사용해서 양방향처럼 사용하는 방법이다. --- ## 일대일 [1:1] --- * 일대일은 반대도 일대일 * 주 테이블이나 대상 테이블 중에 외래 키 선택이 가능 * 외래 키에 DB에 유니크 제약조건이 추가되어야 한다. * 다대일과 유사함. * 외래키가 있는 곳이 연관관계의 주인 * 반대편은 mappedBy 적용. * 일대일이지만 대상 테이블에 외래키가 있는 단방향의 경우 JPA가 지원해주지 않음. * 양방향인 경우에는 대상 테이블에 외래키가 있으면 가능하지만 사실 일대일 주 테이블의 양방향과 동일. ### 주 테이블에 외래키 * 개발상에서는 주 테이블에 외래키를 가지고 있는 경우 JPA 맵핑이 편리하고 추가 쿼리를 실행할 필요가 없어 이점이 있을 수 있다. * 값이 없으면 외래키에 null이 들어가는 등의 단점이 있을 수 있다. ### 대상 테이블에 외래 키 * DB관점에서 일대다가 되는 경우 테이블 구조가 유지될 수 있다. * 프록시 기능의 한계로 지연 로딩으로 설정하여도 어차피 쿼리해봐야 결과를 알 수 있기 때문에 항상 즉시 로딩된다. --- ## 다대다 [N:M] --- * 실무에서는 사용하지 말 것. * RDB에서 정규화된 테이블은 다대다가 표현이 안됨. * 연결 테이블을 사용해서 일대다, 다대일 관계로 표현해야 함. * 객체는 컬렉션을 사용해서 객체 2개로 다대다 관계가 가능. * @ManyToMany, @JoinTable로 지정이 가능하며 단방향, 양방향이 가능하다. * 양방향은 동일하게 mappedBy를 사용해야함. * 실무에서는 연결 테이블이 연결만 하고 끝나는 일이 없고 추가정보가 꼭 들어가기 때문에 사용하지 않는 것이 좋음. * 연결 테이블용 엔티티를 만드는 것이 좋다. * 연결 테이블도 PK는 의미없는 sequence를 사용하는 것이 유연해질 수 있다. --- ## 실전 예제 --- * JPA는 parent와 같은 형태의 셀프 맵핑도 가능함. * 실무에서 중간 테이블은 단순하지 않으므로 @ManyToMany를 사용하지 말 것. * @JoinColumn은 외래키를 맵핑할 때 사용 * name: 매핑할 외래 키 이름 * referencedColumnName: 외래 키가 참조하는 대상 테이블의 컬럼명 * foreignKey(DDL): 외래키 제약조건을 직접 지정 * 이외에는 @Column과 동일 * @ManyToOne * optional: 기본값 true * fetch: 기본값 EAGER * cascaed: 영속성 전이 기능 * targetEntity: 연관된 엔티티의 타입 정보를 설정, 거의 사용하지 않음. 컬렉션 제네릭으로 타입 정보 추론. * 스펙상 mappedBy가 없음. > 무조건 주인이 되어야 함. * @OneToMany * mappedBy: 주인 필드 선택 * fetch: 기본값 LAZY * cascaed: 영속성 전이 기능 * targetEntity: 연관된 엔티티의 타입 정보를 설정, 거의 사용하지 않음. 컬렉션 제네릭으로 타입 정보 추론. --- # Reference --- - [인프런](https://www.inflearn.com/course/ORM-JPA-Basic)
from pyspark.sql.functions import col """Toy join function to showcase spark functions.""" def join_dataframes(left, right, columns_left, columns_right, join_type='inner'): if len(columns_left) == len(columns_right) and len(columns_left) > 0: cond = [col(left_col) == col(right_col) for (left_col, right_col) in zip(columns_left, columns_right)] return left.join(right, cond, join_type) raise Error('Columns parameters don\'t match or empty')
--- title: Good News date: 2017-03-11 14:45:00 Z tags: - shopify - e-commerce - design - fashion - clothing - apparel - shoes - trainers image: "/uploads/[email protected]" store-link: https://goodnews.london credit: Not credit-link: http://not-studio.com ---
#!ruby start_num = ARGV[0].hex end_num = ARGV[1].hex start_num.upto(end_num) do |n| puts sprintf('0x%04x', n) end
package xyz.gillall.demoapp.ui.pixabay.videogallery import android.os.Bundle import android.view.LayoutInflater import android.view.View import android.view.ViewGroup import androidx.databinding.DataBindingUtil import androidx.fragment.app.Fragment import androidx.navigation.NavController import androidx.navigation.fragment.NavHostFragment import org.koin.androidx.viewmodel.ext.android.getViewModel import xyz.gillall.demoapp.R import xyz.gillall.demoapp.databinding.FragmentVideoGalleryBinding class VideoGalleryFragment : Fragment() { private lateinit var binding: FragmentVideoGalleryBinding private lateinit var navController: NavController private lateinit var viewModel: VideoGalleryViewModel override fun onCreateView( inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle? ): View { viewModel = getViewModel() binding = DataBindingUtil .inflate(inflater, R.layout.fragment_video_gallery, container, false) binding.root.context binding.lifecycleOwner = this binding.viewModel = viewModel viewModel.updateByViewModel.observe(viewLifecycleOwner, { when (it.action) { "update" -> binding.viewModel = viewModel } }) navController = NavHostFragment.findNavController(this) return binding.root } }
<?php /** * from php.net. */ /** * Encapsulates a closure. */ final class Delegate { private $_Closure; /** * construct. * * @param Callable $closure */ public function __construct($closure) { // $this->_Closure = \Closure::fromCallable($closure); // $this->_Closure = new \Closure::($closure); $this->_Closure = $closure; } /** * Allows to call the delegate object directly. * * @param list ...$args variable numbers of arguments. * * @return mixed */ public function __invoke(...$args) { return call_user_func_array($this->_Closure, $args); } } /** * defines a type for event arguments. */ class EventArgs { protected $_Sender; /** * construct. * * @param mixed $sender */ public function __construct($sender = null) { $this->_Sender = $sender; } /** * property-read. * * @return object should contain the event emitting object. */ final public function Sender() { return $this->_Sender; } } /** * a basic event type for the delegate. */ class Event { private $_Receivers = array(); /** * Undocumented function * * @param Delegate $delegate * * @return Event */ final public function Add(Delegate $delegate) { $this->_Receivers[] = $delegate; return $this; } /** * fires the event. * * @param EventArgs $args * * @return void */ final public function Trigger(EventArgs $args) { foreach ($this->_Receivers as $delegate) { $delegate($args); } } } // declare anonymous function as delegate. $myDelegate = new Delegate(function(EventArgs $args) { echo 'anonymous function' . PHP_EOL; }); // declare event, assign the delegate, trigger event. $myEvent = new Event(); $myEvent->Add($myDelegate); /** * Defines a simple type that can handle events. */ class DemoEventHandler { public function onEvent(EventArgs $args) { echo 'class event handler' . PHP_EOL; } } // test event handler $controller = new DemoEventHandler(); $myEvent->Add(new Delegate(array($controller, 'onEvent'))); $myEvent->Trigger(new EventArgs($myEvent));
class ChangeSpaceObjCol < ActiveRecord::Migration[6.0] def change rename_column :album_space_objs, :object_id, :space_obj_id end end
module OodJob # A class that handles the communication with a resource manager for # submitting/statusing/holding/deleting jobs # @abstract class Adapter # The root exception class that all {Adapter} exceptions inherit from class Error < StandardError; end # The cluster used in submitting, querying status, and controlling jobs # @return [OodCluster::Cluster] cluster to communicate with attr_reader :cluster # @param cluster [OodCluster::Cluster] cluster that job is submitted to def initialize(cluster:, **_) @cluster = cluster end # Submit a job with the attributes defined in the job template instance # @abstract Subclass is expected to implement {#submit} # @raise [NotImplementedError] if subclass did not define {#submit} # @example Submit job template to cluster # solver_id = OodJob::Job.submit(script: solver_script) # #=> "1234.server" # @example Submit job that depends on previous job # post_id = OodJob::Job.submit( # script: post_script, # afterok: solver_id # ) # #=> "1235.server" # @param script [Script] script object that describes the script and # attributes for the submitted job # @param after [#to_s, Array<#to_s>] this job may be scheduled for execution # at any point after dependent jobs have started execution # @param afterok [#to_s, Array<#to_s>] this job may be scheduled for # execution only after dependent jobs have terminated with no errors # @param afternotok [#to_s, Array<#to_s>] this job may be scheduled for # execution only after dependent jobs have terminated with errors # @param afterany [#to_s, Array<#to_s>] this job may be scheduled for # execution after dependent jobs have terminated # @return [String] the job id returned after successfully submitting a job def submit(script:, after: [], afterok: [], afternotok: [], afterany: []) raise NotImplementedError, "subclass did not define #submit" end # Retrieve job info from the resource manager # @abstract Subclass is expected to implement {#info} # @raise [NotImplementedError] if subclass did not define {#info} # @param id [#to_s] the id of the job, otherwise get list of all jobs # running on cluster # @return [Info, Array<Info>] information describing submitted job def info(id: '') raise NotImplementedError, "subclass did not define #info" end # Retrieve job status from resource manager # @note Optimized slightly over retrieving complete job information from server # @abstract Subclass is expected to implement {#status} # @raise [NotImplementedError] if subclass did not define {#status} # @param id [#to_s] the id of the job # @return [Status] status of job def status(id:) raise NotImplementedError, "subclass did not define #status" end # Put the submitted job on hold # @abstract Subclass is expected to implement {#hold} # @raise [NotImplementedError] if subclass did not define {#hold} # @param id [#to_s] the id of the job # @return [void] def hold(id:) raise NotImplementedError, "subclass did not define #hold" end # Release the job that is on hold # @abstract Subclass is expected to implement {#release} # @raise [NotImplementedError] if subclass did not define {#release} # @param id [#to_s] the id of the job # @return [void] def release(id:) raise NotImplementedError, "subclass did not define #release" end # Delete the submitted job # @abstract Subclass is expected to implement {#delete} # @raise [NotImplementedError] if subclass did not define {#delete} # @param id [#to_s] the id of the job # @return [void] def delete(id:) raise NotImplementedError, "subclass did not define #delete" end private # Reduce an array to unique objects with count # ["a", "a", "b"] #=> {"a" => 2, "b" => 1} def uniq_array(ary) ary.group_by {|v| v}.each_with_object({}) {|(k, v), h| h[k] = v.size} end end end
#!/usr/bin/perl # Copyright (c) 2021 Tom Hancocks # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # Trim function sub trim { my $str = $_[0]; $str =~ s/^\s+|\n+//g; return $str; } # We need to extract certain information about the binary and the application # bundle that we are installing. my ($bin_path) = @ARGV; if (!defined $bin_path) { die("You must provide a mach-o binary."); } my $macos_path = trim(`dirname ${bin_path}`); my $contents_path = trim(`dirname ${macos_path}`); my $frameworks_path = trim("${contents_path}/Frameworks"); # Make sure the frameworks directory actually exists in the application bundle. `mkdir -p ${frameworks_path}`; # Setup a selection of functions that are responsible for moving files and # altering linking. sub install_name_tool { local ($path, $dylib_path) = ($_[0], $_[1]); local $dylib_name = trim(`basename ${dylib_path}`); local $dylib_link_path = "\@executable_path/../Frameworks/${dylib_name}"; `install_name_tool -change "${dylib_path}" "${dylib_link_path}" "${path}"`; } sub get_dylib_install_path { local ($dylib_name) = (trim(`basename $_[0]`)); return "${frameworks_path}/${dylib_name}"; } sub copy_dylib { local ($dylib_path, $dylib_name) = ($_[0], get_dylib_install_path($_[0])); `cp -v ${dylib_path} ${dylib_install_path}`; `chmod 0755 ${dylib_install_path}`; return $dylib_install_path; } # Setup a subroutine to handle the actual DYLIB installation. This is # unfortunately a recursive operation, as actual DYLIBs can reference other # DYLIBs. sub install_dylib { local $base = $_[0]; local @result = split /\n/, `otool -L ${base}`; while (local $dylib = shift(@result)) { local $dylib_path = trim((split / /, $dylib)[0]); local $dylib_install_path = get_dylib_install_path($dylib_path); if ($dylib_install_path eq $base) { install_name_tool($dylib_install_path, $dylib_path); } # Check if the DYLIB is a user one (located in a brew install location) elsif ((rindex $dylib_path, "/usr/local") == 0) { local $dylib_install_path = copy_dylib($dylib_path); print("Installing DYLIB to ${dylib_install_path}\n"); install_name_tool($base, $dylib_path); install_dylib($dylib_install_path); } } } install_dylib($bin_path);
package m54tom55 import ( "testing" "github.com/gogo/protobuf/proto" "github.com/pkg/errors" "github.com/stackrox/rox/generated/storage" "github.com/stackrox/rox/pkg/testutils" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" bolt "go.etcd.io/bbolt" ) var ( // Sections are not essential for the test // but are required for a policy to be valid. sections = []*storage.PolicySection{ { PolicyGroups: []*storage.PolicyGroup{ { FieldName: "CVSS", Values: []*storage.PolicyValue{ { Value: ">= 7.000000", }, }, }, }, }, } exclusions = []*storage.Exclusion{ { Name: "42", }, } originalPolicies = []*storage.Policy{ { Id: "0", Name: "policy 0 with no whitelists", PolicyVersion: oldVersion, PolicySections: sections, }, { Id: "1", Name: "policy 1 with a whitelist", PolicyVersion: oldVersion, PolicySections: sections, Whitelists: exclusions, }, { Id: "2", Name: "policy 2 with both a whitelist and an exclusion", PolicyVersion: oldVersion, PolicySections: sections, Whitelists: exclusions, Exclusions: exclusions, }, { Id: "3", Name: "policy 3 with an exclusion but the old version", PolicyVersion: oldVersion, PolicySections: sections, Exclusions: exclusions, }, { Id: "4", Name: "policy 4 with an exclusion and the new version", PolicyVersion: newVersion, PolicySections: sections, Exclusions: exclusions, }, { Id: "5", Name: "policy 5 with no exclusion and and the new version", PolicyVersion: newVersion, PolicySections: sections, }, { Id: "6", Name: "policy 6 with a whitelist and the new version", PolicyVersion: newVersion, PolicySections: sections, Whitelists: exclusions, }, } expectedPolicies = []*storage.Policy{ { Id: "0", Name: "policy 0 with no whitelists", PolicyVersion: newVersion, PolicySections: sections, }, { Id: "1", Name: "policy 1 with a whitelist", PolicyVersion: newVersion, PolicySections: sections, Exclusions: exclusions, }, { Id: "2", Name: "policy 2 with both a whitelist and an exclusion", PolicyVersion: newVersion, PolicySections: sections, Exclusions: append(exclusions, exclusions...), }, { Id: "3", Name: "policy 3 with an exclusion but the old version", PolicyVersion: newVersion, PolicySections: sections, Exclusions: exclusions, }, { Id: "4", Name: "policy 4 with an exclusion and the new version", PolicyVersion: newVersion, PolicySections: sections, Exclusions: exclusions, }, { Id: "5", Name: "policy 5 with no exclusion and and the new version", PolicyVersion: newVersion, PolicySections: sections, }, { Id: "6", Name: "policy 6 with a whitelist and the new version", PolicyVersion: newVersion, PolicySections: sections, Exclusions: exclusions, }, } ) func TestPolicyMigration(t *testing.T) { db := testutils.DBForT(t) err := db.Update(func(tx *bolt.Tx) error { bucket, err := tx.CreateBucket(policyBucket) if err != nil { return err } for _, policy := range originalPolicies { bytes, err := proto.Marshal(policy) if err != nil { return err } if err := bucket.Put([]byte(policy.GetId()), bytes); err != nil { return err } } return nil }) require.NoError(t, err, "Prepare test policy bucket") err = migrateWhitelistsToExclusions(db) require.NoError(t, err, "Run migration") var migratedPolicies []*storage.Policy err = db.View(func(tx *bolt.Tx) error { bucket := tx.Bucket(policyBucket) if bucket == nil { return errors.Errorf("bucket %q not found", policyBucket) } return bucket.ForEach(func(_, obj []byte) error { policy := &storage.Policy{} if err := proto.Unmarshal(obj, policy); err != nil { return err } migratedPolicies = append(migratedPolicies, policy) return nil }) }) require.NoError(t, err, "Read migrated policies from the bucket") assert.ElementsMatch(t, expectedPolicies, migratedPolicies) }
package activerecord import ( "context" "fmt" "strings" "github.com/activegraph/activegraph/activesupport" ) type ErrUnknownPrimaryKey struct { PrimaryKey string Description string } func (e *ErrUnknownPrimaryKey) Error() string { return fmt.Sprintf("Primary key is unknown, %s", e.Description) } type R struct { rel *Relation tableName string primaryKey string attrs attributesMap assocs associationsMap validators validatorsMap reflection *Reflection connections *connectionHandler } // TableName sets the table name explicitly. // // Vertex := activerecord.New("vertex", func(r *activerecord.R) { // r.TableName("vertices") // }) func (r *R) TableName(name string) { r.tableName = name } func (r *R) PrimaryKey(name string) { r.primaryKey = name } func (r *R) DefineAttribute(name string, t Type, validators ...AttributeValidator) { r.attrs[name] = attr{Name: name, Type: t} r.validators.include(name, typeValidator{t}) r.validators.include(name, validators...) } func (r *R) Validates(name string, validator AttributeValidator) { if v, ok := validator.(activesupport.Initializer); ok { err := v.Initialize() if err != nil { panic(err) } // activesupport.Err(err).Unwrap() } r.validators.include(name, validator) } func (r *R) ValidatesPresence(names ...string) { r.validators.extend(names, new(Presence)) } func (r *R) BelongsTo(name string, init ...func(*BelongsTo)) { assoc := BelongsTo{targetName: name, owner: r.rel, reflection: r.reflection} switch len(init) { case 0: case 1: init[0](&assoc) default: panic(activesupport.ErrMultipleVariadicArguments{Name: "init"}) } r.attrs[assoc.AssociationForeignKey()] = attr{ Name: assoc.AssociationForeignKey(), Type: Nil{new(Int64)}, } r.assocs[name] = &assoc } func (r *R) HasMany(name string) { // TODO: Define library methods to pluralize words. targetName := strings.TrimSuffix(name, "s") // Use plural name for the name of attribute, while target name // of the association should be in singular (to find a target relation // through the reflection. r.assocs[name] = &HasMany{ targetName: targetName, owner: r.rel, reflection: r.reflection, } } func (r *R) HasOne(name string) { r.assocs[name] = &HasOne{targetName: name, owner: r.rel, reflection: r.reflection} } func (r *R) init(ctx context.Context, tableName string) error { conn, err := r.connections.RetrieveConnection(primaryConnectionName) if err != nil { return err } definitions, err := conn.ColumnDefinitions(ctx, tableName) if err != nil { return err } for _, column := range definitions { columnType := column.Type if !column.NotNull { columnType = Nil{columnType} } r.DefineAttribute(column.Name, columnType) if column.IsPrimaryKey { r.PrimaryKey(column.Name) } } return nil } type Relation struct { name string tableName string // TODO: add *Reflection property. // reflection *Reflection conn Conn connections *connectionHandler scope *attributes query *QueryBuilder ctx context.Context associations validations AttributeMethods } func New(name string, init ...func(*R)) *Relation { var ( rel *Relation err error ) switch len(init) { case 0: rel, err = Initialize(name, nil) case 1: rel, err = Initialize(name, init[0]) default: panic(&activesupport.ErrMultipleVariadicArguments{Name: "init"}) } if err != nil { panic(err) } return rel } func Initialize(name string, init func(*R)) (*Relation, error) { rel := &Relation{name: name} r := R{ rel: rel, assocs: make(associationsMap), attrs: make(attributesMap), validators: make(validatorsMap), reflection: globalReflection, connections: globalConnectionHandler, } err := r.init(context.TODO(), name+"s") if err != nil { return nil, err } if init != nil { init(&r) } // When the primary key was assigned to record builder, mark it explicitely // wrapping with PrimaryKey structure. Otherwise, fallback to the default primary // key implementation. if r.primaryKey != "" { attr, ok := r.attrs[r.primaryKey] if !ok { return nil, &ErrUnknownPrimaryKey{r.primaryKey, "not in attributes"} } r.attrs[r.primaryKey] = PrimaryKey{Attribute: attr} } if r.tableName == "" { r.tableName = name + "s" } // The scope is empty by default. scope, err := newAttributes(name, r.attrs.copy(), nil) if err != nil { return nil, err } assocs := newAssociations(name, r.assocs.copy(), r.reflection) validations := newValidations(r.validators.copy()) // Create the model schema, and register it within a reflection instance. rel.tableName = r.tableName rel.scope = scope rel.associations = *assocs rel.validations = *validations rel.connections = r.connections rel.query = &QueryBuilder{from: r.tableName} rel.AttributeMethods = scope r.reflection.AddReflection(name, rel) return rel, nil } func (rel *Relation) TableName() string { return rel.tableName } func (rel *Relation) Name() string { return rel.name } func (rel *Relation) Copy() *Relation { scope := rel.scope.copy() return &Relation{ name: rel.name, tableName: rel.tableName, conn: rel.Connection(), connections: rel.connections, scope: rel.scope.copy(), query: rel.query.copy(), ctx: rel.ctx, associations: *rel.associations.copy(), validations: *rel.validations.copy(), AttributeMethods: scope, } } func (rel *Relation) empty() *Relation { rel.scope, _ = newAttributes(rel.name, nil, nil) return rel } // IsEmpty returns true if there are no records. func (rel *Relation) IsEmpty() bool { // TODO: implement the method. return false } func (rel *Relation) Context() context.Context { if rel.ctx == nil { return context.Background() } return rel.ctx } func (rel *Relation) WithContext(ctx context.Context) *Relation { newrel := rel.Copy() newrel.ctx = ctx return newrel } func (rel *Relation) Connect(conn Conn) *Relation { newrel := rel.Copy() newrel.conn = conn return newrel } func (rel *Relation) Connection() Conn { if rel.conn != nil { return rel.conn } conn, err := rel.connections.RetrieveConnection(primaryConnectionName) if err != nil { return &errConn{err} } return conn } func (rel *Relation) New(params ...map[string]interface{}) Result { switch len(params) { case 0: return Return(rel.Initialize(nil)) case 1: return Return(rel.Initialize(params[0])) default: return Err(&activesupport.ErrMultipleVariadicArguments{Name: "params"}) } } func (rel *Relation) Initialize(params map[string]interface{}) (*ActiveRecord, error) { attributes := rel.scope.clear() err := attributes.AssignAttributes(params) if err != nil { return nil, err } rec := &ActiveRecord{ name: rel.name, tableName: rel.tableName, conn: rel.Connection(), attributes: attributes, associations: rel.associations.copy(), validations: *rel.validations.copy(), } return rec.init(), nil } func (rel *Relation) Create(params map[string]interface{}) Result { return Return(rel.Initialize(params)).Insert() } func (rel *Relation) ExtractRecord(h activesupport.Hash) (*ActiveRecord, error) { var ( attrNames = rel.scope.AttributeNames() columnNames = rel.scope.ColumnNames() ) params := make(activesupport.Hash, len(attrNames)) for i, colName := range columnNames { attrName := attrNames[i] attr := rel.scope.AttributeForInspect(attrName) attrValue, err := attr.AttributeType().Deserialize(h[colName]) if err != nil { return nil, err } params[attrName] = attrValue } return rel.Initialize(params) } // PrimaryKey returns the attribute name of the record's primary key. func (rel *Relation) PrimaryKey() string { return rel.scope.PrimaryKey() } func (rel *Relation) All() CollectionResult { return ReturnCollection(rel, nil) } // TODO: move to the Schema type all column-related methods. func (rel *Relation) ColumnNames() []string { return rel.scope.ColumnNames() } func (rel *Relation) Each(fn func(*ActiveRecord) error) error { q := rel.query.copy() q.Select(rel.ColumnNames()...) // Include all join dependencies into the query with fully-qualified column // names, so each part of the request can be extracted individually. for _, join := range rel.query.joinValues { q.Select(join.Relation.ColumnNames()...) } var lasterr error err := rel.Connection().ExecQuery(rel.Context(), q.Operation(), func(h activesupport.Hash) bool { rec, e := rel.ExtractRecord(h) if lasterr = e; e != nil { return false } for _, join := range rel.query.joinValues { arec, e := join.Relation.ExtractRecord(h) if lasterr = e; e != nil { return false } e = rec.AssignAssociation(join.Relation.Name(), arec) if lasterr = e; e != nil { return false } } if lasterr = fn(rec); lasterr != nil { return false } return true }) if lasterr != nil { return lasterr } return err } func (rel *Relation) Where(cond string, arg interface{}) *Relation { newrel := rel.Copy() // When the condition is a regular column, pass it through the regular // column comparison instead of query chain predicates. if newrel.scope.HasAttribute(cond) { // newrel.scope.AssignAttribute(cond, arg) newrel.query.Where(fmt.Sprintf("%s = ?", cond), arg) } else { newrel.query.Where(cond, arg) } return newrel } // Select allows to specify a subset of fields to return. // // Method returns a new relation, where a set of attributes is limited by the // specified list. // // Model.Select("field", "other_field") // // #<Model id: 1, field: "value", other_field: "value"> // // Accessing attributes of a Record that do not have fields retrieved by a select // except id with return nil. // // model, _ := Model.Select("field").Find(1) // model.Attribute("other_field") // Returns nil func (rel *Relation) Select(attrNames ...string) *Relation { newrel := rel.Copy() if !newrel.scope.HasAttributes(attrNames...) { return newrel.empty() } attrMap := make(map[string]struct{}, len(attrNames)) for _, attrName := range attrNames { attrMap[attrName] = struct{}{} } for _, attrName := range newrel.scope.AttributeNames() { if _, ok := attrMap[attrName]; !ok { newrel.scope.ExceptAttribute(attrName) } } return newrel } func (rel *Relation) Group(attrNames ...string) *Relation { newrel := rel.Copy() // When the attribute is not part of the scope, return an empty relation. if !newrel.scope.HasAttributes(attrNames...) { return newrel.empty() } newrel.query.Group(attrNames...) return newrel } // Limit specifies a limit for the number of records to retrieve. // // User.Limit(10) // Generated SQL has 'LIMIT 10' func (rel *Relation) Limit(num int) *Relation { newrel := rel.Copy() newrel.query.Limit(num) return newrel } func (rel *Relation) Joins(assocNames ...string) *Relation { newrel := rel.Copy() for _, assocName := range assocNames { association := newrel.ReflectOnAssociation(assocName) if association == nil { return newrel.empty() } newrel.query.Join(association.Relation.Copy(), association.Association) } return newrel } func (rel *Relation) Find(id interface{}) Result { var q QueryBuilder q.From(rel.TableName()) q.Select(rel.scope.AttributeNames()...) // TODO: consider using unified approach. q.Where(fmt.Sprintf("%s = ?", rel.PrimaryKey()), id) var rows []activesupport.Hash if err := rel.Connection().ExecQuery(rel.Context(), q.Operation(), func(h activesupport.Hash) bool { rows = append(rows, h) return true }); err != nil { return Err(err) } if len(rows) != 1 { return Err(ErrRecordNotFound{PrimaryKey: rel.PrimaryKey(), ID: id}) } return rel.New(rows[0]) } // FindBy returns a record matching the specified condition. // // person := Person.FindBy("name", "Bill") // // Ok(Some(#<Person id: 1, name: "Bill", occupation: "retired">)) // // person := Person.FindBy("salary > ?", 10000) // // Ok(Some(#<Person id: 3, name: "Jeff", occupation: "CEO">)) func (rel *Relation) FindBy(cond string, arg interface{}) Result { return rel.Where(cond, arg).First() } // First find returns the first record. func (rel *Relation) First() Result { records, err := rel.Limit(1).ToA() if err != nil { return Err(err) } switch len(records) { case 0: return Ok(None) default: return Ok(Some(records[0])) } } func (rel *Relation) InsertAll(params ...map[string]interface{}) ( rr []*ActiveRecord, err error, ) { rr = make([]*ActiveRecord, 0, len(params)) for _, h := range params { rec, err := rel.Initialize(h) if err != nil { return nil, err } rr = append(rr, rec) } if err = rel.connections.Transaction(rel.Context(), func() error { for i, rec := range rr { if rr[i], err = rec.Insert(); err != nil { return err } } return nil }); err != nil { return nil, err } return rr, nil } // ToA converts Relation to array. The method access database to retrieve objects. func (rel *Relation) ToA() (Array, error) { var rr Array if err := rel.Each(func(r *ActiveRecord) error { rr = append(rr, r) return nil }); err != nil { return nil, err } return rr, nil } // ToSQL returns sql statement for the relation. // // User.Where("name", "Oscar").ToSQL() // // SELECT * FROM "users" WHERE "name" = ? func (rel *Relation) ToSQL() string { return rel.query.String() } func (rel *Relation) String() string { var buf strings.Builder fmt.Fprintf(&buf, "%s(", strings.Title(rel.name)) attrs := rel.AttributesForInspect() for i, attr := range attrs { fmt.Fprintf(&buf, "%s: %s", attr.AttributeName(), attr.AttributeType()) if i < len(attrs)-1 { fmt.Fprint(&buf, ", ") } } fmt.Fprintf(&buf, ")") return buf.String() }
from compas.geometry.primitives.frame import Frame class BeamStorage(object): def __init__(self, frame=None, y_count=5, y_spacing=140, z_spacing=140): # type: (Frame, int, float, float) -> None """Frame should have X pointing along beam length and Z pointing to world Z""" self.frame = frame # type: (Frame) # Frame where the self.y_count = y_count self.y_spacing = y_spacing self.z_spacing = z_spacing def to_data(self): """Simpliest way to get this class serialized. """ return self.data @classmethod def from_data(cls, data): """Construct a Movement from structured data. Subclass must add their properity to the data properity. """ beamstorage = cls() beamstorage.data = data return beamstorage @property def data(self): data = {} data['frame'] = self.frame data['y_count'] = self.y_count data['y_spacing'] = self.y_spacing data['z_spacing'] = self.z_spacing return data @data.setter def data(self, data): self.frame = data.get('frame', Frame.worldXY()) self.y_count = data.get('y_count', 5) self.y_spacing = data.get('y_spacing', 140) self.z_spacing = data.get('z_spacing', 140) def get_storage_frame(self, beam_seq, total_beam_count = 0): # type(int) -> Frame """Get the storage frame of a particular beam based on the sequence number (zero start) The algorithm is a simple Y first and then Z. The returned frame have X pointing along the beam length and Z pointing to world Up. You can align the grasp face's face_frame such that the beam is stored in the same orientation with the gripping direction. and optionally compensate the depth of the beam by moving the beam up. """ # Reverse the order (since we pick form the top) if total_beam_count > 0 : beam_seq = total_beam_count - beam_seq - 1 y = (beam_seq % self.y_count) z = beam_seq // self.y_count y_offset = y * self.y_spacing z_offset = z * self.z_spacing transform_vector = self.frame.yaxis.unitized().scaled(y_offset) + self.frame.zaxis.unitized().scaled(z_offset) return Frame(self.frame.point + transform_vector, self.frame.xaxis.copy(), self.frame.yaxis.copy())
# -*- coding: utf-8 -*- import requests from lxml import etree import SaveData import random class Get_album_and_aongs: ''' 通过专辑号获取专辑信息和歌曲信息 ''' def __init__(self, album_id, proxy_pool): self.album_id = album_id self.proxy_pool = proxy_pool def get_album_and_songs(self): ''' 该函数用于通过专辑号获取专辑信息 以及该专辑包含的所有歌曲信息 将获取的信息保存到数据库 ''' url = 'http://music.163.com/album?id=' + self.album_id headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:57.0) Gecko/20100101 Firefox/57.0'} ip = random.choice(self.proxy_pool) proxies = {'http': ip} try: r = requests.get(url, headers=headers, proxies=proxies, timeout=3) #请求一张专辑的歌曲列表页面 if r.status_code == 404: return -1 else: r.raise_for_status() html = etree.HTML(r.text) '''解析获取专辑信息,包括专辑号、专辑名、歌手号、歌手名、发行时间和发行单位''' album_info = {} album_info['album_id'] = self.album_id album_info['album_name'] = html.xpath("//h2[@class='f-ff2']/text()") album_info['singer_id'] = html.xpath("//p[@class='intr']//a/@href")[0].replace('/artist?id=', '') album_info['singer_name'] = html.xpath("//p[@class='intr']//a/text()") album_info['release_time'] = html.xpath("//p[@class='intr']/text()")[0] if len(html.xpath("//p[@class='intr']/text()")) > 1: album_info['release_company'] = html.xpath("//p[@class='intr']/text()")[1].strip() else: album_info['release_company'] = '无' #有些专辑没有标明发行单位,此类统统用‘无’表示 '''解析获取一张专辑的所有歌曲信息''' songs_info = [] for i in range(len(html.xpath("//ul[@class='f-hide']/li"))): '''解析获取一首歌的信息,包括歌曲号、歌曲名、所属专辑号、所属专辑名''' song_info = {} song_info['song_id'] = html.xpath("//ul[@class='f-hide']/li/a/@href")[i].replace('/song?id=', '') song_info['song_name'] = html.xpath("//ul[@class='f-hide']/li/a/text()")[i] song_info['album_id'] = album_info['album_id'] song_info['album_name'] = album_info['album_name'] songs_info.append(song_info) '''调用函数,保存专辑信息和歌曲信息到数据库''' SaveData.save_album_info(album_info) SaveData.save_songs_info(songs_info) print("专辑id为"+ self.album_id +"的信息获取完毕") return 1 except: print("专辑id为"+ self.album_id +"的信息获取失败") print("正在重新获取") return None
/** * Copyright 2018 gd Author. All Rights Reserved. * Author: Chuck1024 */ package discovery import ( "github.com/chuck1024/gd/service" ) var ( defaultConf = "conf/conf.ini" ) type DogDiscovery interface { Start() error Close() Watch(key, node string) error WatchMulti(nodes map[string]string) error AddNode(key string, info service.NodeInfo) DelNode(key string, addr string) GetNodeInfo(key string) (nodesInfo []service.NodeInfo) }
import { PublicKey } from '@solana/web3.js'; import { getPayer, getRpcUrl} from '../utils'; import { Connection, NodeWallet, programs, actions } from '@metaplex/js'; async function getVaultInfo(vaultAddress) { const rpcUrl = await getRpcUrl(); let connection = new Connection(rpcUrl, 'confirmed'); const vault = await programs.vault.Vault.load(connection, vaultAddress); console.log(vault.data.authority); } getVaultInfo(new PublicKey("AvLtCwsoqXe2jr2rQ1wwvXF8LD6g9PcR8Qz8ygy5ARmF"))
package net.apptronic.test.commons_sample_compat_app import android.content.Intent import android.os.Bundle import net.apptronic.test.commons_sample_compat_app.about.AboutActivity import net.apptronic.test.commons_sample_compat_app.data.UserData import net.apptronic.test.commons_sample_compat_app.fragments.dialog.SampleDialog import net.apptronic.test.commons_sample_compat_app.fragments.enterdata.EnterDataFragment import net.apptronic.test.commons_sample_compat_app.fragments.showdata.KEY_USER_DATA import net.apptronic.test.commons_sample_compat_app.fragments.showdata.ShowDataFragment class RouterImpl(private val mainActivity: MainActivity) : Router { override fun openAbout() { mainActivity.startActivity(Intent(mainActivity, AboutActivity::class.java)) } override fun openDialog() { mainActivity.supportFragmentManager.beginTransaction() .add(SampleDialog(), null) .commit() } override fun goToEnterData() { mainActivity.replaceFragmentWithAddToBackStack(EnterDataFragment()) } override fun goToShowUserData(data: UserData) { mainActivity.replaceFragmentWithAddToBackStack(ShowDataFragment().apply { arguments = Bundle().apply { putSerializable(KEY_USER_DATA, data) } }) } }
import json from enum import Enum, auto from . import profile, oauth class OutputFormat(Enum): json = auto() shell = auto() config = auto() def output(fmt: OutputFormat, tokens: oauth.Tokens, **kwargs: str) -> None: if fmt == OutputFormat.json: print(json.dumps(tokens._asdict(), indent=4)) elif fmt == OutputFormat.shell: print(f"export ACCESS_TOKEN={tokens.access_token}") print(f"export ID_TOKEN={tokens.id_token}") elif fmt == OutputFormat.config: profile.set_credentials(profile_name=kwargs["profile"], credentials=tokens) else: raise ValueError(f"Output format {format} not implemented")
/* * Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com> */ package com.lightbend.lagom.javadsl.api import org.scalatest.{ Inside, Matchers, WordSpec } class ScalaSupportSpec extends WordSpec with Matchers with Inside { "scala support" should { "resolve a function" in { val method: ScalaServiceSupport.ScalaMethodCall[String] = testMethod _ method.method.getDeclaringClass should ===(this.getClass) method.method.getName should ===("testMethod") } } def testMethod(s: String): String = s }
--- layout: default title: Ferramentas description: Ferramentas utilizadas pela nossa empresa! --- ## Ferramentas [Trello](https://trello.com) [Bug Track](https://www.bugtrack.net) [Git Hub](https://github.com) [BPMN.io](https://demo.bpmn.io/s/start)
// Copyright Luc Yriarte <[email protected]> 2018 // License: Apache-2.0 package main import ( "github.com/hyperledger/fabric/core/chaincode/shim" ) type Storable interface { Put(stub shim.ChaincodeStubInterface, key string) error Get(stub shim.ChaincodeStubInterface, key string) error }
#!/bin/bash cd ../../ brew install boost --with-python brew install boost-python ffmpeg xerces-c mono brew cask install java schemas="$(pwd)/Schemas" echo "export MALMO_XSD_PATH=$schemas" >> ~/.bashrc source ~/.bashrc
package com.quickbirdstudios.surveykit.backend.presenter import com.quickbirdstudios.surveykit.FinishReason import com.quickbirdstudios.surveykit.result.StepResult sealed class NextAction { data class Next(val result: StepResult) : NextAction() data class Back(val result: StepResult) : NextAction() object Skip : NextAction() data class Close(val result: StepResult, val finishReason: FinishReason) : NextAction() }
import 'dart:async'; import 'package:academy_app/data/repository/failures/firestore_failure.dart'; import 'package:academy_app/data/repository/user/i_user_repository.dart'; import 'package:academy_app/model/user_data/user_data.dart'; import 'package:bloc/bloc.dart'; import 'package:freezed_annotation/freezed_annotation.dart'; import 'package:injectable/injectable.dart'; part 'get_users_state.dart'; part 'get_users_cubit.freezed.dart'; @injectable class GetUsersCubit extends Cubit<GetUsersState> { final IUserRepo iUserRepo; GetUsersCubit(this.iUserRepo) : super(GetUsersState.initial()); String query = ''; List<UserData> filterUsers = []; late StreamSubscription streamSubscription; Future<void> getUsers() async { streamSubscription = iUserRepo.getUsers(query).listen( (failureOrSucces) { failureOrSucces.fold( (failure) => emit( GetUsersState.loadFailure(failure), ), (users) { filterUsers = users; return emit( GetUsersState.loadSuccess(users), ); }, ); }, ); } void listChanged(List<UserData> users, String queryChanged) { query = queryChanged; filterUsers = users.where((user) { final userName = user.userName!.getOrCrash().toLowerCase(); final search = queryChanged.toLowerCase(); return userName.contains(search); }).toList(); } @override Future<void> close() { streamSubscription.cancel(); return super.close(); } }
def coord(path) e = 0 ne = 0 path = path.chars until path.empty? do s = path.shift case s when ?e e += 1 when ?w e -= 1 when ?n t = path.shift case t when ?e ne += 1 when ?w e -= 1 ne += 1 end when ?s t = path.shift case t when ?e e += 1 ne -= 1 when ?w ne -= 1 end end end [e, ne] end blacks = [] $stdin.readlines.map(&:strip).each do |path| c = coord(path) if blacks.include?(c) blacks.delete(c) else blacks.push(c) end end puts blacks.length
//********************** //Hosting eDrawings control in Windows Forms //Copyright(C) 2019 www.codestack.net //License: https://github.com/codestack-net-dev/solidworks-api-examples/blob/master/LICENSE //Product URL: https://www.codestack.net/edrawings-api/gettings-started/winforms/ //********************** using System; using System.Windows.Forms; using eDrawings.Interop.EModelViewControl; namespace CodeStack.Examples.eDrawingsApi { public partial class EDrawingsUserControl : UserControl { public event Action<EModelViewControl> EDrawingsControlLoaded; public EDrawingsUserControl() { InitializeComponent(); } public void LoadEDrawings() { var host = new EDrawingsHost(); host.ControlLoaded += OnControlLoaded; this.Controls.Add(host); host.Dock = DockStyle.Fill; } private void OnControlLoaded(EModelViewControl ctrl) { EDrawingsControlLoaded?.Invoke(ctrl); } } }
export function lowercaseStaticParts(path: string): string { return path .split('/') .map((part) => { return part.startsWith(':') ? part : part.toLowerCase(); }) .join('/'); }
package com.gowtham.letschat.fragments import android.os.Bundle import android.view.LayoutInflater import android.view.View import android.view.ViewGroup import com.google.android.material.bottomsheet.BottomSheetDialogFragment import com.gowtham.letschat.databinding.FAttachmentBinding import com.gowtham.letschat.databinding.FImageSrcSheetBinding import com.gowtham.letschat.utils.BottomSheetEvent import org.greenrobot.eventbus.EventBus class FAttachment : BottomSheetDialogFragment() { private lateinit var binding: FAttachmentBinding companion object{ fun newInstance(bundle : Bundle) : FAttachment{ val fragment = FAttachment() fragment.arguments=bundle return fragment } } override fun onCreateView( inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle?): View { binding = FAttachmentBinding.inflate(layoutInflater, container, false) return binding.root } override fun onViewCreated(view: View, savedInstanceState: Bundle?) { super.onViewCreated(view, savedInstanceState) binding.imgCamera.setOnClickListener { EventBus.getDefault().post(BottomSheetEvent(0)) dismiss() } binding.imgGallery.setOnClickListener { EventBus.getDefault().post(BottomSheetEvent(1)) dismiss() } binding.videoGallery.setOnClickListener { EventBus.getDefault().post(BottomSheetEvent(2)) dismiss() } binding.videoCamera.setOnClickListener { EventBus.getDefault().post(BottomSheetEvent(3)) dismiss() } } }
# Conway's Game of Life A C implementation of Conway's Game of Life using ncurses. ### Getting Started ```sh # Substitute `gmake` if on *BSD $ make $ ./conway ``` Press 'q' or CTRL-C to exit. A full list of keybindings can be displayed by pressing '?'. `conway` can also read a starting position from a cells formatted text file. ```sh $ ./conway patterns/glider.cells ```
module ElasticsearchDslBuilder # @abstact Exceptions raised by ElasticsearchDslBuilder inherit from Error class Error < StandardError; end # Exception raised when Queries::Query.to_hash attempts to build invalid query class InvalidQuery < Error; end end
require "alexa_string_tools/version" require "humanize" module AlexaStringTools # we'll lazy load this mapping. @@alexa_string_mapping = nil def email_from_alexa string = convert_from_alexa_string_to_email(self) string.strip end private def convert_from_alexa_string_to_email(string) alexa_string_mapping.each do |from,to| string = string.gsub(from, to) end string.gsub(' ', '') end def alexa_string_mapping return @@alexa_string_mapping if @@alexa_string_mapping @@alexa_string_mapping = {} # we don’t have to pad these with spaces because the results are crunched together in the end. ('A'..'Z').to_a.each do |letter| @@alexa_string_mapping["#{letter}."] = letter @@alexa_string_mapping["#{letter.downcase}."] = letter.downcase end # it’s okay to pad these with spaces, because they can’t be at the beginning or end anyway. @@alexa_string_mapping.merge!({ ' at ' => '@', ' dot ' => '.', ' period ' => '.', ' underscore ' => '_', ' plus ' => '+', }) # spoken numbers are always converted into integers. (0..10000).to_a.reverse.each do |number| number_as_string = number.humanize number_as_string = number_as_string.gsub(/,/, '') number_as_string = number_as_string.gsub(/-/, ' ') @@alexa_string_mapping[number_as_string] = number.to_s end @@alexa_string_mapping end end String.class_eval do include AlexaStringTools end # trigger the loading of the string mapping automatically at load time. "".email_from_alexa
package com.xaron.equilinoxmodded.framework.blueprintgen.components; import java.io.IOException; import com.xaron.equilinoxmodded.framework.CsvWriter; import com.xaron.equilinoxmodded.framework.blueprintgen.components.deathai.DeathAIGen; import food.FoodSectionType; public class FoodComponentGen extends ComponentGen { public class FoodSection { private int name; private int foodPoints; private FoodSectionType type; private int portions = 0; private DeathAIGen deathAi; public FoodSection(int name, int foodPoints, FoodSectionType type, DeathAIGen deathAi) { this.name = name; this.foodPoints = foodPoints; this.type = type; this.deathAi = deathAi; } public FoodSection(int name, int foodPoints, int portions) { this.name = name; this.foodPoints = foodPoints; this.type = FoodSectionType.TO_SHARE; this.portions = portions; } } private FoodSection[] foodSections; public FoodComponentGen(FoodSection[] foodSections) { super("FOOD"); this.foodSections = foodSections; } @Override public void writeComponent(CsvWriter writer) throws IOException { super.writeComponent(writer); writer.writeInt(foodSections.length); for (int i = 0; i < foodSections.length; i++) { writer.writeInt(foodSections[i].name); writer.writeInt(foodSections[i].foodPoints); writer.writeString(foodSections[i].type.name()); if (foodSections[i].type == FoodSectionType.TO_SHARE) writer.writeLabelInt("portions", foodSections[i].portions); else if (foodSections[i].type == FoodSectionType.WHOLE) foodSections[i].deathAi.write(writer); else if (foodSections[i].type == FoodSectionType.ROOT_VEG) foodSections[i].deathAi.write(writer); } } }
// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. library which.test.candidate_paths; import 'package:unittest/unittest.dart'; import 'util.dart'; main() { group('getCandidatePaths', () { test('posix', () { var candidatePaths = getPosixCandidatePaths('z', '/x/y:/a/b/c', '/foo/bar'); expect(candidatePaths, ['/x/y/z', '/a/b/c/z']); }); test('windows', () { var candidatePaths = getWindowsCandidatePaths('z', r'C:\x\y;C:\a\b\c', '.EXE;.BAT', r'C:\foo\bar'); expect(candidatePaths, [ r'C:\foo\bar\z.EXE', r'C:\foo\bar\z.BAT', r'C:\x\y\z.EXE', r'C:\x\y\z.BAT', r'C:\a\b\c\z.EXE', r'C:\a\b\c\z.BAT']); }); }); }
package it.sephiroth.android.library.kotlin_extensions.io.reactivex import io.reactivex.disposables.Disposable fun Disposable.addTo(autoDisposable: AutoDisposable): Disposable { autoDisposable.add(this) return this }
#pragma once #ifndef UTILS_HPP #define UTILS_HPP namespace cave { /**Vector de tres componentes. * */ struct caveVec3f { float x; float y; float z; caveVec3f(float x, float y, float z) { this->x = x; this->y = y; this->z = z; } caveVec3f() = default; }; /**Cuaternion. * */ struct caveQuat { float x; float y; float z; float w; caveQuat(float x, float y, float z, float w) { this->w = w; this->x = x; this->y = y; this->z = z; } caveQuat() = default; }; /**Color rgba. * */ struct caveColour { float r; float g; float b; float alpha; caveColour(float r=1.0f, float g=1.0f, float b=1.0f, float alpha=1.0f) { this->r = r; this->g = g; this->b = b; this->alpha = alpha; } caveColour() = default; }; } #endif
## 웹 안녕하세요. ! MONKEY.D 입니다 :-) 웹 스크래핑 하는 프레임을 간단하게 적어볼거에요. 요번에는 저만이 참고하는 용으로 작성할 거여서 양해 부탁드립니다. ```python #셀레니움 기본 프레임 from selenium import webdriver browser = webdriver.Chrome() browser.maximize_window() url = "url넣을 주소" browser.get(url) #사이트로 이동. ``` ```python driver.find_element_by_xpath('xpath주소!').click() ``` ```python #뷰티플숩 기본 프레임 import requests from bs4 import BeautifulSoup url = "가져올 url주소" 변수 = requests.get(url) 변수.raise_for_status() #변수.raise_for_status() 만약 홈페이지 보안상의 이유나 모종의 이유로 스크래핑이 불가능한 경우에 오류를 내는 함수식입니다. soup=Beautifulsoup(변수.text, "lxml") ``` ***정규식*** 우리가 어떤 정보를 찾을 때 그래도 대부분의 형식이 정해져있겠죠? 낙엽을 긁개로 쓸어올 때 댕겨와서 긁지 막 던져서 그걸 주워오지는 않지 않습니까. 이런 정규식들을 알아보도록 하겠습니다. 먼저 정규식을 쓰기 위해선 준비가 되어있어야겠죠? ```python import re 변수=re.compile("xx.xx") or 변수=re.compile("^xxx") or 변수=re.compile("xxxx$") ``` 이런식으로 내가 먼저 문자를 어떤 방식으로 찾을 것인지를 정해주어어야합니다. **"."**이 들어가는 경우에는 **"."**한글자를 제외하고 일치하는 문자를 다 찾습니다. 예를 들어 변수=re.compile("ca.e") 라고 한다면 "cafe","case","care"등과 같은 단어들을 요구합니다. **"^"**이 들어가는 경우에는 **' ~로 시작하는'** 의미를 가집니다. 예를 들어 변수=re.compile("^de") 라고 한다면 "destination","deep"등과 같은 단어들을 요구합니다. **** **"$"**이 들어가는 경우에는 '~로 끝나는' 의미를 가집니다. 예를 들어 변수=re.compile("se$") 라고 한다면 "case","base"등과 같은 단어들을 요구합니다. 이렇게 먼저 원하는 문자의 형태를 정해준 다음에 주어진 문자를 입력해서 요구한 단어와 일치하는지를 확인해야 합니다. 그 문자들을 입력하는 형태는 다음과 같습니다. ```python m=p.match("비교할 문자열") or m=p.search("비교할 문자열") or m=p.findall("비교할 문자열") ``` **m=p.match("비교할 문자열")** : 주어진 문자열의 처음부터 일치하는지 확인합니다. 그런데 match함수는 비교할 문자열의 처음부터 일치하는지를 확인하기 때문에 첫부분만 일치하면 뒤의 어떤말이 와도 맞다고 판단합니다. **m=p.search("비교할 문자열")** : 주어진 문자열 중에 일치하는 게 있는지 확인, 즉 중간에 단어가 껴있어도 있기만 한다면 요구하는 단어에 맞다고 판단합니다. **m=p.findall("비교할 문자열")** : 일치하는 모든 것을 "리스트" 형태로 반환합니다. 말 그대로 리스트로 바꿔주기 때문에 보통은 lst변수를 사용합니다. 참조 블로그 : https://blog.naver.com/paragonyun/222205019430
/*global Raphael, d3, $, define */ /*! * Diff的兼容性定义 */ ;(function (name, definition) { if (typeof define === 'function') { // Module define(definition); } else { // Assign to common namespaces or simply the global object (window) this[name] = definition(function (id) { return this[id]; }); } })('Diff', function (require) { var DataV = require('DataV'); /** * 构造函数 * @param {Object} node 表示在html的哪个容器中绘制该组件 * @param {Object} options 为用户自定义的组件的属性,比如画布大小 */ var Diff = DataV.extend(DataV.Chart, { type: "Diff", initialize: function (node, options) { this.node = this.checkContainer(node); //图的大小设置 this.defaults.width = 900; this.defaults.height = 800; //设置用户指定的属性 this.setOptions(options); //创建画布 this.createCanvas(); } }); /** * 创建画布 */ Diff.prototype.createCanvas = function () { this.canvas = new Raphael(this.node, this.defaults.width, this.defaults.height); }; /** * 绘制弦图 */ Diff.prototype.render = function () { this.layout(); }; // 计算顺序的相似度 var diffMap = function (list1, list2) { var map = []; var hit = 0; var lastIndex = -1; for (var i = 0; i < list1.length; i++) { var index = _.indexOf(list2, list1[i]); if (index === -1) { continue; } else { if (index > lastIndex) { lastIndex = index; map.push([i, index]); } hit++; } } console.log(map); console.log(map.length / list1.length); console.log(hit / list1.length); return map; }; /** *对原始数据进行处理 * @param {Array} table 将要被绘制成饼图的二维表数据 */ Diff.prototype.setSource = function (table1, table2) { this.rawData = [table1, table2]; this.diffMap = diffMap(table1, table2); }; /** *创建chord布局 */ Diff.prototype.layout = function () { var that = this; var canvas = that.canvas; var paddingLeft = 10; var paddingTop = 10; var height = 20; var distance = 50; var width = (this.defaults.width - 2 * paddingLeft - distance) / 2; for (var j = 0, k = this.rawData.length; j < k; j++) { var maped = _.pluck(this.diffMap, j); for (var i = 0, l = this.rawData[j].length; i < l; i++) { canvas.rect(paddingLeft + j * (width + distance), paddingTop + height * i, width, height).attr({fill: _.indexOf(maped, i) !== -1 ? "#00ff00" : "#ff0000"}); canvas.text(paddingLeft + j * (width + distance), paddingTop + height * i + height / 2, this.rawData[j][i]).attr({'text-anchor': 'start'}); } } for (var i = 0, l = this.diffMap.length; i < l; i++) { var line = this.diffMap[i]; canvas.path("M" + (paddingLeft + width) + ' ' + (paddingTop + height * line[0] + height / 2) + "L" + (paddingLeft + width + distance) + " " + (paddingTop + height * line[1] + height / 2)).attr({stroke: '#00ff00'}); } }; return Diff; });
var options = function(type, height, numbers , color){ return { chart: { height: height, width: '100%', type: type, sparkline: { enabled: true }, toolbar: { show: false, }, }, grid: { show: false, padding: { top: 0, right: 0, bottom: 0, left: 0 } }, dataLabels: { enabled: false }, legend: { show: false, }, series: [ { name: "serie1", data: numbers } ], fill: { colors: [color], }, stroke:{ colors: [color], width: 3 }, yaxis: { show: false, }, xaxis: { show: false, labels: { show: false, }, axisBorder: { show: false, }, tooltip: { enabled: false, } }, }; } var analytics_1 = document.getElementsByClassName("analytics_1"); if (analytics_1 != null && typeof(analytics_1) != 'undefined') { var chart = new ApexCharts(analytics_1[0], options("area" , '51px' , numArr(10,99) , '#4fd1c5')); var chart_1 = new ApexCharts(analytics_1[1], options("area" , '51px' , numArr(10,99) , '#4c51bf')); chart.render(); chart_1.render(); }
using Random using StaticArrays using Test using CLIMA using CLIMA.Atmos using CLIMA.GenericCallbacks using CLIMA.ODESolvers using CLIMA.Mesh.Filters using CLIMA.MoistThermodynamics using CLIMA.PlanetParameters using CLIMA.VariableTemplates # ------------------------ Description ------------------------- # # 1) Dry Rising Bubble (circular potential temperature perturbation) # 2) Boundaries - `All Walls` : NoFluxBC (Impermeable Walls) # Laterally periodic # 3) Domain - 2500m[horizontal] x 2500m[horizontal] x 2500m[vertical] # 4) Timeend - 1000s # 5) Mesh Aspect Ratio (Effective resolution) 1:1 # 7) Overrides defaults for # `forcecpu` # `solver_type` # `sources` # `C_smag` # 8) Default settings can be found in `src/Driver/Configurations.jl` # ------------------------ Description ------------------------- # function init_risingbubble!(bl, state, aux, (x,y,z), t) FT = eltype(state) R_gas::FT = R_d c_p::FT = cp_d c_v::FT = cv_d γ::FT = c_p / c_v p0::FT = MSLP xc::FT = 1250 yc::FT = 1250 zc::FT = 1000 r = sqrt((x-xc)^2+(y-yc)^2+(z-zc)^2) rc::FT = 500 θ_ref::FT = 300 Δθ::FT = 0 if r <= rc Δθ = FT(5) * cospi(r/rc/2) end #Perturbed state: θ = θ_ref + Δθ # potential temperature π_exner = FT(1) - grav / (c_p * θ) * z # exner pressure ρ = p0 / (R_gas * θ) * (π_exner)^ (c_v / R_gas) # density P = p0 * (R_gas * (ρ * θ) / p0) ^(c_p/c_v) # pressure (absolute) T = P / (ρ * R_gas) # temperature ρu = SVector(FT(0),FT(0),FT(0)) #State (prognostic) variable assignment e_kin = FT(0) e_pot = grav * z ρe_tot = ρ * total_energy(e_kin, e_pot, T) state.ρ = ρ state.ρu = ρu state.ρe = ρe_tot state.moisture.ρq_tot = FT(0) end function config_risingbubble(FT, N, resolution, xmax, ymax, zmax) # Boundary conditions bc = NoFluxBC() # Choose explicit solver ode_solver = CLIMA.ExplicitSolverType(solver_method=LSRK144NiegemannDiehlBusch) # Set up the model C_smag = FT(0.23) model = AtmosModel{FT}(AtmosLESConfiguration; turbulence=SmagorinskyLilly{FT}(C_smag), source=(Gravity(),), init_state=init_risingbubble!) # Problem configuration config = CLIMA.Atmos_LES_Configuration("DryRisingBubble", N, resolution, xmax, ymax, zmax, init_risingbubble!, solver_type=ode_solver, model=model) return config end function main() CLIMA.init() # Working precision FT = Float64 # DG polynomial order N = 4 # Domain resolution and size Δh = FT(50) Δv = FT(50) resolution = (Δh, Δh, Δv) # Domain extents xmax = 2500 ymax = 2500 zmax = 2500 # Simulation time t0 = FT(0) timeend = FT(1000) # Courant number CFL = FT(0.8) driver_config = config_risingbubble(FT, N, resolution, xmax, ymax, zmax) solver_config = CLIMA.setup_solver(t0, timeend, driver_config, forcecpu=true, Courant_number=CFL) # User defined filter (TMAR positivity preserving filter) cbtmarfilter = GenericCallbacks.EveryXSimulationSteps(1) do (init=false) Filters.apply!(solver_config.Q, 6, solver_config.dg.grid, TMARFilter()) nothing end # Invoke solver (calls solve! function for time-integrator) result = CLIMA.invoke!(solver_config; user_callbacks=(cbtmarfilter,), check_euclidean_distance=true) @test isapprox(result,FT(1); atol=1.5e-3) end main()
namespace Evolution.Textkernel { using Evolution.Textkernel.Models; using System.Threading.Tasks; /// <summary>Interface for Textkernel's CV Extract parser.</summary> public interface ITextkernelParser { /// <summary>Send the file bytes to the service and get back the deserialised result.</summary> /// <param name="file">The CV file to parse.</param> /// <param name="filename">Optional name of the file.</param> /// <returns>The deserialised result.</returns> Task<Profile> Parse(byte[] file, string filename = null); } }
(ns metro.components.web.health (:require [clojure.data.json :as json] [metro.components.server.pedestal :as server] [metro.components.db.postgres :as postgres])) (defn health [request] {:status 200 :headers {"Content-Type" "application/json"} :body (json/write-str {:server @server/status :database @postgres/status})})
import React, { Component } from 'react' import ConcertList from '../components/ConcertList' import DeleteArtist from '../components/DeleteArtist' class CurrentArtist extends Component { componentDidMount() { this.props.getConcerts(this.props.currentArtist.id) } componentDidUpdate(prevProps) { if(this.props.currentArtist !== prevProps.currentArtist) { this.props.getConcerts(this.props.currentArtist.id) } } render() { let view return ( <div class='container'> <br/><br/> <div class='row'> <div class='col-lg'> <h2>{this.props.currentArtist.name}</h2><br/> <img class='img-fluid' src={this.props.currentArtist.image_url}/><br/><br/> <DeleteArtist id={this.props.currentArtist.id} deleteArtist={this.props.deleteArtist}/><br/> </div> <div className='col-lg'> <ConcertList concerts={this.props.concerts} artistId = {this.props.currentArtist.id} addConcert={this.props.add}/> </div> </div> </div> ) } } export default CurrentArtist
<?php namespace estoque\Http\Controllers; use Illuminate\Http\Request; use estoque\Http\Requests; use Illuminate\Support\Facades\DB; use estoque\Produto; use estoque\Http\Requests\ProdutoRequest; class ProdutoController extends Controller { public function __construct(Request $request) { //$this->middleware('auth', ['except' => ['/home', '/lala']]); $this->middleware('auth'); } public function lista() { /*$produtos = DB::select('SELECT * FROM produtos'); if(view()->exists('produto.listagem')) { return view('produto/listagem')->with('produtos', $produtos); return view('listagem', ['produtos' => $produtos]); return view('listagem')->withProdutos($produtos);//magic methods } else { return view('welcome'); }*/ $produtos = Produto::all(); return view('produto/listagem')->with('produtos', $produtos); } //?id=1 public function mostra(Request $request) { /*$id = $request->route('id'); $produto = DB::select('SELECT * FROM produtos WHERE id = ?', [$id]);*/ if($request->has('id')) {//verifica se um parâmetro foi informado $id = $request->input('id'); //$produto = DB::select('SELECT * FROM produtos WHERE id = ?', [$id]); $produto = Produto::find($id); } else { return "Informe um id."; } if(empty($produto)) { return "Este produto não existe."; } return view('produto/detalhes')->with('p', $produto); } public function novo() { return view('produto/formulario'); } public function adiciona(ProdutoRequest $req) { /* $nome = $req->input('nome'); $desc = $req->input('descricao'); $valor = $req->input('valor'); $qtd = $req->input('quantidade'); DB::insert('INSERT INTO produtos (nome, quantidade, valor, descricao) VALUES (?, ?, ?, ?)', array($nome, $qtd, $valor, $desc)); //return redirect('/produtos')->withInput();//envia tudo //return redirect('/produtos')->withInput($req->only('nome')); return redirect()->action('ProdutoController@lista')->withInput();*/ //outra forma de fazer /*$params = $req->all(); $produto = new Produto($params); $produto->save();*/ //mais uma forma de fazer Produto::create($req->all()); return redirect()->action('ProdutoController@lista')->withInput(); } //passando com a barra /1 public function remove($id) { $produto = Produto::find($id); $produto->delete(); return redirect()->action('ProdutoController@lista'); } //poderia ter usado o método mostrar, mas para deixar um exemplo de como fazer das maneiras vou usar esse public function editar($id) { $produto = Produto::find($id); return view('produto/atualiza')->with('p', $produto); } public function atualiza($id, Request $req) { $produto = Produto::findOrFail($id); $params = $req->all(); $produto->fill($params)->save(); return redirect()->action('ProdutoController@lista'); } } /* exemplo public function __construct(Request $request) { $this->request = $request; } public function checkText() { $txt = $this->request->has('txt'); return $txt; } */
package org.zstack.header.identity; import org.zstack.header.query.ExpandedQueries; import org.zstack.header.query.ExpandedQuery; import org.zstack.header.search.Inventory; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Collection; import java.util.List; @Inventory(mappingVOClass = UserGroupUserRefVO.class) @ExpandedQueries({ @ExpandedQuery(expandedField = "user", inventoryClass = UserInventory.class, foreignKey = "userUuid", expandedInventoryKey = "uuid"), @ExpandedQuery(expandedField = "group", inventoryClass = UserGroupInventory.class, foreignKey = "groupUuid", expandedInventoryKey = "uuid") }) public class UserGroupUserRefInventory { private String userUuid; private String groupUuid; private Timestamp createDate; private Timestamp lastOpDate; public static UserGroupUserRefInventory valueOf(UserGroupUserRefVO vo) { UserGroupUserRefInventory inv = new UserGroupUserRefInventory(); inv.setUserUuid(vo.getUserUuid()); inv.setGroupUuid(vo.getGroupUuid()); inv.setCreateDate(vo.getCreateDate()); inv.setLastOpDate(vo.getLastOpDate()); return inv; } public static List<UserGroupUserRefInventory> valueOf(Collection<UserGroupUserRefVO> vos) { List<UserGroupUserRefInventory> invs = new ArrayList<UserGroupUserRefInventory>(); for (UserGroupUserRefVO vo : vos) { invs.add(valueOf(vo)); } return invs; } public String getUserUuid() { return userUuid; } public void setUserUuid(String userUuid) { this.userUuid = userUuid; } public String getGroupUuid() { return groupUuid; } public void setGroupUuid(String groupUuid) { this.groupUuid = groupUuid; } public Timestamp getCreateDate() { return createDate; } public void setCreateDate(Timestamp createDate) { this.createDate = createDate; } public Timestamp getLastOpDate() { return lastOpDate; } public void setLastOpDate(Timestamp lastOpDate) { this.lastOpDate = lastOpDate; } }
rootProject.name = "PortalClosers" include(":core") project(":core").projectDir = File("game/core") include(":android") project(":android").projectDir = File("game/android") include(":desktop") project(":desktop").projectDir = File("game/desktop") include(":headless") project(":headless").projectDir = File("game/headless") includeBuild("engine/gradle-plugins")
package v1 import ( "github.com/Gavazn/Gavazn/internal/category" "github.com/Gavazn/Gavazn/internal/comment" "github.com/Gavazn/Gavazn/internal/post" "github.com/Gavazn/Gavazn/internal/user" "github.com/labstack/echo" "go.mongodb.org/mongo-driver/bson" ) /** * @api {get} /api/v2/dashboard get statistics * @apiVersion 1.0.0 * @apiName getStatistics * @apiGroup Dashboard * * @apiSuccess {Number} total_posts * @apiSuccess {Number} total_categories * @apiSuccess {Number} total_comments * @apiSuccess {Number} total_users * * @apiError {String} error api error message */ func getStatistics(ctx echo.Context)error{ return ctx.JSON(200, echo.Map{ "total_posts": post.Count(bson.M{}), "total_categories": category.Count(bson.M{}), "total_comments": comment.Count(bson.M{}), "total_users": user.Count(bson.M{}), }) }
function addServicesList(name, file) { angular.bootstrap(document.getElementById("divWrap"), ['useApp']); var list = RepeatObj.addList(name, file, function () { showService(name); function showService(name) { console.log(name + ' initialize(); complete'); RepeatObj.useList.title = name; RepeatObj.useList.setActions({ entry: { Action: 'Done' } }); RepeatObj.useList.initialize(name, true); } }); return (list); } function ToolbarObj (tempid, key) { var listobj = ListDataObj(tempid); listobj.DataKey = key; listobj.addReferences = addReferences; listobj.listobj_processData = listobj.processData; listobj.processDatax = function (data) { listobj.listobj_processData(data); if ( typeof (listobj.addReferences) === 'undefined') { } else { listobj.addReferences(); delete (listobj.addReferences); } } return (listobj); } function addListObj(tempid, jsonfilename, readyfunc) { var funcname = 'RepeatObj.addList()'; var listobj = null; if ((listobj = ToolbarObj(tempid, RepeatObj.getDataKey(jsonfilename))) == null) { console.log(funcname + '(); Error in ' + funcname + '; unable to create ListObj'); } else { console.log(funcname + '(); NEW [' + tempid + '] ListObj[' + listobj.DataKey + ']'); } RepeatObj.addListObj(listobj, tempid, jsonfilename, readyfunc); return (listobj); } function addServices() { //restoreTemp(addServicesList); return (addServicesList('Service', '/data/Service.json')); }
import React from 'react' import { graphql } from 'gatsby' import Layout from '../components/Layout' import { useSiteMetadata } from '../hooks' import WebmentionReplies from '../components/Webmention/WebmentionFeed' import { Helmet } from 'react-helmet/es/Helmet' const PageTemplate = ({ data }) => { const { title: siteTitle } = useSiteMetadata() const { html: pageBody, frontmatter, fields } = data.markdownRemark return ( <Layout title={`${frontmatter.title} | ${siteTitle}`} description={frontmatter.description} > <Helmet> <script type="application/ld+json"> {`{ "@context": "http://schema.org", "@type": "WebPage", "name": "${frontmatter.title} | ${siteTitle}", "url": "${'https://arisemyson.com' + fields.slug}", "description": "${frontmatter.description}" }`} </script> </Helmet> <div className={'content'}> <div className={'section-title'}>{frontmatter.title}</div> <article dangerouslySetInnerHTML={{ __html: pageBody }} /> <hr /> <div className={'socialize'}> <form id="comment-form" method="get" action="https://quill.p3k.io/" target="_blank" > <input type="hidden" name="dontask" value="1" /> <input type="hidden" name="me" value="https://commentpara.de/" /> <input type="hidden" name="reply" value={'https://arisemyson.com' + fields.slug} /> </form> <a className={'button button-outline button-small'} target="_blank" href={`https://twitter.com/intent/tweet/?text=My%20thoughts%20on%20${'https://hbish.com' + fields.slug}`} > Tweet this post{' '} </a> <input form="comment-form" className={'button button-outline button-small'} type="submit" value="Write a comment" />{' '} </div> <WebmentionReplies target={'https://arisemyson.com' + fields.slug} /> </div> </Layout> ) } export default PageTemplate export const pageQuery = graphql` query PageBySlug($slug: String!) { site { siteMetadata { title author { name } } } markdownRemark(fields: { slug: { eq: $slug } }) { id excerpt html fields { slug } frontmatter { title date(formatString: "MMMM DD, YYYY") categories tags description } } } `
from rest_framework.permissions import BasePermission class HasValidToken(BasePermission): """Has Valid Token Checks if the request has a valid token, even if the user is not authenticated """ def has_permission(self, request, view): return bool(getattr(request, "auth", False))
package org.example.quiz.service import cats.effect.{ContextShift, IO} import org.example.quiz.dao.QuestionAnswerDao import org.example.quiz.entities._ class QuizService(dao: QuestionAnswerDao, categoryService: CategoryService) (implicit cs: ContextShift[IO]) { private val numberOfQuestions = 10 def generate(categoryId: Long): IO[Option[QuizEntity]] = categoryService.get(categoryId).flatMap { case Some(category) => pickQuestions(category, numberOfQuestions).map(qs => Some(QuizEntity(qs))) case None => IO(None) } private def pickQuestions(category: CategoryEntity, n: Int): IO[List[QuestionEntity]] = { val randomQAs = IO.fromFuture(IO(dao.pickByCategoryId(category.id, n = n))) randomQAs.map { qas => qas.map { case (q, as) => QuestionEntity.fromRecord(q, as) }.toList } } def score(givenAnswers: List[GivenAnswerEntity]): IO[ScoreEntity] = { val questionIds = givenAnswers.map(_.questionId) IO.fromFuture(IO(dao.getCorrectQuestionAnswers(questionIds))).map { correctAnswers => val goodAnswers = givenAnswers.filter { answer => correctAnswers.exists { case (q, a) => q == answer.questionId && a == answer.answerId } } val badAnswers = givenAnswers.diff(goodAnswers) val score = 1.0 * goodAnswers.size / givenAnswers.size ScoreEntity(score, correct = goodAnswers, wrong = badAnswers) } } }
package aws import ( "fmt" "net/url" "regexp" "strings" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/awserr" "github.com/aws/aws-sdk-go/service/iam" "github.com/hashicorp/terraform/helper/schema" ) func resourceAwsIamRolePolicy() *schema.Resource { return &schema.Resource{ // PutRolePolicy API is idempotent, so these can be the same. Create: resourceAwsIamRolePolicyPut, Update: resourceAwsIamRolePolicyPut, Read: resourceAwsIamRolePolicyRead, Delete: resourceAwsIamRolePolicyDelete, Importer: &schema.ResourceImporter{ State: schema.ImportStatePassthrough, }, Schema: map[string]*schema.Schema{ "policy": &schema.Schema{ Type: schema.TypeString, Required: true, }, "name": &schema.Schema{ Type: schema.TypeString, Required: true, ForceNew: true, ValidateFunc: func(v interface{}, k string) (ws []string, errors []error) { // https://github.com/boto/botocore/blob/2485f5c/botocore/data/iam/2010-05-08/service-2.json#L8291-L8296 value := v.(string) if len(value) > 128 { errors = append(errors, fmt.Errorf( "%q cannot be longer than 128 characters", k)) } if !regexp.MustCompile("^[\\w+=,.@-]+$").MatchString(value) { errors = append(errors, fmt.Errorf( "%q must match [\\w+=,.@-]", k)) } return }, }, "role": &schema.Schema{ Type: schema.TypeString, Required: true, ForceNew: true, }, }, } } func resourceAwsIamRolePolicyPut(d *schema.ResourceData, meta interface{}) error { iamconn := meta.(*AWSClient).iamconn request := &iam.PutRolePolicyInput{ RoleName: aws.String(d.Get("role").(string)), PolicyName: aws.String(d.Get("name").(string)), PolicyDocument: aws.String(d.Get("policy").(string)), } if _, err := iamconn.PutRolePolicy(request); err != nil { return fmt.Errorf("Error putting IAM role policy %s: %s", *request.PolicyName, err) } d.SetId(fmt.Sprintf("%s:%s", *request.RoleName, *request.PolicyName)) return nil } func resourceAwsIamRolePolicyRead(d *schema.ResourceData, meta interface{}) error { iamconn := meta.(*AWSClient).iamconn role, name, err := resourceAwsIamRolePolicyParseId(d.Id()) if err != nil { return err } request := &iam.GetRolePolicyInput{ PolicyName: aws.String(name), RoleName: aws.String(role), } getResp, err := iamconn.GetRolePolicy(request) if err != nil { if iamerr, ok := err.(awserr.Error); ok && iamerr.Code() == "NoSuchEntity" { // XXX test me d.SetId("") return nil } return fmt.Errorf("Error reading IAM policy %s from role %s: %s", name, role, err) } if getResp.PolicyDocument == nil { return fmt.Errorf("GetRolePolicy returned a nil policy document") } policy, err := url.QueryUnescape(*getResp.PolicyDocument) if err != nil { return err } if err := d.Set("policy", policy); err != nil { return err } if err := d.Set("name", name); err != nil { return err } return d.Set("role", role) } func resourceAwsIamRolePolicyDelete(d *schema.ResourceData, meta interface{}) error { iamconn := meta.(*AWSClient).iamconn role, name, err := resourceAwsIamRolePolicyParseId(d.Id()) if err != nil { return err } request := &iam.DeleteRolePolicyInput{ PolicyName: aws.String(name), RoleName: aws.String(role), } if _, err := iamconn.DeleteRolePolicy(request); err != nil { return fmt.Errorf("Error deleting IAM role policy %s: %s", d.Id(), err) } return nil } func resourceAwsIamRolePolicyParseId(id string) (roleName, policyName string, err error) { parts := strings.SplitN(id, ":", 2) if len(parts) != 2 { err = fmt.Errorf("role_policy id must be of the for <role name>:<policy name>") return } roleName = parts[0] policyName = parts[1] return }
function Invoke-AmoebaMLPipeline { [CmdletBinding()] param( [Parameter(Mandatory=$True,ValueFromPipeline=$True)] [ref]$Pipeline, [Parameter(Mandatory)] [Type]$DataSet, [Parameter(Mandatory)] [Type]$Predictor ) Write-Verbose ("{0}: {1}" -f $MyInvocation.MyCommand, "Training Data"); $Method = [Microsoft.ML.LearningPipeline].GetMethod("Train").MakeGenericMethod([Type]$DataSet,[Type]$Predictor) try { return $Method.Invoke($Pipeline.Value, $null) } catch { write-host ( @( $_ $_.exception.InnerException.StackTrace $_.exception $_.exception.InnerException $_.exception.InnerException.InnerException $_.exception.InnerException.InnerException.InnerException $_.exception.InnerException.InnerException.InnerException.InnerException ) | out-string ) } }
namespace :doc do desc "Generate the documentation" task :yard do puts "Generating YARD documentation" system(File.join("..", "build", "run"), "doc:yardoc") end desc "Create the API.md file" task :api do require 'erb' require 'sinatra' require 'jsonmodel' require_relative '../backend/app/lib/rest.rb' require_relative '../backend/app/lib/username.rb' require_relative '../backend/app/model/backend_enum_source.rb' require_relative '../backend/app/lib/logging.rb' require_relative '../backend/app/lib/streaming_import.rb' require_relative '../backend/app/lib/component_transfer.rb' require_relative '../backend/app/lib/reports/report_helper.rb' class ArchivesSpaceService < Sinatra::Base def self.helpers nil end include RESTHelpers end @time = Time.new JSONModel::init(:enum_source => BackendEnumSource) require_relative '../backend/app/lib/export' Dir.glob(File.dirname(__FILE__) + '/../backend/app/controllers/*.rb') {|file| require file unless file =~ /system/} @endpoints = ArchivesSpaceService::Endpoint.all.sort{|a,b| a[:uri] <=> b[:uri]} @examples = JSON.parse( IO.read File.dirname(__FILE__) + "/../endpoint_examples.json" ) erb = ERB.new(File.read('API.erb'), nil, '<>') File.open('../API.md', 'w') do |f| f.write erb.result(binding) end end desc 'Rename the YARD index file to avoid problems with Jekyll' task :rename_index do Dir.chdir('../') do files = Dir.glob('doc/**/*') files.each do |f| if File::file?(f) content = File.read(f) content.gsub!('"_index.html"', '"alpha_index.html"') content.gsub!('/_index.html', '/alpha_index.html') File.open(f, "w") do |io| io.write content end end end `mv doc/_index.html doc/alpha_index.html` end end desc 'This generates all documentation and publishes it to the doc folder' task :gen do Rake::Task["doc:api"].invoke Rake::Task["doc:yard"].invoke # Rake::Task["doc:yard-txt"].invoke Rake::Task["doc:rename_index"].invoke end end
//index.js //获取应用实例 Page({ onShareAppMessage(): object { return { title: 'Oak Weui Avatar 头像', path: '/pages/avatar/avatar', } }, })
module model_module use amrex_fort_module, only : rt => amrex_real implicit none contains subroutine get_model_size(ymin, ymax, dy, lo, hi) use amrex_fort_module, only : rt => amrex_real real(rt) , intent(in) :: ymin, ymax, dy integer, intent(out) :: lo, hi integer :: npts ! number of points in the domain npts = (ymax - ymin)/dy + 1 ! we'll do some ghost cells, for the boundary conditions ! by design, the base of the model will be at zone 0 lo = -4 hi = npts + 4 end subroutine get_model_size subroutine get_model(ymin, ymax, dy, & pres_base, dens_base, do_isentropic, & xn_model, & r_model, rho_model, T_model, e_model, p_model, & lo, hi) use eos_module, only : eos use eos_type_module, only : eos_t, eos_input_rp use network, only : nspec use meth_params_module, only: const_grav use amrex_fort_module, only : rt => amrex_real integer, intent(in) :: lo, hi real(rt) , intent(in) :: ymin, ymax, dy real(rt) , intent(in) :: pres_base, dens_base logical, intent(in) :: do_isentropic real(rt) , intent(in) :: xn_model(nspec) real(rt) , intent(out) :: r_model(lo:hi) real(rt) , intent(out) :: rho_model(lo:hi) real(rt) , intent(out) :: T_model(lo:hi) real(rt) , intent(out) :: e_model(lo:hi) real(rt) , intent(out) :: p_model(lo:hi) real(rt) :: H, gamma_const integer :: j type (eos_t) :: eos_state ! compute the pressure scale height (for an isothermal, ideal-gas ! atmosphere) H = pres_base / dens_base / abs(const_grav) ! create the constant if we are isentropic eos_state % rho = dens_base eos_state % p = pres_base eos_state % xn(:) = xn_model(:) ! initial guess eos_state % T = 1000.0e0_rt call eos(eos_input_rp, eos_state) gamma_const = pres_base/(dens_base * eos_state % e) + 1.0e0_rt rho_model(0) = dens_base p_model(0) = pres_base r_model(0) = ymin + 0.5e0_rt*dy ! integrate up from the base do j = 1, hi r_model(j) = ymin + (dble(j)+0.5e0_rt)*dy if (do_isentropic) then rho_model(j) = dens_base*(const_grav*dens_base*(gamma_const - 1.0)* & (r_model(j)-r_model(0))/ & (gamma_const*pres_base) + 1.e0_rt)**(1.e0_rt/(gamma_const - 1.e0_rt)) else rho_model(j) = dens_base * exp(-(r_model(j)-r_model(0))/H) endif p_model(j) = p_model(j-1) - & dy * 0.5e0_rt * (rho_model(j)+rho_model(j-1)) * abs(const_grav) enddo ! integrate down from the base do j = -1, lo, -1 r_model(j) = ymin + (dble(j)+0.5e0_rt)*dy if (do_isentropic) then rho_model(j) = dens_base*(const_grav*dens_base*(gamma_const - 1.0)* & (r_model(j)-r_model(0))/ & (gamma_const*pres_base) + 1.e0_rt)**(1.e0_rt/(gamma_const - 1.e0_rt)) else rho_model(j) = dens_base * exp(-(r_model(j)-r_model(0))/H) endif p_model(j) = p_model(j+1) + & dy * 0.5e0_rt * (rho_model(j)+rho_model(j+1)) * abs(const_grav) enddo ! thermodynamics do j = lo, hi eos_state % rho = rho_model(j) eos_state % p = p_model(j) eos_state % xn(:) = xn_model(:) ! initial guess eos_state % T = 1000.0e0_rt call eos(eos_input_rp, eos_state) e_model(j) = eos_state % e T_model(j) = eos_state % T end do end subroutine get_model end module model_module
export default { control: { opacity: 1, backgroundColor: "#242424" }, button: { backgroundColor: "#332A00" }, buttonLabel: { color: "#FBD89B" }, disabledButtonLabel: { color: "#4F493D", fontSize: 20 }, cancelButton: { backgroundColor: "#5B0000", }, cancelButtonLabel: { color: "#A49A8C" }, title: { color: "#BDC5CE" }, message: { color: "#7F7F7F" } };
// JVM_TARGET: 1.8 // KOTLIN_CONFIGURATION_FLAGS: +JVM.JVM8_TARGET_WITH_DEFAULTS interface Test { fun test(): String { return "OK" } fun testAbstract(): String } // TESTED_OBJECT_KIND: function // TESTED_OBJECTS: Test, test // FLAGS: ACC_PUBLIC // TESTED_OBJECT_KIND: function // TESTED_OBJECTS: Test, testAbstract // FLAGS: ACC_PUBLIC, ACC_ABSTRACT
require "active_support/dependencies" module ActiveMailer # Our host application root path # We set this when the engine is initialized mattr_accessor :app_root # Yield self on setup for nice config blocks def self.setup yield self end end require "active_mailer/engine" ActiveRecord::ConnectionAdapters::SchemaStatements.module_eval do def create_active_mailer_table(*args, &block) block_with_default_columns = Proc.new do |t| t.integer :sender_id t.timestamp :sent_at t.string :subject block.call(t) end create_table(*args, &block_with_default_columns) end end
<?php /** * Category */ namespace app\Models; /** * Category */ class Category { /** @var int $id */ public $id = 0; /** @var string $name */ public $name = 'default-name'; }
<?php namespace App\Http\Controllers\Admin; use App\Http\Model\Real; use App\Http\Model\User; use Validator; use Illuminate\Support\Facades\Crypt; //use \Illuminate\Validation\Validator; use Illuminate\Http\Request; use Illuminate\Support\Facades\DB; use Illuminate\Support\Facades\Input; class IndexController extends CommonController { public function index(){ // $pdo= DB::connection()->getPdo(); // dd($pdo); return view('admin.index'); }// public function info(){ return view('admin.info'); } public function element(){ return view('admin.element'); } // public function map() // { // $loca=Real::all(); //// dd($loca); // // return view('admin.map')->with('data',$loca); // } public function pass(){ if ($input = Input::all()){ $rules=[ 'password'=>'required|between:6,20|confirmed', ]; $message=[ 'password.required'=>'新密码不能为空!', 'password.between'=>'新密码长度必须在6到20位之间!', 'password.confirmed'=>'新密码与确认密码不匹配!' ]; $validator= Validator::make($input,$rules,$message); if ($validator->passes()){ $user=User::where('user_name','=',session('user.user_name'))->first(); $_password = Crypt::decrypt($user->user_password); // dd($_password); if ($input['password_o']==$_password){ $user->user_password = Crypt::encrypt($input['password']); $user->update(); // dd($user->user_password); // return back()->withErrors(['errors'=>'密码修改成功啦!']); } else{ return back()->withErrors(['errors'=>'原密码错误!']); // return back(); } } else{ // return back()->withErrors($validator); // dd($validator->errors()->all()); return back() ->withErrors($validator); } } else{ return view('admin.pass'); } } }
package my.sample class A fun A.check() {} fun test() { val a = A() a.check<caret>() A().check() }
fn = normpath(joinpath(dirname(@__FILE__),"center_sizes.jld2")) d = JLD2.jldopen(fn,"r") tmp = d["ctrs"] ctrs = [SVector(q...) for q in tmp] rads = d["rads"] tree = CD.Octree(ctrs, rads) # extract all the triangles that (potentially) intersect octant (+,+,+) pred(i) = all(ctrs[i].+rads[i] .> 0) bb = SVector(0.5, 0.5, 0.5), 0.5 ids = collect(CD.searchtree(pred, tree, bb)) @test length(ids) == 178 N = 100 using DelimitedFiles buf = readdlm(joinpath(@__DIR__,"assets","ctrs.csv")) ctrs = vec(collect(reinterpret(SVector{3,Float64}, buf'))) rads = vec(readdlm(joinpath(@__DIR__,"assets","rads.csv"))) tree = CD.Octree(ctrs, rads) pred(i) = all(ctrs[i] .+ rads[i] .> 0) bb = @SVector[0.5, 0.5, 0.5], 0.5 ids = collect(CD.searchtree(pred, tree, bb)) @show ids ids2 = findall(i -> all(ctrs[i].+rads[i] .> 0), 1:N) @test length(ids2) == length(ids) @test sort(ids2) == sort(ids) @test ids == [26, 46, 54, 93, 34, 94, 75, 23, 86, 57, 44, 40, 67, 73, 77, 80]