loubnabnl/github-code-duplicate · Datasets at Fast360
{
// Find the span elements that contain the Hugging Face text
const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap');
spans.forEach(span => {
if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) {
span.textContent = 'AI快站';
}
});
});
// Replace the alt attribute of logo images
document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => {
if (img.alt.match(/Hugging\s*Face/i)) {
img.alt = 'AI快站 logo';
}
});
}
// Replace links in the navigation bar
function replaceNavigationLinks() {
// Guard flag to keep this from running repeatedly
if (window._navLinksReplaced) {
return;
}
// Set of links that have already been replaced, to avoid double replacement
const replacedLinks = new Set();
// Only search for and replace links within the navigation area
const headerArea = document.querySelector('header') || document.querySelector('nav');
if (!headerArea) {
return;
}
// Find links inside the navigation area
const navLinks = headerArea.querySelectorAll('a');
navLinks.forEach(link => {
// Skip links that have already been replaced
if (replacedLinks.has(link)) return;
const linkText = link.textContent.trim();
const linkHref = link.getAttribute('href') || '';
// Replace the Spaces link - only replace it once
if (
(linkHref.includes('/spaces') || linkHref === '/spaces' ||
linkText === 'Spaces' || linkText.match(/^\s*Spaces\s*$/i)) &&
linkText !== 'OCR模型免费转Markdown'
) {
link.textContent = 'OCR模型免费转Markdown';
link.href = 'https://fast360.xyz';
link.setAttribute('target', '_blank');
link.setAttribute('rel', 'noopener noreferrer');
replacedLinks.add(link);
}
// Remove the Posts link
else if (
(linkHref.includes('/posts') || linkHref === '/posts' ||
linkText === 'Posts' || linkText.match(/^\s*Posts\s*$/i))
) {
if (link.parentNode) {
link.parentNode.removeChild(link);
}
replacedLinks.add(link);
}
// Replace the Docs link - only replace it once
else if (
(linkHref.includes('/docs') || linkHref === '/docs' ||
linkText === 'Docs' || linkText.match(/^\s*Docs\s*$/i)) &&
linkText !== '模型下载攻略'
) {
link.textContent = '模型下载攻略';
link.href = '/';
replacedLinks.add(link);
}
// Remove the Enterprise link
else if (
(linkHref.includes('/enterprise') || linkHref === '/enterprise' ||
linkText === 'Enterprise' || linkText.match(/^\s*Enterprise\s*$/i))
) {
if (link.parentNode) {
link.parentNode.removeChild(link);
}
replacedLinks.add(link);
}
});
// Look for Spaces, Posts and Enterprise text that may be nested in other elements
const textNodes = [];
function findTextNodes(element) {
if (element.nodeType === Node.TEXT_NODE) {
const text = element.textContent.trim();
if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') {
textNodes.push(element);
}
} else {
for (const child of element.childNodes) {
findTextNodes(child);
}
}
}
// Only look for text nodes within the navigation area
findTextNodes(headerArea);
// Replace the text nodes that were found
textNodes.forEach(node => {
const text = node.textContent.trim();
if (text === 'Spaces') {
node.textContent = node.textContent.replace(/Spaces/g, 'OCR模型免费转Markdown');
} else if (text === 'Posts') {
// Remove the Posts text node
if (node.parentNode) {
node.parentNode.removeChild(node);
}
} else if (text === 'Enterprise') {
// Remove the Enterprise text node
if (node.parentNode) {
node.parentNode.removeChild(node);
}
}
});
// Mark the replacement as complete
window._navLinksReplaced = true;
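// Note: the MutationObserver registered further below resets this flag whenever the
// header/nav re-renders, so the links are replaced again after dynamic updates.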
}
// Replace domains inside code areas
function replaceCodeDomains() {
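// Each pass below rewrites huggingface.co to aifasthub.com in a different part of the page:
// highlighted string spans via textContent, pre/code blocks and the clone dialog via
// innerHTML (which re-creates the element's child nodes).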
// Handle span.hljs-string and span.njs-string elements specifically
document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => {
if (span.textContent && span.textContent.includes('huggingface.co')) {
span.textContent = span.textContent.replace(/huggingface\.co/g, 'aifasthub.com');
}
});
// Replace the domain in spans with the hljs-string class (redundant escaping removed)
document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => {
if (span.textContent && span.textContent.includes('huggingface.co')) {
span.textContent = span.textContent.replace(/huggingface\.co/g, 'aifasthub.com');
}
});
// Replace the domain in pre and code tags that contain a git clone command
document.querySelectorAll('pre, code').forEach(element => {
if (element.textContent && element.textContent.includes('git clone')) {
const text = element.innerHTML;
if (text.includes('huggingface.co')) {
element.innerHTML = text.replace(/huggingface\.co/g, 'aifasthub.com');
}
}
});
// Handle specific command-line examples
document.querySelectorAll('pre, code').forEach(element => {
const text = element.innerHTML;
if (text.includes('huggingface.co')) {
// Dedicated handling for git clone commands
if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) {
element.innerHTML = text.replace(/huggingface\.co/g, 'aifasthub.com');
}
}
});
// Handle code snippets on the model download page specifically
document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => {
const content = container.innerHTML;
if (content && content.includes('huggingface.co')) {
container.innerHTML = content.replace(/huggingface\.co/g, 'aifasthub.com');
}
});
// Handle code snippets in the model repository clone dialog specifically
try {
// Find the dialog whose title contains "Clone this model repository"
const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]');
if (cloneDialog) {
// Find all code snippets and command examples inside the dialog
const codeElements = cloneDialog.querySelectorAll('pre, code, span');
codeElements.forEach(element => {
if (element.textContent && element.textContent.includes('huggingface.co')) {
if (element.innerHTML.includes('huggingface.co')) {
element.innerHTML = element.innerHTML.replace(/huggingface\.co/g, 'aifasthub.com');
} else {
element.textContent = element.textContent.replace(/huggingface\.co/g, 'aifasthub.com');
}
}
});
}
// Target the domain in clone commands more precisely
document.querySelectorAll('[data-target]').forEach(container => {
const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string');
codeBlocks.forEach(block => {
if (block.textContent && block.textContent.includes('huggingface.co')) {
if (block.innerHTML.includes('huggingface.co')) {
block.innerHTML = block.innerHTML.replace(/huggingface\.co/g, 'aifasthub.com');
} else {
block.textContent = block.textContent.replace(/huggingface\.co/g, 'aifasthub.com');
}
}
});
});
} catch (e) {
// Swallow errors without logging
}
}
// Run the replacements once the DOM has finished loading
if (document.readyState === 'loading') {
document.addEventListener('DOMContentLoaded', () => {
replaceHeaderBranding();
replaceNavigationLinks();
replaceCodeDomains();
// Only replace when still needed - check again after 3 seconds
setTimeout(() => {
if (!window._navLinksReplaced) {
console.log('[Client] Re-checking navigation links after 3 seconds');
replaceNavigationLinks();
}
}, 3000);
});
} else {
replaceHeaderBranding();
replaceNavigationLinks();
replaceCodeDomains();
// Only replace when still needed - check again after 3 seconds
setTimeout(() => {
if (!window._navLinksReplaced) {
console.log('[Client] Re-checking navigation links after 3 seconds');
replaceNavigationLinks();
}
}, 3000);
}
// Add a MutationObserver to handle elements that may be loaded dynamically
const observer = new MutationObserver(mutations => {
// Check whether the navigation area has changed
const hasNavChanges = mutations.some(mutation => {
// Check whether a header or nav element changed
return Array.from(mutation.addedNodes).some(node => {
if (node.nodeType === Node.ELEMENT_NODE) {
// Check whether the node is a navigation element or contains one
if (node.tagName === 'HEADER' || node.tagName === 'NAV' ||
node.querySelector('header, nav')) {
return true;
}
// Check whether the node is inside a navigation element
let parent = node.parentElement;
while (parent) {
if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') {
return true;
}
parent = parent.parentElement;
}
}
return false;
});
});
// Only run the replacements when the navigation area has changed
if (hasNavChanges) {
// Reset the replaced flag so the links can be replaced again
window._navLinksReplaced = false;
replaceHeaderBranding();
replaceNavigationLinks();
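// replaceCodeDomains() is not re-run here, so code blocks added after the
// initial load keep the original domain unless another pass runs.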
}
});
// Start observing changes to document.body, including child nodes
if (document.body) {
observer.observe(document.body, { childList: true, subtree: true });
} else {
document.addEventListener('DOMContentLoaded', () => {
observer.observe(document.body, { childList: true, subtree: true });
});
}
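// The observer is never disconnected in this script, so the branding and navigation
// replacements keep being applied as the single-page app re-renders.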
})();
"},"repo_name":{"kind":"string","value":"couchbaselabs/touchbase"},"path":{"kind":"string","value":"TouchbaseModular/public/deprecated/pictureUpload.html"},"language":{"kind":"string","value":"HTML"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":3981,"string":"3,981"}}},{"rowIdx":1002,"cells":{"code":{"kind":"string","value":"package com.ms.meizinewsapplication.features.meizi.model;\n\nimport android.content.Context;\n\nimport com.ms.meizinewsapplication.features.base.pojo.ImgItem;\nimport com.ms.retrofitlibrary.web.MyOkHttpClient;\n\nimport org.loader.model.OnModelListener;\n\nimport java.util.List;\n\nimport rx.Observable;\nimport rx.Subscription;\n\n/**\n * Created by 啟成 on 2016/3/15.\n */\npublic class DbGroupBreastModel extends DbGroupModel {\n\n private String pager_offset;\n\n public Subscription loadWeb(Context context, OnModelListener> listener, String pager_offset) {\n this.pager_offset = pager_offset;\n return loadWeb(context, listener);\n\n }\n\n @Override\n protected Subscription reSubscription(Context context, OnModelListener> listener) {\n Observable dbGroupBreast = getDbGroup().RxDbGroupBreast(\n MyOkHttpClient.getCacheControl(context),\n pager_offset\n );\n\n return rxDbGroup(dbGroupBreast, listener);\n }\n}\n"},"repo_name":{"kind":"string","value":"qq137712630/MeiZiNews"},"path":{"kind":"string","value":"app/src/main/java/com/ms/meizinewsapplication/features/meizi/model/DbGroupBreastModel.java"},"language":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":1004,"string":"1,004"}}},{"rowIdx":1003,"cells":{"code":{"kind":"string","value":"using System.Reflection;\nusing System.Runtime.CompilerServices;\nusing System.Runtime.InteropServices;\n\n// General Information about an assembly is controlled through the following \n// set of attributes. Change these attribute values to modify the information\n// associated with an assembly.\n[assembly: AssemblyTitle(\"12_RectangleProperties\")]\n[assembly: AssemblyDescription(\"\")]\n[assembly: AssemblyConfiguration(\"\")]\n[assembly: AssemblyCompany(\"\")]\n[assembly: AssemblyProduct(\"12_RectangleProperties\")]\n[assembly: AssemblyCopyright(\"Copyright © 2017\")]\n[assembly: AssemblyTrademark(\"\")]\n[assembly: AssemblyCulture(\"\")]\n\n// Setting ComVisible to false makes the types in this assembly not visible \n// to COM components. 
If you need to access a type in this assembly from \n// COM, set the ComVisible attribute to true on that type.\n[assembly: ComVisible(false)]\n\n// The following GUID is for the ID of the typelib if this project is exposed to COM\n[assembly: Guid(\"efacbe98-13fb-4c4d-b368-04e2f314a249\")]\n\n// Version information for an assembly consists of the following four values:\n//\n// Major Version\n// Minor Version \n// Build Number\n// Revision\n//\n// You can specify all the values or you can default the Build and Revision Numbers \n// by using the '*' as shown below:\n// [assembly: AssemblyVersion(\"1.0.*\")]\n[assembly: AssemblyVersion(\"1.0.0.0\")]\n[assembly: AssemblyFileVersion(\"1.0.0.0\")]\n"},"repo_name":{"kind":"string","value":"nellypeneva/SoftUniProjects"},"path":{"kind":"string","value":"01_ProgrFundamentalsMay/11_Data-Types-Exercises/12_RectangleProperties/Properties/AssemblyInfo.cs"},"language":{"kind":"string","value":"C#"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":1420,"string":"1,420"}}},{"rowIdx":1004,"cells":{"code":{"kind":"string","value":"import logging.handlers\nimport os\n\n_pabotlog = logging.getLogger('PABot')\n\n_pabotlog.setLevel(logging.DEBUG)\n\n_logPath = os.path.abspath(\"./logging/pabot.log\")\n\n_formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(name)s - %(message)s')\n\n_consoleStreamHandler = logging.StreamHandler()\n_consoleStreamHandler.setLevel(logging.DEBUG)\n_consoleStreamHandler.setFormatter(_formatter)\n\n_symLogRotFileHandler = logging.handlers.RotatingFileHandler(_logPath, maxBytes=2000000, backupCount=5)\n_symLogRotFileHandler.setLevel(logging.DEBUG)\n_symLogRotFileHandler.setFormatter(_formatter)\n\n_pabotlog.addHandler(_consoleStreamHandler)\n_pabotlog.addHandler(_symLogRotFileHandler)\n\n\ndef LogPABotMessage(message):\n _pabotlog.info(message)\n\n\ndef LogPABotError(message):\n _pabotlog.error(message)\n"},"repo_name":{"kind":"string","value":"KevinJMcGrath/Symphony-Ares"},"path":{"kind":"string","value":"modules/plugins/PABot/logging.py"},"language":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":796,"string":"796"}}},{"rowIdx":1005,"cells":{"code":{"kind":"string","value":"{# ------------------------------------------------------- #}\n{# INDIVIDUAL VIEW FOR EACH storycontributor #}\n{# This page can use any data from http:localhost:2000/cms/#/form/storycontributor/ #}\n{# Webhook uses the SWIG.js (like Djagno/Twig) templating system. Their documentation is here: #}\n{# http://paularmstrong.github.io/swig/docs/tags/ #}\n{# Learn about calling data into Webhook pages here: #}\n{# http://www.webhook.com/docs/template-rules-and-filters/ #}\n{# ------------------------------------------------------- #}\n{# Confused what extends and blocks do? Watch a primer: #}\n{# http://www.webhook.com/docs/template-inheritance-blocks/ #}\n{% extends \"templates/partials/base.html\" %}\n{# This sets our page . It will append this storycontributor's name to the site title defined in base.html #}\n{% block title %}{% parent %} - {{ item.name }}{% endblock %}\n{% block content %}\n View a list of all storycontributor
\n {{ item.name }} \n \n \n Name: \n {{ item.name }}\n \n \n Create Date: \n {# Format the date. You can use PHP's date function to format as needed. http://php.net/manual/en/function.date.php #}\n {{ item.create_date|date('F d Y') }}\n \n \n Last Updated: \n {# Format the date. You can use PHP's date function to format as needed. http://php.net/manual/en/function.date.php #}\n {{ item.last_updated|date('F d Y') }}\n \n \n Publish Date: \n {# Format the date. You can use PHP's date function to format as needed. http://php.net/manual/en/function.date.php #}\n {{ item.publish_date|date('F d Y') }}\n \n \n First Name: \n {{ item.first_name }}\n \n \n Last Name: \n {{ item.last_name }}\n \n \n Title: \n {{ item.title }}\n \n \n Company: \n {{ item.company }}\n \n \n Bio - Short: \n {{ item.bio__short }}\n \n \n Bio - Full: \n {{ item.bio__full|safe }}\n \n \n Avatar: \n {# You can pull out a lot more information from the image property. Info here: #}\n {# http://www.webhook.com/docs/widget-template-reference/#image #}\n \n \n \n Website: \n {{ item.website }}\n \n \n Twitter: \n {{ item.twitter }}\n \n \n LinkedIn: \n {{ item.linkedin }}\n \n \n Preview URL: \n {{ item.preview_url }}\n \n \n Slug: \n {{ item.slug }}\n \n \n Story (Contributor - Primary): \n {# Relations require some special code. More info about relations here: #}\n {# http://www.webhook.com/docs/template-rules-and-filters/#getitem #}\n {% for relation in item.story_contributor__primary %}\n {# You can ouput more than just the name. Feel free to output more fields from the CMS. #}\n {{ relation.name }} {% if not loop.last %},{% endif %}\n {% endfor %}\n \n \n Story (Contributor - Additional): \n {# Relations require some special code. More info about relations here: #}\n {# http://www.webhook.com/docs/template-rules-and-filters/#getitem #}\n {% for relation in item.story_contributor__additional %}\n {# You can ouput more than just the name. Feel free to output more fields from the CMS. #}\n {{ relation.name }} {% if not loop.last %},{% endif %}\n {% endfor %}\n \n \n{% endblock %}\n"},"repo_name":{"kind":"string","value":"ericwebster/project-crayfish"},"path":{"kind":"string","value":"templates/storycontributor/individual.html"},"language":{"kind":"string","value":"HTML"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":4116,"string":"4,116"}}},{"rowIdx":1006,"cells":{"code":{"kind":"string","value":"//\n// BBBClockViewController.h\n// BBBLayer\n//\n// Created by LinBin on 16/7/23.\n// Copyright © 2016年 LinBin. 
All rights reserved.\n//\n\n#import \n\n@interface BBBClockViewController : UIViewController\n\n@end\n"},"repo_name":{"kind":"string","value":"0bin/Project-collection"},"path":{"kind":"string","value":"BBBLayer/BBBLayer/BBBClockViewController.h"},"language":{"kind":"string","value":"C"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":224,"string":"224"}}},{"rowIdx":1007,"cells":{"code":{"kind":"string","value":"setDI($this->di);\n\n $all = $comments->findAll($page);\n\n $this->views->add('comment/comments', [\n 'comments' => $all,\n ]);\n }\n\n\n\n /**\n * Add a comment.\n *\n * @return void\n */\n public function addAction()\n {\n $isPosted = $this->request->getPost('doCreate');\n\n if (!$isPosted) {\n $this->response->redirect($this->request->getPost('redirect'));\n }\n\n $comment = [\n 'page' => $this->request->getPost('page'),\n 'content' => $this->request->getPost('content'),\n 'name' => $this->request->getPost('name'),\n 'web' => $this->request->getPost('web'),\n 'mail' => $this->request->getPost('mail'),\n 'timestamp' => time(),\n 'ip' => $this->request->getServer('REMOTE_ADDR'),\n ];\n\n $comments = new \\Phpmvc\\Comment\\CommentsInSession();\n $comments->setDI($this->di);\n\n $comments->add($comment);\n\n $this->response->redirect($this->request->getPost('redirect'));\n }\n\n\n\n /**\n * Remove all comments.\n *\n * @return void\n */\n public function removeAllAction()\n {\n $isPosted = $this->request->getPost('doRemoveAll');\n\n if (!$isPosted) {\n $this->response->redirect($this->request->getPost('redirect'));\n }\n\n $comments = new \\Phpmvc\\Comment\\CommentsInSession();\n $comments->setDI($this->di);\n\n $comments->deleteAll();\n\n $this->response->redirect($this->request->getPost('redirect'));\n }\n\n public function removeAction($id)\n {\n // $isPosted = $this->request->getPost('doRemove'); //doRemove måste lägga till i formulär i tpl.\n\n // if (!$isPosted) {\n // $this->response->redirect($this->request->getPost('redirect'));\n // }\n\n $comments = new \\Phpmvc\\Comment\\CommentsInSession();\n $comments->setDI($this->di);\n\n $comments->delete($id);\n\n $this->response->redirect($this->request->getPost('redirect'));\n }\n\n public function editFormAction($id)\n {\n $comments = new \\Phpmvc\\Comment\\CommentsInSession();\n $comments->setDI($this->di);\n\n $all = $comments->findAll();\n\n $i = 0;\n\n foreach($all as $comment){\n\n if($comment['id'] == $id){\n break;\n }\n $i++;\n }\n\n $this->views->add('comment/editComment', [\n 'comment' => $all[$i],\n ]);\n }\n\n public function editAction($id)\n {\n $isPosted = $this->request->getPost('doEdit');\n\n if (!$isPosted) {\n $this->response->redirect($this->request->getPost('redirect'));\n }\n\n $comment = [\n 'page' => $this->request->getPost('page'),\n 'content' => $this->request->getPost('content'),\n 'name' => $this->request->getPost('name'),\n 'web' => $this->request->getPost('web'),\n 'mail' => $this->request->getPost('mail'),\n 'timestamp' => $this->request->getPost('timestamp'),\n 'ip' => $this->request->getServer('REMOTE_ADDR'),\n 'id' => $id,\n 'edited' => time(),\n ];\n\n $comments = new \\Phpmvc\\Comment\\CommentsInSession();\n $comments->setDI($this->di);\n\n $comments->edit($comment, $id);\n\n $this->response->redirect($this->request->getPost('redirect'));\n 
}\n}\n"},"repo_name":{"kind":"string","value":"frjf14/Projekt"},"path":{"kind":"string","value":"vendor/phpmvc/comment/src/Comment/CommentController.php"},"language":{"kind":"string","value":"PHP"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":3878,"string":"3,878"}}},{"rowIdx":1008,"cells":{"code":{"kind":"string","value":"#!/bin/bash\n\n# Exit immediately if any commands return non-zero\nset -e\n# Output the commands we run\nset -x\n\n# This is a modified version of the Cloud Foundry Blue/Green deployment guide:\n# https://docs.pivotal.io/pivotalcf/devguide/deploy-apps/blue-green.html\n\ntest $URL\n\n# Update the blue app\ncf unmap-route citizenship-appointment-blue $URL\ncf push citizenship-appointment-blue -b https://github.com/AusDTO/java-buildpack.git --no-hostname --no-manifest --no-route -p build/libs/citizenship-appointments-0.0.1.jar -i 1 -m 512M\ncf map-route citizenship-appointment-blue $URL\n\n# Update the green app\ncf unmap-route citizenship-appointment-green $URL\ncf push citizenship-appointment-green -b https://github.com/AusDTO/java-buildpack.git --no-hostname --no-manifest --no-route -p build/libs/citizenship-appointments-0.0.1.jar -i 1 -m 512M\ncf map-route citizenship-appointment-green $URL\n\n"},"repo_name":{"kind":"string","value":"AusDTO/citizenship-appointment-server"},"path":{"kind":"string","value":"bin/cideploy.sh"},"language":{"kind":"string","value":"Shell"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":886,"string":"886"}}},{"rowIdx":1009,"cells":{"code":{"kind":"string","value":"import ast\nimport heisenberg.library.heisenberg_dynamics_context\nimport heisenberg.library.orbit_plot\nimport heisenberg.option_parser\nimport heisenberg.plot\nimport heisenberg.util\nimport matplotlib\nimport numpy as np\nimport sys\n\n# https://github.com/matplotlib/matplotlib/issues/5907 says this should fix \"Exceeded cell block limit\" problems\nmatplotlib.rcParams['agg.path.chunksize'] = 10000\n\ndynamics_context = heisenberg.library.heisenberg_dynamics_context.Numeric()\n\nop = heisenberg.option_parser.OptionParser(module=heisenberg.plot)\n# Add the subprogram-specific options here.\nop.add_option(\n '--initial-preimage',\n dest='initial_preimage',\n type='string',\n help='Specifies the preimage of the initial conditions with respect to the embedding map specified by the --embedding-dimension and --embedding-solution-sheet-index option values. Should have the form [x_1,...,x_n], where n is the embedding dimension and x_i is a floating point literal for each i.'\n)\nop.add_option(\n '--initial',\n dest='initial',\n type='string',\n help='Specifies the initial conditions [x,y,z,p_x,p_y,p_z], where each of x,y,z,p_x,p_y,p_z are floating point literals.'\n)\nop.add_option(\n '--optimization-iterations',\n dest='optimization_iterations',\n default=1000,\n type='int',\n help='Specifies the number of iterations to run the optimization for (if applicable). Default is 1000.'\n)\nop.add_option(\n '--optimize-initial',\n dest='optimize_initial',\n action='store_true',\n default=False,\n help='Indicates that the specified initial condition (via whichever of the --initial... options) should be used as the starting point for an optimization to attempt to close the orbit. Default value is False.'\n)\nop.add_option(\n '--output-dir',\n dest='output_dir',\n default='.',\n help='Specifies the directory to write plot images and data files to. 
Default is current directory.'\n)\nop.add_option(\n '--disable-plot-initial',\n dest='disable_plot_initial',\n action='store_true',\n default=False,\n help='Disables plotting the initial curve; only has effect if --optimize-initial is specified.'\n)\n\noptions,args = op.parse_argv_and_validate()\nif options is None:\n sys.exit(-1)\n\nnum_initial_conditions_specified = sum([\n options.initial_preimage is not None,\n options.initial is not None,\n])\nif num_initial_conditions_specified != 1:\n print('Some initial condition option must be specified; --initial-preimage, --initial. However, {0} of those were specified.'.format(num_initial_conditions_specified))\n op.print_help()\n sys.exit(-1)\n\n# Validate subprogram-specific options here.\n\n# Attempt to parse initial conditions. Upon success, the attribute options.qp_0 should exist.\nif options.initial_preimage is not None:\n try:\n options.initial_preimage = np.array(ast.literal_eval(options.initial_preimage))\n expected_shape = (options.embedding_dimension,)\n if options.initial_preimage.shape != expected_shape:\n raise ValueError('--initial-preimage value had the wrong number of components (got {0} but expected {1}).'.format(options.initial_preimage.shape, expected_shape))\n options.qp_0 = dynamics_context.embedding(N=options.embedding_dimension, sheet_index=options.embedding_solution_sheet_index)(options.initial_preimage)\n except Exception as e:\n print('error parsing --initial-preimage value; error was {0}'.format(e))\n op.print_help()\n sys.exit(-1)\nelif options.initial is not None:\n try:\n options.initial = heisenberg.util.csv_as_ndarray(heisenberg.util.pop_brackets_off_of(options.initial), float)\n expected_shape = (6,)\n if options.initial.shape != expected_shape:\n raise ValueError('--initial value had the wrong number of components (got {0} but expected {1}).'.format(options.initial.shape, expected_shape))\n options.qp_0 = options.initial.reshape(2,3)\n except ValueError as e:\n print('error parsing --initial value: {0}'.format(str(e)))\n op.print_help()\n sys.exit(-1)\nelse:\n assert False, 'this should never happen because of the check with num_initial_conditions_specified'\n\nrng = np.random.RandomState(options.seed)\nheisenberg.plot.plot(dynamics_context, options, rng=rng)\n"},"repo_name":{"kind":"string","value":"vdods/heisenberg"},"path":{"kind":"string","value":"heisenberg/plot/__main__.py"},"language":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":4283,"string":"4,283"}}},{"rowIdx":1010,"cells":{"code":{"kind":"string","value":"def send_simple_message():\n return requests.post(\n \"https://api.mailgun.net/v3/sandbox049ff464a4d54974bb0143935f9577ef.mailgun.org/messages\",\n auth=(\"api\", \"key-679dc79b890e700f11f001a6bf86f4a1\"),\n data={\"from\": \"Mailgun Sandbox \",\n \"to\": \"nick \",\n \"subject\": \"Hello nick\",\n \"text\": \"Congratulations nick, you just sent an email with Mailgun! You are truly awesome! You can see a record of this email in your logs: https://mailgun.com/cp/log . You can send up to 300 emails/day from this sandbox server. 
Next, you should add your own domain so you can send 10,000 emails/month for free.\"})\n\n\n# cURL command to send mail aith API key\n# curl -s --user 'api:key-679dc79b890e700f11f001a6bf86f4a1' \\\n# https://api.mailgun.net/v3/mail.pdxpixel.com/messages \\\n# -F from='Excited User ' \\\n# -F to=nick@pdxpixel.com \\\n# -F subject='Hello' \\\n# -F text='Testing some Mailgun awesomness!'\n"},"repo_name":{"kind":"string","value":"nicorellius/pdxpixel"},"path":{"kind":"string","value":"pdxpixel/core/mailgun.py"},"language":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":1073,"string":"1,073"}}},{"rowIdx":1011,"cells":{"code":{"kind":"string","value":" vchTmp(pend-pbegin+1, 0);\n reverse_copy(pbegin, pend, vchTmp.begin());\n\n // Convert little endian data to bignum\n CBigNum bn;\n bn.setvch(vchTmp);\n\n // Convert bignum to string\n string str;\n str.reserve((pend - pbegin) * 138 / 100 + 1);\n CBigNum dv;\n CBigNum rem;\n while (bn > bn0)\n {\n if (!BN_div(&dv, &rem, &bn, &bn58, pctx))\n throw bignum_error(\"EncodeBase58 : BN_div failed\");\n bn = dv;\n unsigned int c = rem.getulong();\n str += pszBase58[c];\n }\n\n // Leading zeroes encoded as base58 zeros\n for (const unsigned char* p = pbegin; p < pend && *p == 0; p++)\n str += pszBase58[0];\n\n // Convert little endian string to big endian\n reverse(str.begin(), str.end());\n return str;\n}\n\ninline string EncodeBase58(const vector& vch)\n{\n return EncodeBase58(&vch[0], &vch[0] + vch.size());\n}\n\ninline bool DecodeBase58(const char* psz, vector& vchRet)\n{\n CAutoBN_CTX pctx;\n vchRet.clear();\n CBigNum bn58 = 58;\n CBigNum bn = 0;\n CBigNum bnChar;\n while (isspace(*psz))\n psz++;\n\n // Convert big endian string to bignum\n for (const char* p = psz; *p; p++)\n {\n const char* p1 = strchr(pszBase58, *p);\n if (p1 == NULL)\n {\n while (isspace(*p))\n p++;\n if (*p != '\\0')\n return false;\n break;\n }\n bnChar.setulong(p1 - pszBase58);\n if (!BN_mul(&bn, &bn, &bn58, pctx))\n throw bignum_error(\"DecodeBase58 : BN_mul failed\");\n bn += bnChar;\n }\n\n // Get bignum as little endian data\n vector vchTmp = bn.getvch();\n\n // Trim off sign byte if present\n if (vchTmp.size() >= 2 && vchTmp.end()[-1] == 0 && vchTmp.end()[-2] >= 0x80)\n vchTmp.erase(vchTmp.end()-1);\n\n // Restore leading zeros\n int nLeadingZeros = 0;\n for (const char* p = psz; *p == pszBase58[0]; p++)\n nLeadingZeros++;\n vchRet.assign(nLeadingZeros + vchTmp.size(), 0);\n\n // Convert little endian data to big endian\n reverse_copy(vchTmp.begin(), vchTmp.end(), vchRet.end() - vchTmp.size());\n return true;\n}\n\ninline bool DecodeBase58(const string& str, vector& vchRet)\n{\n return DecodeBase58(str.c_str(), vchRet);\n}\n\n\n\n\n\ninline string EncodeBase58Check(const vector& vchIn)\n{\n // add 4-byte hash check to the end\n vector vch(vchIn);\n uint256 hash = Hash(vch.begin(), vch.end());\n vch.insert(vch.end(), (unsigned char*)&hash, (unsigned char*)&hash + 4);\n return EncodeBase58(vch);\n}\n\ninline bool DecodeBase58Check(const char* psz, vector& vchRet)\n{\n if (!DecodeBase58(psz, vchRet))\n return false;\n if (vchRet.size() < 4)\n {\n vchRet.clear();\n return false;\n }\n uint256 hash = Hash(vchRet.begin(), vchRet.end()-4);\n if (memcmp(&hash, &vchRet.end()[-4], 4) != 0)\n {\n vchRet.clear();\n return false;\n }\n vchRet.resize(vchRet.size()-4);\n return true;\n}\n\ninline bool DecodeBase58Check(const string& str, vector& vchRet)\n{\n return DecodeBase58Check(str.c_str(), vchRet);\n}\n\n\n\n\n\n\nstatic 
const unsigned char ADDRESSVERSION = 0;\n\ninline string Hash160ToAddress(uint160 hash160)\n{\n // add 1-byte version number to the front\n vector vch(1, ADDRESSVERSION);\n vch.insert(vch.end(), UBEGIN(hash160), UEND(hash160));\n return EncodeBase58Check(vch);\n}\n\ninline bool AddressToHash160(const char* psz, uint160& hash160Ret)\n{\n vector vch;\n if (!DecodeBase58Check(psz, vch))\n return false;\n if (vch.empty())\n return false;\n unsigned char nVersion = vch[0];\n if (vch.size() != sizeof(hash160Ret) + 1)\n return false;\n memcpy(&hash160Ret, &vch[1], sizeof(hash160Ret));\n return (nVersion <= ADDRESSVERSION);\n}\n\ninline bool AddressToHash160(const string& str, uint160& hash160Ret)\n{\n return AddressToHash160(str.c_str(), hash160Ret);\n}\n\ninline bool IsValidBitcoinAddress(const char* psz)\n{\n uint160 hash160;\n return AddressToHash160(psz, hash160);\n}\n\ninline bool IsValidBitcoinAddress(const string& str)\n{\n return IsValidBitcoinAddress(str.c_str());\n}\n\n\n\n\ninline string PubKeyToAddress(const vector& vchPubKey)\n{\n return Hash160ToAddress(Hash160(vchPubKey));\n}\n"},"repo_name":{"kind":"string","value":"originalbitcoin/original-bitcoin-client-0.1.0"},"path":{"kind":"string","value":"base58.h"},"language":{"kind":"string","value":"C"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":5438,"string":"5,438"}}},{"rowIdx":1014,"cells":{"code":{"kind":"string","value":"\"\"\"Main entry points for scripts.\"\"\"\n\nfrom __future__ import print_function, division\n\nfrom argparse import ArgumentParser\nfrom collections import OrderedDict\nfrom copy import copy\nfrom datetime import datetime\nimport glob\nimport json\nimport logging\nimport math\nimport os\n\nimport scipy.stats\nimport numpy as np\n\nfrom .version import __version__\nfrom .psffuncs import gaussian_moffat_psf\nfrom .psf import TabularPSF, GaussianMoffatPSF\nfrom .io import read_datacube, write_results, read_results\nfrom .fitting import (guess_sky, fit_galaxy_single, fit_galaxy_sky_multi,\n fit_position_sky, fit_position_sky_sn_multi,\n RegularizationPenalty)\nfrom .utils import yxbounds\nfrom .extern import ADR, Hyper_PSF3D_PL\n\n\n__all__ = [\"cubefit\", \"cubefit_subtract\", \"cubefit_plot\"]\n\nMODEL_SHAPE = (32, 32)\nSPAXEL_SIZE = 0.43\nMIN_NMAD = 2.5 # Minimum Number of Median Absolute Deviations above\n # the minimum spaxel value in fit_position\nLBFGSB_FACTOR = 1e10\nREFWAVE = 5000. # reference wavelength in Angstroms for PSF params and ADR\nPOSITION_BOUND = 3. # Bound on fitted positions relative in initial positions\n\ndef snfpsf(wave, psfparams, header, psftype):\n \"\"\"Create a 3-d PSF based on SNFactory-specific parameterization of\n Gaussian + Moffat PSF parameters and ADR.\"\"\"\n\n # Get Gaussian+Moffat parameters at each wavelength.\n relwave = wave / REFWAVE - 1.0\n ellipticity = abs(psfparams[0]) * np.ones_like(wave)\n alpha = np.abs(psfparams[1] +\n psfparams[2] * relwave +\n psfparams[3] * relwave**2)\n\n # correlated parameters (coefficients determined externally)\n sigma = 0.545 + 0.215 * alpha # Gaussian parameter\n beta = 1.685 + 0.345 * alpha # Moffat parameter\n eta = 1.040 + 0.0 * alpha # gaussian ampl. / moffat ampl.\n\n # Atmospheric differential refraction (ADR): Because of ADR,\n # the center of the PSF will be different at each wavelength,\n # by an amount that we can determine (pretty well) from the\n # atmospheric conditions and the pointing and angle of the\n # instrument. 
We calculate the offsets here as a function of\n # observation and wavelength and input these to the model.\n\n # Correction to parallactic angle and airmass for 2nd-order effects\n # such as MLA rotation, mechanical flexures or finite-exposure\n # corrections. These values have been trained on faint-std star\n # exposures.\n #\n # `predict_adr_params` uses 'AIRMASS', 'PARANG' and 'CHANNEL' keys\n # in input dictionary.\n delta, theta = Hyper_PSF3D_PL.predict_adr_params(header)\n\n # check for crazy values of pressure and temperature, and assign default\n # values.\n pressure = header.get('PRESSURE', 617.)\n if not 550. < pressure < 650.:\n pressure = 617.\n temp = header.get('TEMP', 2.)\n if not -20. < temp < 20.:\n temp = 2.\n\n adr = ADR(pressure, temp, lref=REFWAVE, delta=delta, theta=theta)\n adr_refract = adr.refract(0, 0, wave, unit=SPAXEL_SIZE)\n \n # adr_refract[0, :] corresponds to x, adr_refract[1, :] => y\n xctr, yctr = adr_refract\n\n if psftype == 'gaussian-moffat':\n return GaussianMoffatPSF(sigma, alpha, beta, ellipticity, eta,\n yctr, xctr, MODEL_SHAPE, subpix=3)\n\n elif psftype == 'tabular':\n A = gaussian_moffat_psf(sigma, alpha, beta, ellipticity, eta,\n yctr, xctr, MODEL_SHAPE, subpix=3)\n return TabularPSF(A)\n else:\n raise ValueError(\"unknown psf type: \" + repr(psftype))\n\n\ndef setup_logging(loglevel, logfname=None):\n\n # if loglevel isn't an integer, parse it as \"debug\", \"info\", etc:\n if not isinstance(loglevel, int):\n loglevel = getattr(logging, loglevel.upper(), None)\n if not isinstance(loglevel, int):\n print('Invalid log level: %s' % loglevel)\n exit(1)\n\n # remove logfile if it already exists\n if logfname is not None and os.path.exists(logfname):\n os.remove(logfname)\n\n logging.basicConfig(filename=logfname, format=\"%(levelname)s %(message)s\",\n level=loglevel)\n\n\ndef cubefit(argv=None):\n\n DESCRIPTION = \"Fit SN + galaxy model to SNFactory data cubes.\"\n\n parser = ArgumentParser(prog=\"cubefit\", description=DESCRIPTION)\n parser.add_argument(\"configfile\",\n help=\"configuration file name (JSON format)\")\n parser.add_argument(\"outfile\", help=\"Output file name (FITS format)\")\n parser.add_argument(\"--dataprefix\", default=\"\",\n help=\"path prepended to data file names; default is \"\n \"empty string\")\n parser.add_argument(\"--logfile\", help=\"Write log to this file \"\n \"(default: print to stdout)\", default=None)\n parser.add_argument(\"--loglevel\", default=\"info\",\n help=\"one of: debug, info, warning (default is info)\")\n parser.add_argument(\"--diagdir\", default=None,\n help=\"If given, write intermediate diagnostic results \"\n \"to this directory\")\n parser.add_argument(\"--refitgal\", default=False, action=\"store_true\",\n help=\"Add an iteration where galaxy model is fit \"\n \"using all epochs and then data/SN positions are \"\n \"refit\")\n parser.add_argument(\"--mu_wave\", default=0.07, type=float,\n help=\"Wavelength regularization parameter. \"\n \"Default is 0.07.\")\n parser.add_argument(\"--mu_xy\", default=0.001, type=float,\n help=\"Spatial regularization parameter. \"\n \"Default is 0.001.\")\n parser.add_argument(\"--psftype\", default=\"gaussian-moffat\",\n help=\"Type of PSF: 'gaussian-moffat' or 'tabular'. 
\"\n \"Currently, tabular means generate a tabular PSF from \"\n \"gaussian-moffat parameters.\")\n args = parser.parse_args(argv)\n\n setup_logging(args.loglevel, logfname=args.logfile)\n\n # record start time\n tstart = datetime.now()\n logging.info(\"cubefit v%s started at %s\", __version__,\n tstart.strftime(\"%Y-%m-%d %H:%M:%S\"))\n tsteps = OrderedDict() # finish time of each step.\n\n logging.info(\"parameters: mu_wave={:.3g} mu_xy={:.3g} refitgal={}\"\n .format(args.mu_wave, args.mu_xy, args.refitgal))\n logging.info(\" psftype={}\".format(args.psftype))\n\n logging.info(\"reading config file\")\n with open(args.configfile) as f:\n cfg = json.load(f)\n\n # basic checks on config contents.\n assert (len(cfg[\"filenames\"]) == len(cfg[\"xcenters\"]) ==\n len(cfg[\"ycenters\"]) == len(cfg[\"psf_params\"]))\n\n # -------------------------------------------------------------------------\n # Load data cubes from the list of FITS files.\n\n nt = len(cfg[\"filenames\"])\n\n logging.info(\"reading %d data cubes\", nt)\n cubes = []\n for fname in cfg[\"filenames\"]:\n logging.debug(\" reading %s\", fname)\n cubes.append(read_datacube(os.path.join(args.dataprefix, fname)))\n wave = cubes[0].wave\n nw = len(wave)\n\n # assign some local variables for convenience\n refs = cfg[\"refs\"]\n master_ref = cfg[\"master_ref\"]\n if master_ref not in refs:\n raise ValueError(\"master ref choice must be one of the final refs (\" +\n \" \".join(refs.astype(str)) + \")\")\n nonmaster_refs = [i for i in refs if i != master_ref]\n nonrefs = [i for i in range(nt) if i not in refs]\n\n # Ensure that all cubes have the same wavelengths.\n if not all(np.all(cubes[i].wave == wave) for i in range(1, nt)):\n raise ValueError(\"all data must have same wavelengths\")\n\n # -------------------------------------------------------------------------\n # PSF for each observation\n\n logging.info(\"setting up PSF for all %d epochs\", nt)\n psfs = [snfpsf(wave, cfg[\"psf_params\"][i], cubes[i].header, args.psftype)\n for i in range(nt)]\n\n # -------------------------------------------------------------------------\n # Initialize all model parameters to be fit\n\n yctr0 = np.array(cfg[\"ycenters\"])\n xctr0 = np.array(cfg[\"xcenters\"])\n\n galaxy = np.zeros((nw, MODEL_SHAPE[0], MODEL_SHAPE[1]), dtype=np.float64)\n sn = np.zeros((nt, nw), dtype=np.float64) # SN spectrum at each epoch\n skys = np.zeros((nt, nw), dtype=np.float64) # Sky spectrum at each epoch\n yctr = yctr0.copy()\n xctr = xctr0.copy()\n snctr = (0., 0.)\n\n # For writing out to FITS\n modelwcs = {\"CRVAL1\": -SPAXEL_SIZE * (MODEL_SHAPE[0] - 1) / 2.,\n \"CRPIX1\": 1,\n \"CDELT1\": SPAXEL_SIZE,\n \"CRVAL2\": -SPAXEL_SIZE * (MODEL_SHAPE[1] - 1) / 2.,\n \"CRPIX2\": 1,\n \"CDELT2\": SPAXEL_SIZE,\n \"CRVAL3\": cubes[0].header[\"CRVAL3\"],\n \"CRPIX3\": cubes[0].header[\"CRPIX3\"],\n \"CDELT3\": cubes[0].header[\"CDELT3\"]}\n\n # -------------------------------------------------------------------------\n # Position bounds\n\n # Bounds on data position: shape=(nt, 2)\n xctrbounds = np.vstack((xctr - POSITION_BOUND, xctr + POSITION_BOUND)).T\n yctrbounds = np.vstack((yctr - POSITION_BOUND, yctr + POSITION_BOUND)).T\n snctrbounds = (-POSITION_BOUND, POSITION_BOUND)\n\n # For data positions, check that bounds do not extend\n # past the edge of the model and adjust the minbound and maxbound.\n # This doesn't apply to SN position.\n gshape = galaxy.shape[1:3] # model shape\n for i in range(nt):\n dshape = cubes[i].data.shape[1:3]\n (yminabs, ymaxabs), (xminabs, xmaxabs) 
= yxbounds(gshape, dshape)\n yctrbounds[i, 0] = max(yctrbounds[i, 0], yminabs)\n yctrbounds[i, 1] = min(yctrbounds[i, 1], ymaxabs)\n xctrbounds[i, 0] = max(xctrbounds[i, 0], xminabs)\n xctrbounds[i, 1] = min(xctrbounds[i, 1], xmaxabs)\n\n # -------------------------------------------------------------------------\n # Guess sky\n\n logging.info(\"guessing sky for all %d epochs\", nt)\n for i, cube in enumerate(cubes):\n skys[i, :] = guess_sky(cube, npix=30)\n\n # -------------------------------------------------------------------------\n # Regularization penalty parameters\n\n # Calculate rough average galaxy spectrum from all final refs.\n spectra = np.zeros((len(refs), len(wave)), dtype=np.float64)\n for j, i in enumerate(refs):\n avg_spec = np.average(cubes[i].data, axis=(1, 2)) - skys[i]\n mean_spec, bins, bn = scipy.stats.binned_statistic(wave, avg_spec, \n bins=len(wave)/10)\n spectra[j] = np.interp(wave, bins[:-1] + np.diff(bins)[0]/2., \n mean_spec)\n mean_gal_spec = np.average(spectra, axis=0)\n # Ensure that there won't be any negative or tiny values in mean:\n mean_floor = 0.1 * np.median(mean_gal_spec)\n mean_gal_spec[mean_gal_spec < mean_floor] = mean_floor\n\n galprior = np.zeros((nw, MODEL_SHAPE[0], MODEL_SHAPE[1]), dtype=np.float64)\n\n regpenalty = RegularizationPenalty(galprior, mean_gal_spec, args.mu_xy,\n args.mu_wave)\n\n tsteps[\"setup\"] = datetime.now()\n\n # -------------------------------------------------------------------------\n # Fit just the galaxy model to just the master ref.\n\n data = cubes[master_ref].data - skys[master_ref, :, None, None]\n weight = cubes[master_ref].weight\n\n logging.info(\"fitting galaxy to master ref [%d]\", master_ref)\n galaxy = fit_galaxy_single(galaxy, data, weight,\n (yctr[master_ref], xctr[master_ref]),\n psfs[master_ref], regpenalty, LBFGSB_FACTOR)\n\n if args.diagdir:\n fname = os.path.join(args.diagdir, 'step1.fits')\n write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,\n yctrbounds, xctrbounds, cubes, psfs, modelwcs, fname)\n\n tsteps[\"fit galaxy to master ref\"] = datetime.now()\n\n # -------------------------------------------------------------------------\n # Fit the positions of the other final refs\n #\n # Here we only use spaxels where the *model* has significant flux.\n # We define \"significant\" as some number of median absolute deviations\n # (MAD) above the minimum flux in the model. 
We (temporarily) set the\n # weight of \"insignificant\" spaxels to zero during this process, then\n # restore the original weight after we're done.\n #\n # If there are less than 20 \"significant\" spaxels, we do not attempt to\n # fit the position, but simply leave it as is.\n\n logging.info(\"fitting position of non-master refs %s\", nonmaster_refs)\n for i in nonmaster_refs:\n cube = cubes[i]\n\n # Evaluate galaxy on this epoch for purpose of masking spaxels.\n gal = psfs[i].evaluate_galaxy(galaxy, (cube.ny, cube.nx),\n (yctr[i], xctr[i]))\n\n # Set weight of low-valued spaxels to zero.\n gal2d = gal.sum(axis=0) # Sum of gal over wavelengths\n mad = np.median(np.abs(gal2d - np.median(gal2d)))\n mask = gal2d > np.min(gal2d) + MIN_NMAD * mad\n if mask.sum() < 20:\n continue\n\n weight = cube.weight * mask[None, :, :]\n\n fctr, fsky = fit_position_sky(galaxy, cube.data, weight,\n (yctr[i], xctr[i]), psfs[i],\n (yctrbounds[i], xctrbounds[i]))\n yctr[i], xctr[i] = fctr\n skys[i, :] = fsky\n\n tsteps[\"fit positions of other refs\"] = datetime.now()\n\n # -------------------------------------------------------------------------\n # Redo model fit, this time including all final refs.\n\n datas = [cubes[i].data for i in refs]\n weights = [cubes[i].weight for i in refs]\n ctrs = [(yctr[i], xctr[i]) for i in refs]\n psfs_refs = [psfs[i] for i in refs]\n logging.info(\"fitting galaxy to all refs %s\", refs)\n galaxy, fskys = fit_galaxy_sky_multi(galaxy, datas, weights, ctrs,\n psfs_refs, regpenalty, LBFGSB_FACTOR)\n\n # put fitted skys back in `skys`\n for i,j in enumerate(refs):\n skys[j, :] = fskys[i]\n\n if args.diagdir:\n fname = os.path.join(args.diagdir, 'step2.fits')\n write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,\n yctrbounds, xctrbounds, cubes, psfs, modelwcs, fname)\n\n tsteps[\"fit galaxy to all refs\"] = datetime.now()\n\n # -------------------------------------------------------------------------\n # Fit position of data and SN in non-references\n #\n # Now we think we have a good galaxy model. We fix this and fit\n # the relative position of the remaining epochs (which presumably\n # all have some SN light). We simultaneously fit the position of\n # the SN itself.\n\n logging.info(\"fitting position of all %d non-refs and SN position\",\n len(nonrefs))\n if len(nonrefs) > 0:\n datas = [cubes[i].data for i in nonrefs]\n weights = [cubes[i].weight for i in nonrefs]\n psfs_nonrefs = [psfs[i] for i in nonrefs]\n fyctr, fxctr, snctr, fskys, fsne = fit_position_sky_sn_multi(\n galaxy, datas, weights, yctr[nonrefs], xctr[nonrefs],\n snctr, psfs_nonrefs, LBFGSB_FACTOR, yctrbounds[nonrefs],\n xctrbounds[nonrefs], snctrbounds)\n\n # put fitted results back in parameter lists.\n yctr[nonrefs] = fyctr\n xctr[nonrefs] = fxctr\n for i,j in enumerate(nonrefs):\n skys[j, :] = fskys[i]\n sn[j, :] = fsne[i]\n\n tsteps[\"fit positions of nonrefs & SN\"] = datetime.now()\n\n # -------------------------------------------------------------------------\n # optional step(s)\n\n if args.refitgal and len(nonrefs) > 0:\n\n if args.diagdir:\n fname = os.path.join(args.diagdir, 'step3.fits')\n write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,\n yctrbounds, xctrbounds, cubes, psfs, modelwcs, fname)\n\n # ---------------------------------------------------------------------\n # Redo fit of galaxy, using ALL epochs, including ones with SN\n # light. 
We hold the SN \"fixed\" simply by subtracting it from the\n # data and fitting the remainder.\n #\n # This is slightly dangerous: any errors in the original SN\n # determination, whether due to an incorrect PSF or ADR model\n # or errors in the galaxy model will result in residuals. The\n # galaxy model will then try to compensate for these.\n #\n # We should look at the galaxy model at the position of the SN\n # before and after this step to see if there is a bias towards\n # the galaxy flux increasing.\n\n logging.info(\"fitting galaxy using all %d epochs\", nt)\n datas = [cube.data for cube in cubes]\n weights = [cube.weight for cube in cubes]\n ctrs = [(yctr[i], xctr[i]) for i in range(nt)]\n\n # subtract SN from non-ref cubes.\n for i in nonrefs:\n s = psfs[i].point_source(snctr, datas[i].shape[1:3], ctrs[i])\n # do *not* use in-place operation (-=) here!\n datas[i] = cubes[i].data - sn[i, :, None, None] * s\n\n galaxy, fskys = fit_galaxy_sky_multi(galaxy, datas, weights, ctrs,\n psfs, regpenalty, LBFGSB_FACTOR)\n for i in range(nt):\n skys[i, :] = fskys[i] # put fitted skys back in skys\n\n if args.diagdir:\n fname = os.path.join(args.diagdir, 'step4.fits')\n write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,\n yctrbounds, xctrbounds, cubes, psfs, modelwcs, fname)\n\n # ---------------------------------------------------------------------\n # Repeat step before last: fit position of data and SN in\n # non-references\n\n logging.info(\"re-fitting position of all %d non-refs and SN position\",\n len(nonrefs))\n if len(nonrefs) > 0:\n datas = [cubes[i].data for i in nonrefs]\n weights = [cubes[i].weight for i in nonrefs]\n psfs_nonrefs = [psfs[i] for i in nonrefs]\n fyctr, fxctr, snctr, fskys, fsne = fit_position_sky_sn_multi(\n galaxy, datas, weights, yctr[nonrefs], xctr[nonrefs],\n snctr, psfs_nonrefs, LBFGSB_FACTOR, yctrbounds[nonrefs],\n xctrbounds[nonrefs], snctrbounds)\n\n # put fitted results back in parameter lists.\n yctr[nonrefs] = fyctr\n xctr[nonrefs] = fxctr\n for i, j in enumerate(nonrefs):\n skys[j, :] = fskys[i]\n sn[j, :] = fsne[i]\n\n # -------------------------------------------------------------------------\n # Write results\n\n logging.info(\"writing results to %s\", args.outfile)\n write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,\n yctrbounds, xctrbounds, cubes, psfs, modelwcs, args.outfile)\n\n # time info\n logging.info(\"step times:\")\n maxlen = max(len(key) for key in tsteps)\n fmtstr = \" %2dm%02ds - %-\" + str(maxlen) + \"s\"\n tprev = tstart\n for key, tstep in tsteps.items():\n t = (tstep - tprev).seconds\n logging.info(fmtstr, t//60, t%60, key)\n tprev = tstep\n\n tfinish = datetime.now()\n logging.info(\"finished at %s\", tfinish.strftime(\"%Y-%m-%d %H:%M:%S\"))\n t = (tfinish - tstart).seconds\n logging.info(\"took %3dm%2ds\", t // 60, t % 60)\n\n return 0\n\n\ndef cubefit_subtract(argv=None):\n DESCRIPTION = \\\n\"\"\"Subtract model determined by cubefit from the original data.\n\nThe \"outnames\" key in the supplied configuration file is used to\ndetermine the output FITS file names. The input FITS header is passed\nunaltered to the output file, with the following additions:\n(1) A `HISTORY` entry. 
(2) `CBFT_SNX` and `CBFT_SNY` records giving\nthe cubefit-determined position of the SN relative to the center of\nthe data array (at the reference wavelength).\n\nThis script also writes fitted SN spectra to individual FITS files.\nThe \"sn_outnames\" configuration field determines the output filenames.\n\"\"\"\n\n import shutil\n\n import fitsio\n\n prog_name = \"cubefit-subtract\"\n prog_name_ver = \"{} v{}\".format(prog_name, __version__)\n parser = ArgumentParser(prog=prog_name, description=DESCRIPTION)\n parser.add_argument(\"configfile\", help=\"configuration file name \"\n \"(JSON format), same as cubefit input.\")\n parser.add_argument(\"resultfile\", help=\"Result FITS file from cubefit\")\n parser.add_argument(\"--dataprefix\", default=\"\",\n help=\"path prepended to data file names; default is \"\n \"empty string\")\n parser.add_argument(\"--outprefix\", default=\"\",\n help=\"path prepended to output file names; default is \"\n \"empty string\")\n args = parser.parse_args(argv)\n\n setup_logging(\"info\")\n\n # get input & output filenames\n with open(args.configfile) as f:\n cfg = json.load(f)\n fnames = [os.path.join(args.dataprefix, fname)\n for fname in cfg[\"filenames\"]]\n outfnames = [os.path.join(args.outprefix, fname)\n for fname in cfg[\"outnames\"]]\n\n # load results\n results = read_results(args.resultfile)\n epochs = results[\"epochs\"]\n sny, snx = results[\"snctr\"]\n if not len(epochs) == len(fnames) == len(outfnames):\n raise RuntimeError(\"number of epochs in result file not equal to \"\n \"number of input and output files in config file\")\n\n # subtract and write out.\n for fname, outfname, epoch in zip(fnames, outfnames, epochs):\n logging.info(\"writing %s\", outfname)\n shutil.copy(fname, outfname)\n f = fitsio.FITS(outfname, \"rw\")\n data = f[0].read()\n data -= epoch[\"galeval\"]\n f[0].write(data)\n f[0].write_history(\"galaxy subtracted by \" + prog_name_ver)\n f[0].write_key(\"CBFT_SNX\", snx - epoch['xctr'],\n comment=\"SN x offset from center at {:.0f} A [spaxels]\"\n .format(REFWAVE))\n f[0].write_key(\"CBFT_SNY\", sny - epoch['yctr'],\n comment=\"SN y offset from center at {:.0f} A [spaxels]\"\n .format(REFWAVE))\n f.close()\n\n # output SN spectra to separate files.\n sn_outnames = [os.path.join(args.outprefix, fname)\n for fname in cfg[\"sn_outnames\"]]\n header = {\"CRVAL1\": results[\"header\"][\"CRVAL3\"],\n \"CRPIX1\": results[\"header\"][\"CRPIX3\"],\n \"CDELT1\": results[\"header\"][\"CDELT3\"]}\n for outfname, epoch in zip(sn_outnames, epochs):\n logging.info(\"writing %s\", outfname)\n if os.path.exists(outfname): # avoid warning from clobber=True\n os.remove(outfname)\n with fitsio.FITS(outfname, \"rw\") as f:\n f.write(epoch[\"sn\"], extname=\"sn\", header=header)\n f[0].write_history(\"created by \" + prog_name_ver)\n\n return 0\n\n\ndef cubefit_plot(argv=None):\n DESCRIPTION = \"\"\"Plot results and diagnostics from cubefit\"\"\"\n\n from .plotting import plot_timeseries, plot_epoch, plot_sn, plot_adr\n\n # arguments are the same as cubefit except an output \n parser = ArgumentParser(prog=\"cubefit-plot\", description=DESCRIPTION)\n parser.add_argument(\"configfile\", help=\"configuration filename\")\n parser.add_argument(\"resultfile\", help=\"Result filename from cubefit\")\n parser.add_argument(\"outprefix\", help=\"output prefix\")\n parser.add_argument(\"--dataprefix\", default=\"\",\n help=\"path prepended to data file names; default is \"\n \"empty string\")\n parser.add_argument('-b', '--band', help='timeseries band (U, 
B, V). '\n 'Default is a 1000 A wide band in middle of cube.',\n default=None, dest='band')\n parser.add_argument('--idrfiles', nargs='+', default=None,\n help='Prefix of IDR. If given, the cubefit SN '\n 'spectra are plotted against the production values.')\n parser.add_argument(\"--diagdir\", default=None,\n help=\"If given, read intermediate diagnostic \"\n \"results from this directory and include in plot(s)\")\n parser.add_argument(\"--plotepochs\", default=False, action=\"store_true\",\n help=\"Make diagnostic plots for each epoch\")\n args = parser.parse_args(argv)\n\n # Read in data\n with open(args.configfile) as f:\n cfg = json.load(f)\n cubes = [read_datacube(os.path.join(args.dataprefix, fname), scale=False)\n for fname in cfg[\"filenames\"]]\n\n results = OrderedDict()\n\n # Diagnostic results at each step\n if args.diagdir is not None:\n fnames = sorted(glob.glob(os.path.join(args.diagdir, \"step*.fits\")))\n for fname in fnames:\n name = os.path.basename(fname).split(\".\")[0]\n results[name] = read_results(fname)\n\n # Final result (don't fail if not available)\n if os.path.exists(args.resultfile):\n results[\"final\"] = read_results(args.resultfile)\n\n # plot time series\n plot_timeseries(cubes, results, band=args.band,\n fname=(args.outprefix + '_timeseries.png'))\n\n # Plot wave slices and sn, galaxy and sky spectra for all epochs.\n if 'final' in results and args.plotepochs:\n for i_t in range(len(cubes)):\n plot_epoch(cubes[i_t], results['final']['epochs'][i_t],\n fname=(args.outprefix + '_epoch%02d.png' % i_t))\n\n # Plot result spectra against IDR spectra.\n if 'final' in results and args.idrfiles is not None:\n plot_sn(cfg['filenames'], results['final']['epochs']['sn'],\n results['final']['wave'], args.idrfiles,\n args.outprefix + '_sn.png')\n\n # Plot the x-y coordinates of the adr versus wavelength\n # (Skip this for now; contains no interesting information)\n #plot_adr(cubes, cubes[0].wave, fname=(args.outprefix + '_adr.png'))\n\n return 0\n"},"repo_name":{"kind":"string","value":"snfactory/cubefit"},"path":{"kind":"string","value":"cubefit/main.py"},"language":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":26267,"string":"26,267"}}},{"rowIdx":1015,"cells":{"code":{"kind":"string","value":"---\ntitle: \"Aggregates\"\ndescription: \"Reference documentation for Sensu Named Aggregates.\"\nproduct: \"Sensu Core\"\nversion: \"1.8\"\nweight: 4\nmenu:\n sensu-core-1.8:\n parent: reference\n---\n## Reference documentation\n\n- [What is a Sensu named aggregate?](#what-is-a-check-aggregate)\n - [When should named aggregates be used?](#when-should-check-aggregates-be-used)\n- [How do named aggregates work?](#how-do-check-aggregates-work)\n - [Example aggregated check result](#example-aggregated-check-result)\n- [Aggregate configuration](#aggregate-configuration)\n - [Example aggregate definition](#example-aggregate-definition)\n - [Aggregate definition specification](#aggregate-definition-specification)\n - [Aggregate `check` attributes](#aggregate-check-attributes)\n\n## What is a Sensu named aggregate? {#what-is-a-check-aggregate}\n\nSensu named aggregates are collections of [check results][1], accessible via\nthe [Aggregates API][2]. Check aggregates make it possible to treat the results\nof multiple disparate check results – executed across multiple disparate\nsystems – as a single result.\n\n### When should named aggregates be used? 
{#when-should-check-aggregates-be-used}\n\nCheck aggregates are extremely useful in dynamic environments and/or\nenvironments that have a reasonable tolerance for failure. Check aggregates\nshould be used when a service can be considered healthy as long as a minimum\nthreshold is satisfied (e.g. are at least 5 healthy web servers? are at least\n70% of N processes healthy?).\n\n## How do named aggregates work? {#how-do-check-aggregates-work}\n\nCheck results are included in an aggregate when a check definition includes the\n[`aggregate` definition attribute][3]. Check results that provide an\n`\"aggregate\": \"example_aggregate\"` are aggregated under the corresponding name\n(e.g. `example_aggregate`), effectively capturing multiple check results as a\nsingle aggregate.\n\n### Example aggregated check result\n\nAggregated check results are available from the [Aggregates API][2], via the\n`/aggregates/:name` API endpoint. An aggregate check result provides a\nset of counters indicating the total number of client members, checks, and\ncheck results collected, with a breakdown of how many results were recorded per\nstatus (i.e. `ok`, `warning`, `critical`, and `unknown`).\n\n{{< code json >}}\n{\n \"clients\": 15,\n \"checks\": 2,\n \"results\": {\n \"ok\": 18,\n \"warning\": 0,\n \"critical\": 1,\n \"unknown\": 0,\n \"total\": 19,\n \"stale\": 0\n }\n}\n{{< /code >}}\n\nAdditional aggregate data is available from the Aggregates API, including Sensu\nclient members of a named aggregate, and the corresponding checks which are\nincluded in the aggregate:\n\n{{< code shell >}}\n$ curl -s http://localhost:4567/aggregates/elasticsearch/clients | jq .\n[\n {\n \"name\": \"i-424242\",\n \"checks\": [\n \"elasticsearch_service\",\n \"elasticsearch_cluster_health\"\n ]\n },\n {\n \"name\": \"1-424243\",\n \"checks\": [\n \"elasticsearch_service\"\n ]\n },\n]\n{{< /code >}}\n\nAggregate data may also be fetched per check that is a member of the named\naggregate, along with the corresponding clients that are producing results for\nsaid check:\n\n{{< code shell >}}\n$ curl -s http://localhost:4567/aggregates/elasticsearch/checks | jq .\n[\n {\n \"name\": \"elasticsearch_service\",\n \"clients\": [\n \"i-424242\",\n \"i-424243\"\n ]\n },\n {\n \"name\": \"elasticsearch_cluster_health\",\n \"clients\": [\n \"i-424242\"\n ]\n }\n]\n{{< /code >}}\n\n## Aggregate configuration\n\n### Example aggregate definition\n\nThe following is an example [check definition][6], a JSON configuration file located at `/etc/sensu/conf.d/check_aggregate_example.json`.\n\n{{< code shell >}}\n{\n \"checks\": {\n \"example_check_aggregate\": {\n \"command\": \"do_something.rb -o option\",\n \"aggregate\": \"example_aggregate\",\n \"interval\": 60,\n \"subscribers\": [\n \"my_aggregate\"\n ],\n \"handle\": false\n }\n }\n}{{< /code >}}\n\n### Aggregate definition specification\n\n_NOTE: aggregates are created via the [`aggregate` Sensu `check` definition\nattribute][4]. The configuration example(s) provided above, and the\n\"specification\" provided here are for clarification and convenience only (i.e.\nthis \"specification\" is just a subset of the [check definition\nspecification][5], and not a definition of a distinct Sensu primitive)._\n\n#### Aggregate `check` attributes\n\naggregate | \n-------------|------\ndescription | Create a named aggregate for the check. 
Check result data will be aggregated and exposed via the [Sensu Aggregates API][2].\nrequired | false\ntype | String\nexample | {{< code shell >}}\"aggregate\": \"elasticsearch\"{{< /code >}}\n\naggregates | \n-------------|------\ndescription | An array of strings defining one or more named aggregates (described above).\nrequired | false\ntype | Array\nexample | {{< code shell >}}\"aggregates\": [ \"webservers\", \"production\" ]{{< /code >}}\n\n\nhandle | \n-------------|------\ndescription | If events created by the check should be handled.\nrequired | false\ntype | Boolean\ndefault | true\nexample | {{< code shell >}}\"handle\": false{{< /code >}}_NOTE: although there are cases when it may be helpful to aggregate check results **and** handle individual check results, it is typically recommended to set `\"handle\": false` when aggregating check results, as the [purpose of the aggregation][8] should be to act on the state of the aggregated result(s) rather than the individual check result(s)._\n\n[1]: ../checks#check-results\n[2]: ../../api/aggregates\n[3]: ../checks#check-definition-specification\n[4]: ../checks#check-attributes\n[5]: ../checks#check-definition-specification\n[6]: ../checks#check-configuration\n[7]: ../checks#standalone-checks\n[8]: #when-should-check-aggregates-be-used\n[9]: ../checks#how-are-checks-scheduled\n"},"repo_name":{"kind":"string","value":"sensu/sensu-docs"},"path":{"kind":"string","value":"archived/sensu-core/1.8/reference/aggregates.md"},"language":{"kind":"string","value":"Markdown"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":5864,"string":"5,864"}}},{"rowIdx":1016,"cells":{"code":{"kind":"string","value":"module IncomeTax\n module Countries\n class Morocco < Models::Progressive\n register 'Morocco', 'MA', 'MAR'\n currency 'MAD'\n\n level 30_000, '0%'\n level 50_000, '10%'\n level 60_000, '20%'\n level 80_000, '30%'\n level 180_000, '34%'\n remainder '38%'\n end\n end\nend\n"},"repo_name":{"kind":"string","value":"askl56/income-tax"},"path":{"kind":"string","value":"lib/income_tax/countries/morocco.rb"},"language":{"kind":"string","value":"Ruby"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":310,"string":"310"}}},{"rowIdx":1017,"cells":{"code":{"kind":"string","value":"\n\n\n\t \n\t \n\t \n\tTemplate \n\n\t \n\t \n\t \n\t \n\t \n\t \n\n\t\n\t\n\n\t\n\t\n\n \n \n\n\n\n\t\n\n\t\n\t\t
\n\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t
\n\n\t\t\t\t\t
\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\tFebrurary 19, 2016
\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t18\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t5K\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t3K\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\tJanuary 21, 2016
\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t18\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t5K\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t3K\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\tDecember 30, 2016
\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t18\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t5K\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t3K\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\tFebrurary 19, 2016
\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t18\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t5K\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t3K\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\tJanuary 21, 2016
\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t18\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t5K\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t3K\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\tDecember 30, 2016
\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t18\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t5K\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t\t\t3K\n\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t
\n\t\t\t\t\t \n\n\t\t\t\t\t
\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
\n\t\t\t\t\t\t\t\t
Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt.
\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t \n\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t\t
2000 President
\n\t\t\t\t\t\t\t\t\t\t\t\t
Co-founder, Chairman
\n\t\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t\t
1992–1999
\n\t\t\t\t\t\t\t\t\t\t\t\t
Senior Developer
\n\t\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t\t
2000 President
\n\t\t\t\t\t\t\t\t\t\t\t\t
Co-founder, Chairman
\n\t\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t \n\t\t\t\t\t\t \n\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t\t
1973 – 1975
\n\t\t\t\t\t\t\t\t\t\t\t\t
Harvard University
\n\t\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t\t
1960 – 1973
\n\t\t\t\t\t\t\t\t\t\t\t\t
Lakeside Scool, Seattle
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t \n\t\t\t\t\t\t \n\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t\n\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
74
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
Social Media Marketing
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
+74
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t \n\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
67
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
Web Development
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
+82
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t \n\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
25
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
Search Engine Optimization
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
+4
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t \n\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
20
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
User Experience Design
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
+13
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t \n\t\t\t\t\t\t \n\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t \n\t\t\t\t\t \n\n\t\t\t\t\t
\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\tLorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t
\n\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\tLorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t
\n\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\tLorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t
\n\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\tLorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t
\n\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\tLorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t
\n\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\tLorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t
\n\t\t\t\t\t \n\n\t\t\t\t\t
\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t
See all (20) \n\t\t\t\t\t\t
\n\t\t\t\t\t \n\t\t\t\t
\n\n\t\t\t\t
\n\t\t\t\t\t
\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t
Sarah Sanchez
\n\t\t\t\t\t\t\t
Company Founder
\n\t\t\t\t\t\t\t
Greater Seattle Area
\n\t\t\t\t\t\t\t
Follow \n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\tConnect\n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t
\n\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t200 \n\t\t\t\t\t\t\t\t\tConnections\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t1.9M \n\t\t\t\t\t\t\t\t\tFollowers\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t
\n\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\texample.com \n\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\tfacebook.com/example \n\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\tvk.com/example \n\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\tlinked.in/example \n\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\ttwitter.com/example \n\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t\tExport page as PDF \n\t\t\t\t\t\t\t \n\t\t\t\t\t\t \n\t\t\t\t\t \n\n\t\t\t\t\t
\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
Dan Cederholm
\n\t\t\t\t\t\t\t\t\t\t\t
New York
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
Oykun Yilmaz
\n\t\t\t\t\t\t\t\t\t\t\t
Los Angeles
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
Bill S Kenney
\n\t\t\t\t\t\t\t\t\t\t\t
Cardiff
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
Maggy Smith
\n\t\t\t\t\t\t\t\t\t\t\t
Dusseldorf
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
Dan Cederholm
\n\t\t\t\t\t\t\t\t\t\t\t
New York
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t \n\t\t\t\t\t\t
\n\t\t\t\t\t \n\t\t\t\t
\n\n\t\t\t\t
\n\t\t\t\t\t
\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
Dan Cederholm
\n\t\t\t\t\t\t\t\t\t\t\t
Co-founder of Company
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
+ \n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
Oykun Yilmaz
\n\t\t\t\t\t\t\t\t\t\t\t
Co-founder of Company
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
+ \n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
Bill S Kenney
\n\t\t\t\t\t\t\t\t\t\t\t
Co-founder of Company
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
+ \n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
Maggy Smith
\n\t\t\t\t\t\t\t\t\t\t\t
Co-founder of Company
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
+ \n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t \n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
Susan Andrews
\n\t\t\t\t\t\t\t\t\t\t\t
Co-founder of Company
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
+ \n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t \n\t\t\t\t\t\t
\n\n\t\t\t\t\t\t\n\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
Jackie Tran
\n\t\t\t\t\t\t\t\t\t\t
Company Founder
\n\t\t\t\t\t\t\t\t\t\t
Follow \n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
Jackie Tran
\n\t\t\t\t\t\t\t\t\t\t
Company Founder
\n\t\t\t\t\t\t\t\t\t\t
Follow \n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
Sarah Sanchez
\n\t\t\t\t\t\t\t\t\t\t
Longnameexample corporation
\n\t\t\t\t\t\t\t\t\t\t
Follow \n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t
Sarah Sanchez
\n\t\t\t\t\t\t\t\t\t\t
Longnameexample corporation
\n\t\t\t\t\t\t\t\t\t\t
Follow \n\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t \n\t\t\t\t\t \n\n\t\t\t\t\t
\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t \n\t\t\t\t
\n\t\t\t
\n\t\t
\n\t
\n\n\t\n\t\n\t \n\t\n\n\n