observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); \n\n_EOF_\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":537,"cells":{"blob_id":{"kind":"string","value":"75404d0c077babf592cac9c2eb644730916080ef"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"wkmor1/eco-dev"},"path":{"kind":"string","value":"/userconf.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":455,"string":"455"},"score":{"kind":"number","value":3.078125,"string":"3.078125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#!/bin/bash\n\nUSER=${USER:=rstudio}\nPASSWORD=${PASSWORD:=rstudio}\nUSERID=${USERID:=1000}\nROOT=${ROOT:=FALSE}\nexport HOME=/home/$USER\n\nuseradd -u $USERID $USER\naddgroup $USER staff\necho \"$USER:$PASSWORD\" | chpasswd\nchmod go-rx /usr/bin/passwd\nmkdir -p $HOME\n\nif [ \"$ROOT\" == \"TRUE\" ]\n then\n adduser $USER sudo && echo \"%sudo ALL=(ALL) NOPASSWD:ALL\" >> /etc/sudoers\nfi\n\necho \"PATH=${PATH}\" >> /usr/local/lib/R/etc/Renviron\n\nchown -R $USER:$USER $HOME\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":538,"cells":{"blob_id":{"kind":"string","value":"8781879a5b2c1be8ef2e3461cae16505b123ebc0"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"juanvictor/ShellScript"},"path":{"kind":"string","value":"/datos_linux/datos_linux.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":3187,"string":"3,187"},"score":{"kind":"number","value":3.453125,"string":"3.453125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#!/bin/bash\n\n#MEMORIA RAM\n\tram_total=$(free -m | grep Mem | tr -s ' ' | cut -d ' ' -f2)\n\tram_usado=$(free -m | grep Mem | tr -s ' ' | cut -d ' ' -f3)\n\tram_usado_porcentaje=$(expr $ram_usado \\* 100 / $ram_total)\n\tram_libre_porcentaje=$(expr 100 - $ram_usado_porcentaje)\n\n#CONECTIVIDAD\n\tconectividad=\"NO\"\n\tmyping=$(ping -c 1 google.com 2> /dev/null)\n\tif [ $? 
-eq 0 ] ; then\n\t\t conectividad=\"OK\"\n\tfi\n\n#ESPACIO EN DISCO\n\tdisco_total=$(df -h / | grep /dev | tr -s ' ' ';' | cut -d ';' -f2)\n\tdisco_usado=$(df -h / | grep /dev | tr -s ' ' ';' | cut -d ';' -f5)\n\tdisco_usado_sin_porcentaje=$(df -h / | grep /dev | tr -s ' ' ';' | cut -d ';' -f5 | cut -d '%' -f1)\n\tdisco_libre=$(expr 100 - $disco_usado_sin_porcentaje)\n\n#CPU\n\tcantidad_cpu=$(nproc)\n\tcpu_usado=$(top -n 1 | grep %Cpu | tr -s ' ' | cut -d ' ' -f2)\n\tcpu_libre=$(top -n 1 | grep %Cpu | tr -s ' ' | cut -d ' ' -f8)\n\n#INFORMACION SISTEMA\n\tsistema=$(hostname -s)\n\tdominio=$(hostname -d)\n\tversion=$(cat /etc/issue | cut -d ' ' -f 1-3)\n\tnucleo=$(uname -r)\n\tarquitectura=$(uname -m)\n\n#USUARIOS\n\tusuarios=$(cat /etc/passwd | wc -l)\n\tusuarios_activo=$(uptime | tr -s ' ' | cut -d ' ' -f5)\n\n#PROCESOS\n\tnumero_procesos=$(top -n 1 | grep Ta | tr -s ' ' | cut -d ' ' -f2)\n\tprocesos_ejecutando=$(top -n 1 | grep Ta | tr -s ' ' | cut -d ' ' -f4)\n\tprocesos_durmiendo=$(top -n 1 | grep Ta | tr -s ' ' | cut -d ' ' -f6)\n\tprocesos_parados=$(top -n 1 | grep Ta | tr -s ' ' | cut -d ' ' -f8)\n\tprocesos_zombie=$(top -n 1 | grep Ta | tr -s ' ' | cut -d ' ' -f10)\n\n#RED\n\tred_ip=$(ip route show | grep kernel | cut -d ' ' -f9)\n\tred_mascara=$(ip route show | grep kernel | cut -d '/' -f2 | cut -d ' ' -f1)\n\tred_enlace=$(ip route show | grep default | tr -s ' ' | cut -d ' ' -f3)\n\tred_dns=$(cat /etc/resolv.conf | grep nameserver | tr -s ' ' | sed -n 1p | cut -d ' ' -f2)\t\n\t# red_bytes_tx=$(ifconfig | tr ':' ' ' | grep TX | sed -n 1p | tr -s ' ' | cut -d ' ' -f4)\n\t# red_bytes_rx=$(ifconfig | tr ':' ' ' | grep RX | sed -n 1p | tr -s ' ' | cut -d ' ' -f4)\n\necho -e \"--------------------------------------------------------\"\necho -e \"MEMORIA\\t\\t\\t| INFORMACION DEL SISTEMA\"\necho -e \"Total:\\t\\t$ram_total\\t| Sistema:\\t$sistema\"\necho -e \"Usado:\\t\\t$ram_usado_porcentaje %\\t| Dominio:\\t$dominio\"\necho -e \"Libre:\\t\\t$ram_libre_porcentaje %\\t| Versión:\\t$version\"\necho -e \"\\t\\t\\t| Núcleo:\\t$nucleo\"\necho -e \"ESPACIO EN DISCO RAIZ\\t| Arquitectura:\\t$Arquitectura\"\necho -e \"Total:\\t\\t$disco_total\\t|\"\necho -e \"Usado:\\t\\t$disco_usado\\t| USUARIOS\"\necho -e \"Libre:\\t\\t$disco_libre%\\t| Numero de usuarios:\\t$usuarios\"\necho -e \"\\t\\t\\t| Usuarios activos:\\t$usuarios_activo\"\necho -e \"CPU\\t\\t\\t| \"\necho -e \"Cantidad:\\t$cantidad_cpu\\t| PROCESOS\"\necho -e \"Usado:\\t\\t$cpu_usado %\\t| Total:\\t$numero_procesos\"\necho -e \"Inactivo:\\t$cpu_libre %\\t| Ejecutando:\\t$procesos_ejecutando\"\necho -e \"\\t\\t\\t| Durmiendo:\\t$procesos_durmiendo\"\necho -e \"\\t\\t\\t| Parados:\\t$procesos_parados\"\necho -e \"\\t\\t\\t| Zombies:\\t$procesos_zombie\"\necho -e \"RED\"\necho -e \"IP:\\t\\t$red_ip\"\necho -e \"Mascara:\\t$red_mascara\"\necho -e \"Enlace:\\t\\t$red_enlace\"\necho -e \"DNS:\\t\\t$red_dns\"\n# echo -e \"Bytes TX:\\t$red_bytes_tx\"\n# echo -e \"Bytes RX:\\t$red_bytes_rx\"\necho \"\"\necho -e 
\"Conectividad:\\t$conectividad\"\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":539,"cells":{"blob_id":{"kind":"string","value":"f584f7c6b0d050a616a9a98ec2a34a9a982a2371"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"vincentjoseph/docker-psysh"},"path":{"kind":"string","value":"/build.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":711,"string":"711"},"score":{"kind":"number","value":4.125,"string":"4.125"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"#!/usr/bin/env bash\n\nset -e\n\nfunction contains {\n for a in $1; do\n if [[ \"$2\" = $a ]];then\n return 0\n fi\n done\n\n return 1\n}\n\navailables=\"5.4 5.5 5.6 7.0 latest\"\nif [ \"$1\" = \"all\" ]; then\n set -- \"$availables\"\nfi\n\nfor version in $@; do\n if ! contains \"$availables\" \"$version\"; then\n echo >&2 \"$version not supported. Ignored.\"\n continue\n fi\n\n set -x\n mkdir $version\n cp -r conf $version/\n cp docker-entrypoint.sh $version/\n echo \"# generated by $(basename $0)\" > \"$version/Dockerfile\"\n sed \"s/%%VERSION%%/$version/g\" Dockerfile.template >> \"$version/Dockerfile\"\n\n docker build -t psy/psysh:$version $version\n\n rm -fr $version\ndone\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":540,"cells":{"blob_id":{"kind":"string","value":"7ef4c1c080fd01057d1ab7e07eb13b4eb94eb1cd"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"antoni/dotfiles"},"path":{"kind":"string","value":"/install/install_rust.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":308,"string":"308"},"score":{"kind":"number","value":2.828125,"string":"2.828125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"#!/usr/bin/env bash\nset -ue\n\n# Install Rust toolchain, noninteractively\ncurl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y\n\n# Source for current session\nsource \"$HOME/.cargo/env\"\n\n# Run it, so that we have all components downloaded right away (it does so upon first launch)\ncargo --help\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":541,"cells":{"blob_id":{"kind":"string","value":"e9392e0a4a1e468c930e40ec94b137689677794b"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"kyleburton/sandbox"},"path":{"kind":"string","value":"/selenium/run-selenium-rc.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":4197,"string":"4,197"},"score":{"kind":"number","value":3.453125,"string":"3.453125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"PROF=\"3o74fgym.Selenium1\"\nPROF_DIR=\"/Users/kburton/Library/Application Support/Firefox/Profiles/$PROF\"\n\nif [ -e \"$PROF_DIR\" ]; then\n echo \"copying fresh profile ($PROF) from $PROF_DIR\"\n test -d \"$PROF\" && rm -rf \"$PROF\"\n cp -r \"$PROF_DIR\" ./\nfi\n\n\n# Usage: java -jar selenium-server.jar [-interactive] [options]\n# \n# 
-port : the port number the selenium server should use (default 4444)\n# -timeout : an integer number of seconds before we should give up\n# -interactive: puts you into interactive mode. See the tutorial for more details\n# -singleWindow: puts you into a mode where the test web site\n# executes in a frame. This mode should only be selected if the\n# application under test does not use frames.\n# -profilesLocation: Specifies the directory that holds the profiles\n# that java clients can use to start up selenium. Currently\n# supported for Firefox only.\n# -forcedBrowserMode : sets the browser mode to a single\n# argument (e.g. \"*iexplore\") for all sessions, no matter what is\n# passed to getNewBrowserSession\n# -forcedBrowserModeRestOfLine : sets the browser mode to\n# all the remaining tokens on the line (e.g. \"*custom\n# /some/random/place/iexplore.exe\") for all sessions, no matter what\n# is passed to getNewBrowserSession\n# -userExtensions : indicates a JavaScript file that will be\n# loaded into selenium\n# -browserSessionReuse: stops re-initialization and spawning of the\n# browser between tests\n# -avoidProxy: By default, we proxy every browser request; set this\n# flag to make the browser use our proxy only for URLs containing\n# '/selenium-server'\n# -firefoxProfileTemplate : normally, we generate a fresh empty\n# Firefox profile every time we launch. You can specify a directory\n# to make us copy your profile directory instead.\n# -debug: puts you into debug mode, with more trace information and\n# diagnostics on the console\n# -browserSideLog: enables logging on the browser side; logging\n# messages will be transmitted to the server. This can affect\n# performance.\n# -ensureCleanSession: If the browser does not have user profiles,\n# make sure every new session has no artifacts from previous\n# sessions. For example, enabling this option will cause all user\n# cookies to be archived before launching IE, and restored after IE\n# is closed.\n# -trustAllSSLCertificates: Forces the Selenium proxy to trust all\n# SSL certificates. This doesn't work in browsers that don't use the\n# Selenium proxy.\n# -log : writes lots of debug information out to a log\n# file\n# -htmlSuite : Run a\n# single HTML Selenese (Selenium Core) suite and then exit\n# immediately, using the specified browser (e.g. \"*firefox\") on the\n# specified URL (e.g. \"http://www.google.com\"). You need to specify\n# the absolute path to the HTML test suite as well as the path to the\n# HTML results file we'll generate.\n# -proxyInjectionMode: puts you into proxy injection mode, a mode\n# where the selenium server acts as a proxy server for all content\n# going to the test application. Under this mode, multiple domains\n# can be visited, and the following additional flags are supported:\n# \n# -dontInjectRegex : an optional regular expression that\n# proxy injection mode can use to know when to bypss injection\n# -userJsInjection : specifies a JavaScript file which will\n# then be injected into all pages\n# -userContentTransformation : a regular\n# expression which is matched against all test HTML content; the\n# second is a string which will replace matches. These flags can\n# be used any number of times. 
A simple example of how this could\n# be useful: if you add \"-userContentTransformation https http\"\n# then all \"https\" strings in the HTML of the test application will\n# be changed to be \"http\".\n\njava -jar /opt/algorithmics.com/algo-connect/selenium-server-1.0.1/selenium-server.jar \\\n -firefoxProfileTemplate ./3o74fgym.Selenium1 \n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":542,"cells":{"blob_id":{"kind":"string","value":"58fafed348cafb067b2c3dbc9deddfac65a36fda"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"TonyVlcek/config-files"},"path":{"kind":"string","value":"/.bashrc"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":2669,"string":"2,669"},"score":{"kind":"number","value":2.984375,"string":"2.984375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"# _______ _ ____ _ \n#|__ __| ( ) | _ \\ | | \n# | | ___ _ __ _ _|/ ___ | |_) | __ _ ___| |__ \n# | |/ _ \\| '_ \\| | | | / __| | _ < / _` / __| '_ \\ \n# | | (_) | | | | |_| | \\__ \\ | |_) | (_| \\__ \\ | | | \n# |_|\\___/|_| |_|\\__, | |___/ |____/ \\__,_|___/_| |_| \n#................ __/ | \n# |___/ \n\n# If not running interactively, don't do anything\ncase $- in\n *i*) ;;\n *) return;;\nesac\n\n#History setup\nHISTCONTROL=ignoreboth\t# ignore dublicities and space starting lines\nshopt -s histappend\t\t#append to the history file\nHISTSIZE=1000\t\t\t# history length\nHISTFILESIZE=2000\t\t# history lenght\n\nshopt -s checkwinsize\t# window size update\nshopt -s globstar\t\t# enable ** - match file in subtree\n\n# Less setup\n[ -x /usr/bin/lesspipe ] && eval \"$(SHELL=/bin/sh lesspipe)\" ## make less more friendly for non-text input files\n# Less colors\nexport LESS_TERMCAP_mb=$(printf '\\e[01;31m') # enter blinking mode - red\nexport LESS_TERMCAP_md=$(printf '\\e[01;32m') # enter double-bright mode - bold, magenta\nexport LESS_TERMCAP_me=$(printf '\\e[0m') # turn off all appearance modes (mb, md, so, us)\nexport LESS_TERMCAP_se=$(printf '\\e[0m') # leave standout mode \nexport LESS_TERMCAP_so=$(printf '\\e[01;33m') # enter standout mode - yellow\nexport LESS_TERMCAP_ue=$(printf '\\e[0m') # leave underline mode\nexport LESS_TERMCAP_us=$(printf '\\e[04;36m') # enter underline mode - cyan\n\n# set variable identifying the chroot you work in (used in the prompt below)\nif [ -z \"${debian_chroot:-}\" ] && [ -r /etc/debian_chroot ]; then\n debian_chroot=$(cat /etc/debian_chroot)\nfi\n\n# Enable programmable completion features (you don't need to enable\nif ! shopt -oq posix; then\n if [ -f /usr/share/bash-completion/bash_completion ]; then\n . /usr/share/bash-completion/bash_completion\n elif [ -f /etc/bash_completion ]; then\n . /etc/bash_completion\n fi\nfi\n\n# COLORS SETUP\n. ~/.bash_colors\n\n# Git Setup\n. 
~/.git-prompt.sh\nexport GIT_PS1_SHOWDIRTYSTATE=1\nexport GIT_PS1_SHOWSTASHSTATE=1\nexport GIT_PS1_SHOWUNTRACKEDFILES=1\nexport GIT_PS1_SHOWUPSTREAM=\"auto verbose\"\n\n# GCC setup\nexport GCC_COLORS='error=01;31:warning=01;35:note=01;36:caret=01;32:locus=01:quote=01' # enable colored errors\nGCC_INCLUDE_DIR=/usr/include/\nexport GCC_INCLUDE_DIR\n\n# PROMPT SETUP\nPS1=\"\\[${Yellow}\\][\\t] \\[${BGreen}\\]\\u@\\h\\[${Color_Off}\\]:\\[${BBlue}\\]\\w\\[${Yellow}\\]\"'$(__git_ps1 \"(%s)\")'\"\\[${Color_Off}\\]\\$ \"\n\n# Load ALIASES\nif [ -f ~/.bash_aliases ]; then\n . ~/.bash_aliases\nfi\n\n# xset setup\nxset r rate 200 50\t\t# speed up keyboard response\n\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":543,"cells":{"blob_id":{"kind":"string","value":"e1f0aae3c6651b583fa358d0d8b87fe7b7c10b79"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"gingerzoealex/dotfiles"},"path":{"kind":"string","value":"/.bash_profile"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1361,"string":"1,361"},"score":{"kind":"number","value":3.0625,"string":"3.0625"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"on_reith () {\n [ \"BBC On Network\" = \"$(/usr/sbin/scselect 2>&1 | grep '^ \\*' | sed 's/.*(\\(.*\\))/\\1/')\" ]\n}\nif on_reith;\n\tthen\n\t echo \"On Reith\"\n\t export http_proxy=\"http://www-cache.reith.bbc.co.uk:80\"\n\t export https_proxy=\"$http_proxy\"\n\t export HTTP_PROXY=\"$http_proxy\"\n\t export HTTPS_PROXY=\"$http_proxy\"\n\t export ALL_PROXY=\"$http_proxy\"\n\t export no_proxy=localhost,127.0.0.1\n\t export NO_PROXY=$no_proxy\n\t ln -fs ~/.ssh/on-reith-config ~/.ssh/config\n\t if [[ -n $(which npm) ]]; then\n\t\t npm config set proxy $http_proxy\n\t\t npm config set https-proxy $https_proxy\n\t fi\n\t if [[ -n $(which git) ]]; then\n\t\t git config --global http.proxy $http_proxy\n\t\t git config --global https.proxy $http_proxy\n\t fi\n\telse\n\t echo \"Off Reith\"\n\t unset http_proxy\n\t unset https_proxy\n\t unset HTTP_PROXY\n\t unset HTTPS_PROXY\n\t unset ALL_PROXY\n\t unset no_proxy\n\t unset NO_PROXY\n\t ln -fs ~/.ssh/off-reith-config ~/.ssh/config\n\t if [[ -n $(which npm) ]]; then\n\t\t npm config rm proxy\n\t\t npm config rm https-proxy\n\t fi\n\t if [[ -n $(which git) ]]; then\n\t\t git config --global --unset http.proxy\n\t\t git config --global --unset https.proxy\n\t fi\n\t echo HTTP_PROXY\nfi\nexport NVM_DIR=\"$HOME/.nvm\"\n\nexport NVM_DIR=\"$HOME/.nvm\"\n[ -s \"$NVM_DIR/nvm.sh\" ] && \\. \"$NVM_DIR/nvm.sh\" # This loads nvm\n[ -s \"$NVM_DIR/bash_completion\" ] && \\. 
\"$NVM_DIR/bash_completion\" # This loads nvm bash_completion\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":544,"cells":{"blob_id":{"kind":"string","value":"fda3c71cfad78be2c64ebda8efeed2b4b95acc5e"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"BackupTheBerlios/sorcerer-svn"},"path":{"kind":"string","value":"/trunk/grimoire/wpa_supplicant.d/init.d/wpa_supplicant"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":6693,"string":"6,693"},"score":{"kind":"number","value":3.703125,"string":"3.703125"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#!/bin/bash\n\n### BEGIN INIT INFO\n# Default-Mode: 500\n# Required-Start: proc wpa_priv var_run\n# Should-Start: udev\n# Required-Stop: kill\n# Default-Start: S 2 3 4 5\n# Default-Stop: 0 6\n# Short-Description: Starts wpa_supplicant for all wireless interfaces\n### END INIT INFO\n\n. /lib/lsb/init-functions\n\npnw(){\n [ -f /proc/net/wireless ] &&\n tr ' ' '\\n' < /proc/net/wireless |\n sed 's/://p;d'\n}\n\nguess_interface(){\n if [ -z \"$INTERFACE\" ]\n then INTERFACE=\"$( pnw | sed \"1p;d\" )\"\n fi\n}\n\nonly start stop configure status\nname wpa_supplicant\nserver /sbin/wpa_supplicant\nguess_interface\nconfig /etc/wpa_supplicant/wpa_supplicant.conf\noptions \"-B -Dwext -i$INTERFACE -c $CONF\"\n\nif [ \"$1\" == start ]; then\n if [ -z \"$INTERFACE\" ] || ! /sbin/ip link show grep -q \": $INTERFACE:\"; then\n log_warning_msg \"Net device is not obvious. Not starting.\"\n trap - EXIT; exit 0\n fi\nfi\n\nwrite_conf(){\n output(){\n echo \"ctrl_interface=DIR=/var/run/wpa_supplicant GROUP=wpa_priv\"\n echo \"network={\"\n echo \"ssid=\\\"$SSID\\\"\"\n echo \"scan_ssid=$SCAN_SSID\"\n echo \"key_mgmt=$KEY_MGMT\"\n echo \"psk=\\\"$PSK\\\"\"\n echo \"}\"\n }\n mkdir -p /etc/wpa_supplicant/\n local c=/etc/wpa_supplicant/wpa_supplicant.conf\n output > $c\n chmod 600 $c\n}\n\nconfigure(){\n\n save(){\n output(){\n echo \"CONFIGURED=true\"\n echo \"SSID=$SSID\"\n echo \"SCAN_SSID=$SCAN_SSID\"\n echo \"KEY_MGMT=$KEY_MGMT\"\n echo \"PSK=$PSK\"\n echo \"INTERFACE=$INTERFACE\"\n }\n\n mkdir -p /etc/init.d/conf.d\n local c=/etc/init.d/conf.d/wpa_supplicant\n\n [ -f $c ] &&\n sed -i '/CONFIGURED=/d\n /SSID=/d\n /SCAN_SSID=/d\n /KEY_MGMT=/d\n /PSK=/d\n /INTERFACE=/d' $c\n output > $c\n chmod 600 $c\n write_conf\n }\n\n get_ssid(){\n BACKTITLE=$\"WPA Configuration Menu\"\n TITLE=$\"WPA Variable Selection\"\n HELP=$\"Enter the same name for network as router uses\"\n\n SSID=\"$( dialog \\\n --backtitle \"$BACKTITLE\" \\\n --title \"$TITLE\" \\\n --stdout \\\n --no-cancel \\\n --inputbox \"$HELP\" 0 0 \\\n \"$SSID\"\n )\"\n }\n\n get_scan_ssid(){\n BACKTITLE=$\"WPA Configuration Menu\"\n TITLE=$\"WPA Variable Selection\"\n HELP=$\"Does the router broadcast the SSID?\"\n\n if ! 
[ \"$SCAN_SSID\" == 1 ]\n then local DEFAULT=--defaultno\n fi\n\n if dialog \\\n --backtitle \"$BACKTITLE\" \\\n --title \"$TITLE\" \\\n $DEFAULT \\\n --yesno \"$HELP\" 0 0\n then SCAN_SSID=1\n else SCAN_SSID=0\n fi\n }\n\n get_psk(){\n BACKTITLE=$\"WPA Configuration Menu\"\n TITLE=$\"WPA Variable Selection\"\n HELP=$\"Enter same network passphrase as router uses\"\n\n PSK=\"$( dialog \\\n --backtitle \"$BACKTITLE\" \\\n --title \"$TITLE\" \\\n --stdout \\\n --no-cancel \\\n --inputbox \"$HELP\" 0 0 \\\n \"$PSK\"\n )\"\n }\n\n get_key_mgmt(){\n BACKTITLE=$\"WPA Configuration Menu\"\n TITLE=$\"WPA Variable Selection\"\n HELP=$\"Select type of key management router uses\"\n OTHER=$\"Menu driven configuration of non WPA-PSK key management\nis not supported by this simple menu driven configuration interface.\nPlease read the wpa_supplicant.conf manual page\nand then appropriately edit /etc/wpa_supplicant/wpa_supplicant.conf\nafter selecting the interface and exiting this configuration menu.\"\n\n KEY_MGMT=\"$( dialog \\\n --backtitle \"$BACKTITLE\" \\\n --title \"$TITLE\" \\\n --stdout \\\n --no-cancel \\\n --menu \"$HELP\" 0 0 0 \\\n WPA-PSK \"\" \\\n OTHER \"\"\n )\"\n\n if [ OTHER = \"$KEY_MGMT\" ]; then\n dialog --backtitle \"$BACKTITLE\" \\\n --title \"$TITLE\" \\\n --stdout \\\n --no-cancel \\\n --msgbox \"$OTHER\"\n fi\n }\n\n interface_available(){\n local HELP=$\"Wireless interfaces are not available.\nWhen wireless interfaces are available please re-run\n# /etc/init.d/wpa_supplicant configure\"\n\n if ! pnw | grep -q .; then\n dialog --backtitle \"$BACKTITLE\" \\\n --title \"$TITLE\" \\\n --stdout \\\n --no-cancel \\\n --msgbox \"$HELP\" 0 0\n false\n fi\n }\n\n get_interface(){\n BACKTITLE=$\"WPA Configuration Menu\"\n TITLE=$\"WPA Variable Selection\"\n HELP=$\"Select wireless interface name\"\n\n interface_available &&\n KEY_MGMT=\"$( dialog \\\n --backtitle \"$BACKTITLE\" \\\n --title \"$TITLE\" \\\n --stdout \\\n --no-cancel \\\n --default-item \"$INTERFACE\" \\\n --menu \"$HELP\" 0 0 0 \\\n $( pnw | sed -r \"s:(.*):\\1\\tInterface:\" )\n )\"\n }\n\n wpa_menu(){\n BACKTITLE=$\"WPA Configuration Menu\"\n TITLE=$\"WPA Variable Selection\"\n HELP=$\"Select field to adjust\"\n OUT=$\"Save and Exit\"\n SELECT=$\"Select\"\n\n dialog \\\n --backtitle \"$BACKTITLE\" \\\n --title \"$TITLE\" \\\n --stdout \\\n --ok-label \"$SELECT\" \\\n --cancel-label \"$OUT\" \\\n --menu \"$HELP\" \\\n 0 0 0 \\\n SSID \"$SSID\" \\\n SCAN_SSID \"$SCAN_SSID\" \\\n KEY_MGMT \"$KEY_MGMT\" \\\n PSK \"$PSK\" \\\n INTERFACE \"$INTERFACE\"\n }\n\n while :; do\n case $( wpa_menu ) in\n SSID)\t\tget_ssid ;;\n SCAN_SSID)\tget_scan_ssid ;;\n KEY_MGMT)\tget_key_mgmt ;;\n PSK)\t\tget_psk ;;\n INTERFACE)\tget_interface ;;\n *)\t\tsave; return 0 ;;\n esac\n done\n}\n\nnow(){\n if ! dialog --title \"WPA supplicant configuration\" --timeout 60 \\\n --yesno \"WPA supplicant can be configured now.\nOr configuration and starting later is possible\nby executing the commands:\n\n# /etc/init.d/wpa_supplicant configure;\n# /etc/init.d/wpa_supplicant start\n\nIf deployed then this prompt will not appear on the next boot.\n\nConfigure WPA supplicant now?\" 0 0\n then\n mkdir -pm 700 /etc/init.d/conf.d\n echo CONFIGURED=true > /etc/init.d/conf.d/wpa_supplicant\n false\n fi\n}\n\n# if [ \"$1\" == start ]\n# then pnw | grep -q . 
|| exit 0\n# fi\n\n# param(){\n# local N\n# local E=/etc/wpa_supplicant\n\n# pnw |\n# while read; do\n# if [ -f $E/wpa$REPLY.conf ]\n# then CONF=\" -c $E/wpa$REPLY.conf\"\n# elif [ -f $E/wpa_supplicant.conf ]\n# then CONF=\" -c $E/wpa_supplicant.conf\"\n# fi\n# echo \"$N-Dwext -i$REPLY$CONF\"\n# N=\"-N \"\n# done\n# }\n\n# Yes this odd code at the bottom should be at the bottom\n# because it requires functions defined above.\n\nif [ start == \"$1\" ]; then\n if [ -z \"$CONFIGURED\" ] &&\n ! [ -f /etc/wpa_supplicant/wpa_supplicant.conf ]\n then now && configure\n fi\n\n if [ -n \"$SSID\" ] && ! [ -f /etc/wpa_supplicant/wpa_supplicant.conf ]\n then write_conf\n fi\nfi\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":545,"cells":{"blob_id":{"kind":"string","value":"406ae70f7481fe5cf40b9ac232abbb507be1b6af"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"Kentarou-linux/dotfiles"},"path":{"kind":"string","value":"/.zshrc"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":6570,"string":"6,570"},"score":{"kind":"number","value":3.03125,"string":"3.03125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"# Enable Powerlevel10k instant prompt. Should stay close to the top of ~/.zshrc.\n# Initialization code that may require console input (password prompts, [y/n]\n# confirmations, etc.) must go above this block; everything else may go below.\nif [[ -r \"${XDG_CACHE_HOME:-$HOME/.cache}/p10k-instant-prompt-${(%):-%n}.zsh\" ]]; then\n source \"${XDG_CACHE_HOME:-$HOME/.cache}/p10k-instant-prompt-${(%):-%n}.zsh\"\nfi\n\n# ~/.zshrc file for zsh interactive shells.\n# see /usr/share/doc/zsh/examples/zshrc for examples\n\nexport ZSH=\"/home/kebab/.oh-my-zsh\"\n\nZSH_THEME=\"agnoster\"\nplugins=(\n git\n zsh-autosuggestions\n zsh-syntax-highlighting\n)\n\nsource $ZSH/oh-my-zsh.sh\n\n\nsetopt autocd # change directory just by typing its name\nsetopt correct # auto correct mistakes\nsetopt interactivecomments # allow comments in interactive mode\nsetopt magicequalsubst # enable filename expansion for arguments of the form ‘anything=expression’\nsetopt nonomatch # hide error message if there is no match for the pattern\nsetopt notify # report the status of background jobs immediately\nsetopt numericglobsort # sort filenames numerically when it makes sense\nsetopt promptsubst # enable command substitution in prompt\nsetopt histignorealldups\nWORDCHARS=${WORDCHARS//\\/} # Don't consider certain characters part of the word\n\n# hide EOL sign ('%')\nPROMPT_EOL_MARK=\"\"\n\n# configure key keybindings\n#bindkey -e # emacs key bindings\nbindkey ' ' magic-space # do history expansion on space\nbindkey '^[[3;5~' kill-word # ctrl + Supr\nbindkey '^[[3~' delete-char # delete\nbindkey '^[[1;5C' forward-word # ctrl + ->\nbindkey '^[[1;5D' backward-word # ctrl + <-\nbindkey '^[[5~' beginning-of-buffer-or-history # page up\nbindkey '^[[6~' end-of-buffer-or-history # page down\nbindkey '^[[H' beginning-of-line # home\nbindkey '^[[F' end-of-line # end\nbindkey '^[[Z' undo # shift + tab undo last action\n\n# enable completion features\nautoload -Uz compinit\nautoload -Uz colors\ncolors\ncompinit -d ~/.cache/zcompdump\nzstyle ':completion:*:*:*:*:*' menu select\nzstyle ':completion:*' matcher-list 'm:{a-zA-Z}={A-Za-z}' # case insensitive tab completion\n\n# History 
configurations\nHISTFILE=~/.zsh_history\nHISTSIZE=10000\nSAVEHIST=10000\nsetopt hist_expire_dups_first # delete duplicates first when HISTFILE size exceeds HISTSIZE\nsetopt hist_ignore_dups # ignore duplicated commands history list\nsetopt hist_ignore_space # ignore commands that start with space\nsetopt hist_verify # show command with history expansion to user before running it\nsetopt share_history # share command history data\n\n# force zsh to show the complete history\nalias history=\"history 0\"\n\n# enable color support of ls, less and man, and also add handy aliases\nif [ -x /usr/bin/dircolors ]; then\n test -r ~/.dircolors && eval \"$(dircolors -b ~/.dircolors)\" || eval \"$(dircolors -b)\"\n alias ls='ls --color=auto'\n #alias dir='dir --color=auto'\n #alias vdir='vdir --color=auto'\n\n alias grep='grep --color=auto'\n alias fgrep='fgrep --color=auto'\n alias egrep='egrep --color=auto'\n alias diff='diff --color=auto'\n alias ip='ip --color=auto'\n\n export LESS_TERMCAP_mb=$'\\E[1;31m' # begin blink\n export LESS_TERMCAP_md=$'\\E[1;36m' # begin bold\n export LESS_TERMCAP_me=$'\\E[0m' # reset bold/blink\n export LESS_TERMCAP_so=$'\\E[01;33m' # begin reverse video\n export LESS_TERMCAP_se=$'\\E[0m' # reset reverse video\n export LESS_TERMCAP_us=$'\\E[1;32m' # begin underline\n export LESS_TERMCAP_ue=$'\\E[0m' # reset underline\n\n # Take advantage of $LS_COLORS for completion as well\n zstyle ':completion:*' list-colors \"${(s.:.)LS_COLORS}\"\n #zstyle ':completion:*' list-colors \"exfxcxdxbxegedabagacad\"\nfi\n\n# some more ls aliases\nalias ll='ls -l'\nalias la='ls -A'\nalias l='ls -CF'\nalias ma=\"cd /home/kebab/eclipse-workspace/mahjong_test/src/test\"\n\n# enable auto-suggestions based on the history\nif [ -f /usr/share/zsh-autosuggestions/zsh-autosuggestions.zsh ]; then\n . /usr/share/zsh-autosuggestions/zsh-autosuggestions.zsh\n # change suggestion color\n # ZSH_AUTOSUGGEST_HIGHLIGHT_STYLE='fg=#999'\n ZSH_AUTOSUGGEST_HIGHLIGHT_STYLE='fg=8'\nfi\n\n#追加設定\n#--------------------------------------------------------------------------------------\n\nprompt_context() {\n local user=`whoami`@`hostname`\n\n if [[ \"$user\" != \"root\" || -n \"$SSH_CONNECTION\" ]]; then\n # prompt_segment black yellow \" %(!.%{%F{black}%}.)$user \"\n prompt_segment black red \" %(!.%{%F{black}%}.)$user \"\n else\n prompt_segment red red \" %(!.%{%F{%}.) 
$user \"\n fi\n}\n\n# PROMPT='\n#%{%f%b%k%}$(build_prompt)\n# %B%F{yellow}❯❯%f%b '\n\n PROMPT='\n%{%f%b%k%}$(build_prompt)\n %B%F{red}❯❯%f%b '\n\nchpwd() { ls }\nset opt auto_pushd\nexport PAGER=most\nexport PATH=$PATH:/usr/local/bin/\nexport PATH=$PATH:/home/kentarou/.config/nvim/\nexport PATH=$PATH:/home/kentarou/eclipse/java-2021-03/eclipse\nexport LANG=en_US.UTF-8\nexport LANGUAGE=en_US.UTF-8\nexport LC_ALL=$LANG\n#export LANG=ja_JP.UTF-8\n\nalias c=\"clear\"\nalias -g G=\"|grep\"\nalias user=\"ssh root@192.168.1.34\"\nalias 2=\"terminator -l 2 && exit\"\nalias 3=\"terminator -l 3 && exit\"\nalias 4=\"terminator -l 4 && exit\"\nalias dev=\"terminator -l dev && exit\"\nalias dev2=\"terminator -l dev2 && exit\"\nalias init.vim=\"nvim ~/.config/nvim/init.vim\"\nalias dein.toml=\"nvim ~/.config/nvim/dein.toml\"\nalias status=\"systemctl status\"\nalias start=\"systemctl start\"\nalias stop=\"systemctl stop\"\nalias restart=\"systemctl restart\"\nalias vim=\"nvim\"\n#alias blue=\"gnome-control-center bluetooth\"\n#alias blue=\"bluetoothctl connect 00:00:00:00:58:CA\nalias e=\"exit\"\nalias wifi=\"nmcli device wifi list\"\nalias ma=\"cd eclipse-workspace/Mahjong_test/src/test\"\n#google検索\ngs() {\n google-chrome https://www.google.com/search?q=\"$*&hl=en\"\n}\n\n#google翻訳\ngt() {\n echo -n \"$*\" \": \">> ~/.eng_history &&\n trans -b :ja \"$*\" | tee -a ~/.eng_history \n}\n\n# gt() {\n# trans -b :ja \"$*\"\n# }\n\nerror() {\n $* |& read -d'あ' error ; gt $error\n}\n\nblue(){\n bluetoothctl connect 00:00:00:00:58:CA\n result=`echo $?`\n if [ $result -eq 0 ]\n then\n exit\n fi\n}\n\n\n\n\nalias wifi=\"nmcli device wifi connect \"SPWN_N36_586cae\" password 8c98718c67e60 ifname wlp2s0\"\nexport PATH=$PATH:/home/kebab/eclipse/jee-2021-03/eclipse\n\n# cat /proc/cpuinfo\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":546,"cells":{"blob_id":{"kind":"string","value":"0636e393bbf800e3e77f06afd6425185ceec9fd0"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"actiago/restic-systemd-automatic-backup"},"path":{"kind":"string","value":"/bin/restic_check.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1974,"string":"1,974"},"score":{"kind":"number","value":4.15625,"string":"4.15625"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"#!/usr/bin/env bash\n# Check the backups made with restic to Backblaze B2 for errors.\n# See restic_backup.sh on how this script is run (as it's analogous for this script).\n\nset -o errexit\nset -o pipefail\n[[ \"${TRACE-0}\" =~ ^1|t|y|true|yes$ ]] && set -o xtrace\n\n# Clean up lock if we are killed.\n# If killed by systemd, like $(systemctl stop restic), then it kills the whole cgroup and all it's subprocesses.\n# However if we kill this script ourselves, we need this trap that kills all subprocesses manually.\nexit_hook() {\n\techo \"In exit_hook(), being killed\" >&2\n\tjobs -p | xargs kill\n\trestic unlock\n}\ntrap exit_hook INT TERM\n\n# Assert that all needed environment variables are set.\nassert_envvars() {\n\tlocal varnames=(\"$@\")\n\tfor varname in \"${varnames[@]}\"; do\n\t\tif [ -z ${!varname+x} ]; then\n\t\t\tprintf \"%s must be set for this script to work.\\n\\nDid you forget to source a $INSTALL_PREFIX/etc/restic/*.env.sh profile in the current shell before 
executing this script?\\n\" \"$varname\" >&2\n\t\t\texit 1\n\t\tfi\n\tdone\n}\n\nwarn_on_missing_envvars() {\n\tlocal unset_envs=()\n\tlocal varnames=(\"$@\")\n\tfor varname in \"${varnames[@]}\"; do\n\t\tif [ -z \"${!varname}\" ]; then\n\t\t\tunset_envs=(\"${unset_envs[@]}\" \"$varname\")\n\t\tfi\n\tdone\n\n\tif [ ${#unset_envs[@]} -gt 0 ]; then\n\t\tprintf \"The following env variables are recommended, but have not been set. This script may not work as expected: %s\\n\" \"${unset_envs[*]}\" >&2\n\tfi\n}\n\nassert_envvars\\\n\tRESTIC_PASSWORD_FILE RESTIC_REPOSITORY RESTIC_VERBOSITY_LEVEL\n\nwarn_on_missing_envvars \\\n\tB2_ACCOUNT_ID B2_ACCOUNT_KEY B2_CONNECTIONS\n\nB2_ARG=\n[ -z \"${B2_CONNECTIONS+x}\" ] || B2_ARG=(--option b2.connections=\"$B2_CONNECTIONS\")\n\n# Remove locks from other stale processes to keep the automated backup running.\n# NOTE nope, don't unlock like restic_backup.sh. restic_backup.sh should take precedence over this script.\n#restic unlock &\n#wait $!\n\n# Check repository for errors.\necho restic check \\\n\t\"${B2_ARG[@]}\" \\\n\t--verbose=\"$RESTIC_VERBOSITY_LEVEL\" &\nwait $!\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":547,"cells":{"blob_id":{"kind":"string","value":"99c6d4251f217437d4db0499b25dddb09feedbae"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"jspawar/dotfiles"},"path":{"kind":"string","value":"/modules/dotfiles/bash/.bashrc"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":72,"string":"72"},"score":{"kind":"number","value":2.875,"string":"2.875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"for file in \"${HOME}\"/.config/bash/*.bash; do\n source \"${file}\"\ndone\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":548,"cells":{"blob_id":{"kind":"string","value":"d2784d404d76441188482831750237f1cc275f6d"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"tano-systems/meta-tanowrt"},"path":{"kind":"string","value":"/meta-tanowrt/recipes-core/base-files/base-files/files/sysfixtime.init"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":2695,"string":"2,695"},"score":{"kind":"number","value":3.828125,"string":"3.828125"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":["MIT","LicenseRef-scancode-unknown"],"string":"[\n \"MIT\",\n \"LicenseRef-scancode-unknown\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"#!/bin/sh /etc/rc.common\n# Copyright (C) 2013-2014 OpenWrt.org\n# Copyright (C) 2018-2022 Tano Systems LLC\n\nSTART=00\nSTOP=90\n\nHWCLOCK=/sbin/hwclock\n\nextra_command \"systohc\" \"Save system time to hardware RTC\"\nextra_command \"hctosys\" \"Load system time from hardware RTC\"\nextra_command \"systz\" \"Apply timezone\"\n\nLOG=\"logger -t sysfixtime -p\"\n\nboot() {\n\thwclock_call restore\n\n\tlocal maxtime=\"$(get_maxtime)\"\n\tlocal curtime=\"$(date +%s)\"\n\tif [ $curtime -lt $maxtime ]; then\n\t\tdate -s @$maxtime\n\t\thwclock_call save\n\tfi\n}\n\napply_timezone() {\n\tlocal config=\"$1\"\n\tlocal zonename\n\tlocal kept_in_localtime\n\n\tconfig_get zonename \"$config\" 'zonename' 'UTC'\n\tconfig_get_bool kept_in_localtime 'rtc' 'hwclock_localtime' 1\n\n\t[ -n 
\"$zonename\" ] && [ -f \"/usr/share/zoneinfo/$zonename\" ] && \\\n\t\tln -sf \"/usr/share/zoneinfo/$zonename\" /tmp/localtime\n\n\t# apply timezone to kernel\n\tRET=\"0\"\n\tif [ \"$kept_in_localtime\" = \"0\" ]; then\n\t\t${HWCLOCK} -u --systz\n\t\tRET=\"$?\"\n\telse\n\t\t${HWCLOCK} -l --systz\n\t\tRET=\"$?\"\n\tfi\n\n\tif [ \"${RET}\" = \"0\" ]; then\n\t\t$LOG daemon.info \"applied time zone '$zonename'\"\n\telse\n\t\t$LOG daemon.error \"failed to apply time zone '$zonename' (${RET})\"\n\tfi\n}\n\nhwclock_call() {\n\tlocal args rtc_dev kept_in_localtime\n\n\tconfig_load 'system'\n\tconfig_get rtc_dev 'rtc' 'hwclock_dev' '/dev/rtc0'\n\tconfig_get_bool kept_in_localtime 'rtc' 'hwclock_localtime' 1\n\n\t# Early apply timezone from system configuration\n\tconfig_foreach apply_timezone system\n\n\targs=\"\"\n\n\tif [ \"$kept_in_localtime\" = \"0\" ]; then\n\t\t# -u|--utc\n\t\tappend args \"-u\" \" \"\n\telse\n\t\t# -l|--localtime\n\t\tappend args \"-l\" \" \"\n\tfi\n\n\t# -f|--rtc\n\tappend args \"-f $rtc_dev\" \" \"\n\n\tif [ \"$1\" = \"save\" ]; then\n\t\t# -w|--systohc\n\t\tappend args \"-w\" \" \"\n\telif [ \"$1\" = \"restore\" ]; then\n\t\t# -s|--hctosys\n\t\tappend args \"-s\" \" \"\n\tfi\n\n\tRET=\"0\"\n\tif [ ! -e \"$rtc_dev\" ]; then\n\t\tRET=\"no $rtc_dev device\"\n\telif [ ! -e \"$HWCLOCK\" ]; then\n\t\tRET=\"no $HWCLOCK\"\n\telse\n\t\t$HWCLOCK $args\n\t\tRET=\"$?\"\n\tfi\n\n\tif [ \"$1\" = \"save\" ]; then\n\t\tif [ \"$RET\" = \"0\" ]; then\n\t\t\t$LOG daemon.info \"saved localtime ($(date)) to $rtc_dev\"\n\t\telse\n\t\t\t$LOG daemon.error \"failed to save localtime to $rtc_dev ($RET)\"\n\t\tfi\n\telse\n\t\tif [ \"$RET\" = \"0\" ]; then\n\t\t\t$LOG daemon.info \"set localtime ($(date)) from $rtc_dev\"\n\t\telse\n\t\t\t$LOG daemon.error \"failed to set localtime from $rtc_dev ($RET)\"\n\t\tfi\n\tfi\n}\n\nsystohc() {\n\thwclock_call save\n}\n\nhctosys() {\n\thwclock_call restore\n}\n\nsystz() {\n\tconfig_load 'system'\n\tconfig_foreach apply_timezone system\n}\n\nstart() {\n\thwclock_call restore\n}\n\nstop() {\n\thwclock_call save\n}\n\nget_maxtime() {\n\tlocal file newest\n\n\tfor file in $( find /etc -type f ) ; do\n\t\t[ -z \"$newest\" -o \"$newest\" -ot \"$file\" ] && newest=$file\n\tdone\n\t[ \"$newest\" ] && date -r \"$newest\" +%s\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":549,"cells":{"blob_id":{"kind":"string","value":"7abbb9f1e091ac126aa3cc353111b0f245f3de5d"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"SixSq/dataClay"},"path":{"kind":"string","value":"/dataclay-proxy/release.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1886,"string":"1,886"},"score":{"kind":"number","value":3.15625,"string":"3.15625"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["LicenseRef-scancode-warranty-disclaimer","Apache-2.0"],"string":"[\n \"LicenseRef-scancode-warranty-disclaimer\",\n \"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"#!/bin/bash\n\n# \n# Copyright (c) 2018, SixSq Sarl\n# \n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# \n# http://www.apache.org/licenses/LICENSE-2.0\n# \n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" 
BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n# implied. See the License for the specific language governing\n# permissions and limitations under the License.\n# \n\nif [[ $# -eq 0 ]] ; then\n echo 'Please pass the Docker image tag version as an argument'\n exit 0\nfi\n\n(cd ../orchestration/ && docker-compose up -d)\n\nTOOLSPATH=../tool/dClayTool.sh\n\nuntil $TOOLSPATH GetDataClayID \ndo \n echo \" --- waiting for dataclay\"\n sleep 2\ndone\n\npassword=`echo $(uuidgen || cat /dev/urandom) | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1`\n\nsed -i '.orig' \"s/cHaNgEmE/$password/g\" registerModel_v2.sh\n./registerModel_v2.sh\n\n./buildApp.sh\n\nlein do clean, uberjar\n\nmv cfgfiles/client.properties cfgfiles/client.properties.orig\ncat >cfgfiles/client.properties <&2 echo \"ERROR: \\$GRASP_BINDIR variable is unset.\"\n\texit 127\nfi\nGRASP_BINDIR=`realpath \"${GRASP_BINDIR}\"`\nif ! [ -d \"${GRASP_BINDIR}\" ]; then\n\t>&2 echo \"ERROR: \\$GRASP_BINDIR is not a diretory.\"\n\t>&2 echo \" GRASP_BINDIR=$GRASP_BINDIR\"\n\texit 127\nfi\n\n# If the user passes \"--preserve-tmp\" as the first argument, we'll note that down for later.\nif [ \"$1\" == \"--preserve-tmp\" ]; then\n\tpreserve_tmp=true\n\tshift\nfi\n\n# Get the case name. The user should pass only one argument.\nif [ \"$#\" -ne 1 ]; then\n\t>&2 echo \"ERROR: Invalid number of arguments passed ($#)\"\n\texit 127\nfi\nCASE=$1\nif [ \"$CASE\" != \"serial\" ] && [ \"$CASE\" != \"mpi\" ] && [ \"$CASE\" != \"mpi-longpath\" ]; then\n\t>&2 echo \"ERROR: Invalid configuration passed ($CASE)\"\n\t>&2 echo \" expected one of: serial, mpi, mpi-longpath\"\n\texit 125\nfi\n\n# Determine the paths to the necessary binaries\nfunction checkbinary {\n\tvarname=$1\n\tpath=$2\n\tif ! [ -f \"${path}\" ]; then\n\t\t>&2 echo \"ERROR: Unable to find binary $varname\"\n\t\t>&2 echo \" at ${path}\"\n\t\texit 127\n\tfi\n\t>&2 echo \"INFO: $varname=${path}\"\n}\nRNUCLEUS=\"${GRASP_BINDIR}/rnucleus\"; checkbinary RNUCLEUS $RNUCLEUS\nRCSFGENERATE=\"${GRASP_BINDIR}/rcsfgenerate\"; checkbinary RCSFGENERATE $RCSFGENERATE\nRWFNESTIMATE=\"${GRASP_BINDIR}/rwfnestimate\"; checkbinary RWFNESTIMATE $RWFNESTIMATE\nif [[ $CASE =~ ^mpi ]]; then\n\tcheckbinary RANGULAR \"${GRASP_BINDIR}/rangular_mpi\"\n\tcheckbinary RMCDHF \"${GRASP_BINDIR}/rmcdhf_mpi\"\n\tRANGULAR=\"mpirun -n 4 ${GRASP_BINDIR}/rangular_mpi\"\n\tRMCDHF=\"mpirun -n 4 ${GRASP_BINDIR}/rmcdhf_mpi\"\nelse\n\tRANGULAR=\"${GRASP_BINDIR}/rangular\"; checkbinary RANGULAR $RANGULAR\n\tRMCDHF=\"${GRASP_BINDIR}/rmcdhf\"; checkbinary RMCDHF $RMCDHF\nfi\n\n# Create a temporary directory to run GRASP in:\nTMP=`mktemp -d grasp-test-mpitmp.XXXXXXXXX` || exit 120\nTMP=`realpath \"${TMP}\"`\nif ! [ -d \"${TMP}\" ]; then\n\t>&2 echo \"ERROR: Temporary directory as not created.\"\n\t>&2 echo \" TMP=$TMP\"\n\texit 121\nfi\n# This will be called any time we exit, independent of whether it's an early exit due to a\n# failure, or the final exit when tests pass.\nfunction clean_up_tmp_directory {\n\t# Keep the temporary directory around if the user passed --preserve-tmp\n\tif [ \"$preserve_tmp\" == \"true\" ]; then\n\t\t>&2 echo \"INFO: Keeping temporary directory ${TMP}\"\n\telse\n\t\t>&2 echo \"INFO: Removing temporary directory ${TMP}\"\n\t\trm -vR \"${TMP}\"\n\tfi\n}\ntrap clean_up_tmp_directory EXIT\n>&2 echo \"INFO: switching to temporary directory: ${TMP}\"\ncd \"${TMP}\" || exit 122\n\n# Function to test existence of a generated file:\nfunction test_file_exists {\n\tif ! 
[ -f \"$1\" ]; then\n\t\t>&2 echo \"ERROR: failed to generate file $1\"\n\t\texit 50\n\tfi\n}\n\n# Run rnucleus to generate a simple isodata file\n${RNUCLEUS} <<-EOF\n\t92\n\t238\n\tn\n\t238.02891\n\t0\n\t0\n\t0\nEOF\nexitcode=$?\nif ! [ $exitcode -eq 0 ]; then\n\t>&2 echo \"ERROR: rnucleus failed with $exitcode\"\n\texit 1\nfi\ntest_file_exists \"isodata\"\n\n# Run rcsfgenerate to generate a simple CSL\n${RCSFGENERATE} <<-EOF\n\t*\n\t0\n\t1s(2,*)2s(2,*)\n\n\t2s,2p\n\t0,2\n\t2\n\tn\nEOF\nexitcode=$?\nif ! [ $exitcode -eq 0 ]; then\n\t>&2 echo \"ERROR: rcsfgenerate failed with $exitcode\"\n\texit 1\nfi\ntest_file_exists \"rcsf.out\"\nmv rcsf.out rcsf.inp || exit 2\ntest_file_exists \"rcsf.inp\"\n\n# Run rwfnestimate to generate basic orbitals\n${RWFNESTIMATE} <<-EOF\n\ty\n\t2\n\t*\nEOF\nexitcode=$?\nif ! [ $exitcode -eq 0 ]; then\n\t>&2 echo \"ERROR: rwfnestimate failed with $exitcode\"\n\texit 1\nfi\ntest_file_exists \"rwfn.inp\"\n\n# Set up MPI_TMP on MPI cases\nfunction strlen {\n\techo -n $1 | wc -c\n}\nif [[ $CASE =~ ^mpi ]]; then\n\texport MPI_TMP=`mktemp -d`\n\tfunction clean_up_mpitmp {\n\t\trm -Rv ${MPI_TMP}\n\t}\n\ttrap clean_up_mpitmp EXIT\n\tif [ \"$CASE\" == \"mpi-longpath\" ]; then\n\t\texport MPI_TMP=\"$MPI_TMP/mpitmp\"\n\t\twhile [ `strlen $MPI_TMP` -lt 80 ]; do\n\t\t\texport MPI_TMP=\"${MPI_TMP}-qwertyuiop1234567890\"\n\t\tdone\n\t\tmkdir \"${MPI_TMP}\" || exit 5\n\tfi\n\techo \"MPI_TMP=$MPI_TMP ($(strlen $MPI_TMP) characters)\"\nfi\n\n# Run rangular\necho \"Running: ${RANGULAR}\"\n${RANGULAR} <<-EOF\n\ty\nEOF\nexitcode=$?\nif ! [ $exitcode -eq 0 ]; then\n\t>&2 echo \"ERROR: rangular failed with $exitcode\"\n\texit 1\nfi\n\n# Run rmcdhf\necho \"Running: ${RMCDHF}\"\n${RMCDHF} <<-EOF\n\ty\n\t1\n\t1\n\t5\n\t*\n\t*\n\t20\nEOF\nexitcode=$?\nif ! [ $exitcode -eq 0 ]; then\n\t>&2 echo \"ERROR: rmcdhf failed with $exitcode\"\n\texit 1\nfi\ntest_file_exists \"rmcdhf.sum\"\ntest_file_exists \"rmix.out\"\n\necho \"INFO: Final directory contents:\"\nls -Alh\n\n# If we got this far, everything is a-ok\n>&2 echo \"TESTS SUCCEEDED\"\nexit 0\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":554,"cells":{"blob_id":{"kind":"string","value":"784c35aa18e0c6ca35de8550fd891b7cd7991f63"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"rjzupkoii/PSU-CIDD-Malaria-Simulation"},"path":{"kind":"string","value":"/config.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":2754,"string":"2,754"},"score":{"kind":"number","value":3.921875,"string":"3.921875"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#!/usr/bin/env bash\n\nfunction check_version() { test \"$(printf '%s\\n' \"$@\" | sort -V | head -n 1)\" != \"$1\"; }\n\n# Define some paths\nBUILD_ENV=\"$HOME/work/build_env\"\n\n# Load cmake\nmodule load cmake\n\n# Create the directory to work in\nsource=$(pwd)\nmkdir -p $BUILD_ENV\n\n# If PostgreSQL isn't present, download and install it to the user directory\nif [ ! 
-d \"$BUILD_ENV/postgres\" ]; then\n # Get the source files\n cd $BUILD_ENV\n wget https://github.com/postgres/postgres/archive/master.zip\n unzip master.zip\n rm master.zip\n\n # Build the source files\n cd postgres-master\n ./configure --prefix=$BUILD_ENV/postgres\n make -j 8\n make install\n cd ..\n rm -rf $BUILD_ENV/postgres-master\n\n # Export the relevent variables \n export PATH=\"$BUILD_ENV/postgres/bin:$PATH\"\n export PKG_CONFIG_PATH=\"$BUILD_ENV/postgres/lib/pkgconfig:$PKG_CONFIG_PATH\"\n export LIBRARY_PATH=\"$BUILD_ENV/postgres/lib:$LIBRARY_PATH\"\n\n # Prepare the libpqxx library, limit to version 7.0.0\n git clone https://github.com/jtv/libpqxx.git\n cd libpqxx\n git checkout 7.0.0\n ./configure --disable-documentation --prefix=$BUILD_ENV/lib\n make -j 8\n make install\nfi\n\n# If vcpkg doesn't already exist as a directory, load it\nif [ ! -d \"$BUILD_ENV/vcpkg\" ]; then\n cd $BUILD_ENV\n wget https://github.com/microsoft/vcpkg/archive/2019.08.tar.gz\n tar xf 2019.08.tar.gz\n rm 2019.08.tar.gz\n mv vcpkg-2019.08 vcpkg\n cd vcpkg\n ./bootstrap-vcpkg.sh\nfi\n\n# Load the relevent packages\ncd $BUILD_ENV/vcpkg\n./vcpkg install yaml-cpp fmt date args\n\n# Return to the source directory\ncd $source\n\n# Load GSL so we can set the correct path\nmodule use /storage/icds/RISE/sw8/modules\nmodule load gsl\n\n# Create the build script\nif [ ! -d \"build\" ]; then\n mkdir -p build\n cd build\n toolchain=\"$BUILD_ENV/vcpkg/scripts/buildsystems/vcpkg.cmake\"\n echo \"module use /storage/icds/RISE/sw8/modules\" >> build.sh\n echo \"module load gsl\" >> build.sh\n echo \"module load cmake\" >> build.sh\n echo \"export GSL_ROOT_DIR=`gsl-config --prefix`\" >> build.sh\n echo \"export PATH=$PATH\" >> build.sh\n echo \"export LIBRARY_PATH=$BUILD_ENV/postgres/lib:$BUILD_ENV/lib/lib:$LIBRARY_PATH\" >> build.sh\n echo \"cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_TOOLCHAIN_FILE=$toolchain -DBUILD_CLUSTER:BOOL=true ..\" >> build.sh\n echo \"make -j 8\" >> build.sh\n chmod +x build.sh\nfi\n\n# Notify the user of recommended .bashrc changes\nBIWhite='\\033[1;97m'\nDEFAULT='\\033[0m'\n\nLIBPQ=~/work/build_env/postgres/lib/\necho -e \"${BIWhite}Configuration complete, you may wish to update ~/.bashrc with the following:${DEFAULT}\\n\"\necho \" # Configure runtime environment\"\necho \" module use /storage/icds/RISE/sw8/modules\"\necho \" module load gsl\"\necho \" LD_LIBRARY_PATH=\\$LD_LIBRARY_PATH:`dirname $LIBPQ`/`basename $LIBPQ`\"\necho\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":555,"cells":{"blob_id":{"kind":"string","value":"a25925e3ca923a0a56fe36a89878131ca38a59a3"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"jaredjennings/cmits-unclass"},"path":{"kind":"string","value":"/modules/stig_misc/files/login_history/gdm-post-login.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":209,"string":"209"},"score":{"kind":"number","value":2.59375,"string":"2.59375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"#!/bin/sh\n# Fulfill AFMAN 33-223, section 5.5.2, and UNIX SRG rules GEN000452 and\n# GEN000454.\ntext=\"`/usr/sbin/loginhistory $LOGNAME`\"\n[[ \"$text\" =~ \\! 
]] && sw=--error || sw=--info\nzenity $sw --text=\"$text\"\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":556,"cells":{"blob_id":{"kind":"string","value":"2bd9b4095a115569378c09822197e3bd334e594e"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"hashicorp-demoapp/instruqt"},"path":{"kind":"string","value":"/packer/bootstrap.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":2614,"string":"2,614"},"score":{"kind":"number","value":3.140625,"string":"3.140625"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#!/bin/bash\nset -e\n\nexport HOME=/root\n\n# Hack to make sure we don't start installing packages until the filesystem is available.\necho \"waiting 180 seconds for cloud-init to update /etc/apt/sources.list\"\ntimeout 180 /bin/bash -c \\\n 'until stat /var/lib/cloud/instance/boot-finished 2>/dev/null; do echo waiting ...; sleep 1; done'\n\n# Install packages.\nexport DEBIAN_FRONTEND=noninteractive\napt-get update\napt-get -y install \\\n apt-transport-https \\\n ca-certificates \\\n software-properties-common \\\n git curl wget \\\n conntrack socat \\\n inotify-tools \\\n unzip \\\n make golang-go \\\n jq vim nano emacs joe \\\n bash-completion\n\ncurl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -\nadd-apt-repository \\\n \"deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable\"\napt update\napt-get -y install \\\n docker-ce \\\n docker-ce-cli \\\n containerd.io\n\n# Make sure SSH does not break.\napt-get -y remove sshguard\n\n# Disable auto updates as they break things.\nsystemd-run --property=\"After=apt-daily.service apt-daily-upgrade.service\" --wait /bin/true\nsystemctl mask apt-daily.service apt-daily-upgrade.service\n\n# Improve the startup sequence\ncp /tmp/google-startup-scripts.service /etc/systemd/system/multi-user.target.wants/google-startup-scripts.service\n\n# Start Docker, in case we need to pre-pull images in derivatives of this image.\nsystemctl daemon-reload\nsystemctl enable docker\nsystemctl start docker\n\nVERSION=1.5.0\nOS=linux\nARCH=amd64\ncurl -fsSL \"https://github.com/GoogleCloudPlatform/docker-credential-gcr/releases/download/v${VERSION}/docker-credential-gcr_${OS}_${ARCH}-${VERSION}.tar.gz\" \\\n | tar xz --to-stdout ./docker-credential-gcr \\\n > /usr/bin/docker-credential-gcr && chmod +x /usr/bin/docker-credential-gcr\n\ndocker-credential-gcr configure-docker\n\n# Install shipyard\ncurl https://shipyard.run/install | bash\n\n# Run the blueprint\nshipyard run github.com/hashicorp-demoapp/infrastructure//blueprint\n\n# Replace with a nice check at some point\nsleep 60\n\n# Pause the application\nshipyard pause\n\n# Install Tools\n\n## Install Vault\nwget https://releases.hashicorp.com/vault/1.3.1/vault_1.3.1_linux_amd64.zip\nunzip vault_1.3.1_linux_amd64.zip\nmv vault /usr/bin\n\n## Install Consul\nwget https://releases.hashicorp.com/consul/1.6.2/consul_1.6.2_linux_amd64.zip\nunzip consul_1.6.2_linux_amd64.zip\nmv consul /usr/bin\n\n## Install Kubectl \ncurl -LO https://storage.googleapis.com/kubernetes-release/release/`curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt`/bin/linux/amd64/kubectl\nchmod +x kubectl\nmv kubectl 
/usr/bin"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":557,"cells":{"blob_id":{"kind":"string","value":"47e3677fc165d09c343e51397c66a5cfa1bcb603"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"danielxiaowxx/generator-ionic"},"path":{"kind":"string","value":"/generators/update-project/templates/release-android-app.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1101,"string":"1,101"},"score":{"kind":"number","value":2.875,"string":"2.875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"#!/usr/bin/env bash\ngulp clean\ngulp release\nmv www www_back\nmv release/www www\n\nionic plugin rm cordova-plugin-console\n\n#多渠道-开始\nmkdir ./tmp\nmv ./platforms/android/AndroidManifest.xml ./tmp/\necho '' > release.log\n\nfor version in 'YingYongBao' 'Test'\ndo\n cp -f ./tmp/AndroidManifest.xml ./platforms/android/\n sed -i -- \"s/Channel_ID/$version/g\" ./platforms/android/AndroidManifest.xml\n cat ./platforms/android/AndroidManifest.xml | grep $version >> release.log\n\n ionic build --release android\n jarsigner -verbose -sigalg SHA1withRSA -digestalg SHA1 -keystore <%= appName %>-release-key.keystore -storepass $<%= snakeCaseAppName %>_storepass -keypass $<%= snakeCaseAppName %>_keypass platforms/android/build/outputs/apk/android-release-unsigned.apk <%= appName %>\n $ANDROID_HOME/build-tools/23.0.1/zipalign -v 4 platforms/android/build/outputs/apk/android-release-unsigned.apk release/<%= appName %>-$version.apk\ndone\n\nmv -f ./tmp/AndroidManifest.xml ./platforms/android/\nrm -rf ./tmp\n#多渠道-结束\n\nionic plugin add cordova-plugin-console\n\nrm -rf www\nmv www_back www\n\ncat release.log"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":558,"cells":{"blob_id":{"kind":"string","value":"272fd256618f33f416fe9782c951835077a26ae4"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"ThomasAdam/tmux-ARCHIVED"},"path":{"kind":"string","value":"/tags/TMUX_1_4/tools/fix-ids.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":228,"string":"228"},"score":{"kind":"number","value":2.890625,"string":"2.890625"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"# $Id: fix-ids.sh,v 1.3 2009-07-01 19:03:34 nicm Exp $\n\nfor i in *.[ch] tmux.1; do\n (head -1 $i|grep '$OpenBSD' >/dev/null) || continue\n mv $i $i~ || exit\n sed 's/\\$OpenBSD.* \\$/$\\Id$/' $i~ >$i || exit\n echo $i\ndone\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":559,"cells":{"blob_id":{"kind":"string","value":"c384685ee2f1516192b52a6dbbd49721f4c9798d"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"lastweek/rdma_bench_dirty"},"path":{"kind":"string","value":"/rc-swarm/sweep.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":856,"string":"856"},"score":{"kind":"number","value":3.328125,"string":"3.328125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"# A function to echo in blue color\nfunction blue() {\n\tes=`tput setaf 4`\n\tee=`tput sgr0`\n\techo \"${es}$1${ee}\"\n}\n\n# Sweep over params\n# This is separate from run-all.sh, which does not sweep\n\n# Empty existing sweep output\nrm -f sweep/temp_out\n\n# 6 machines on NetApp, so increment NUM_WORKERS by 6\nfor VM_PER_MACHINE in 1 2 3 4 5 6 7 8 9; do\n\tfor WINDOW_SIZE in `seq 8 8 32`; do\n\t\tfor UNSIG_BATCH in 1; do\n\t\t\tfor NUM_WORKERS in 154; do\n\t\t\t\t# Do work for these params\n\t\t\t\trm sweep.h\n\t\t\t\ttouch sweep.h\n\n\t\t\t\techo \"#define SIZE 32\" >> sweep.h\n\t\t\t\techo \"#define VM_PER_MACHINE $VM_PER_MACHINE\" >> sweep.h\n\t\t\t\techo \"#define WINDOW_SIZE $WINDOW_SIZE\" >> sweep.h\n\t\t\t\techo \"#define NUM_WORKERS $NUM_WORKERS\" >> sweep.h\n\t\t\t\techo \"#define UNSIG_BATCH $UNSIG_BATCH\" >> sweep.h\n\n\t\t\t\tmake clean\n\t\t\t\tmake\n\n\t\t\t\tblue \"Starting run\"\n\t\t\t\t./run-all.sh\n\t\t\tdone\n\t\tdone\n\tdone\ndone\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":560,"cells":{"blob_id":{"kind":"string","value":"9c6b41dd3b492bce38174e7ad041ba40585935a5"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"kinglionsoft/farmer"},"path":{"kind":"string","value":"/Micro/docker/gdi/build-docker-base-image.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1259,"string":"1,259"},"score":{"kind":"number","value":3.5625,"string":"3.5625"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"#!/bin/bash\n\n# Create GDI+\\TTF images for .NetCore\n\nimages=(runtime:2.1 runtime:2.2 runtime:3.0 aspnet:2.1 aspnet:2.2 aspnet:3.0 aspnet:3.1)\nofficial=mcr.microsoft.com/dotnet/core/\nyx=registry.local.com/dotnetcore/\nproxy=http://192.168.1.123:11080/\n\nrm -r gdi\nmkdir gdi\ncp sources.list ./gdi\nfor img in ${images[*]}\ndo\n # GDI+\n gdi=$yx$(echo $img | sed 's/:/-gdi:/')\n echo '========================='\n echo building $gdi\n if [[ \"$(docker image ls -q $gdi 2> /dev/null)\" != \"\" ]]; then\n echo $gdi exists\n else\n pushd gdi\n tee Dockerfile << EOF\nFROM $official$img\nADD sources.list /etc/apt/\nRUN apt-get update \\\n&& apt-get install -y --allow-unauthenticated \\\n libc6-dev \\\n libgdiplus \\\n libx11-dev \\\n && rm -rf /var/lib/apt/lists/*\nEOF\n docker build -t $gdi .\n rm Dockerfile\n popd\n fi\n\n # TTF\n echo '========================='\n ttf=$yx$(echo $img | sed 's/:/-ttf:/')\n echo building $ttf\n if [[ \"$(docker image ls -q $ttf 2> /dev/null)\" != \"\" ]]; then\n echo $ttf exists\n else\n pushd font\n tee Dockerfile << EOF \nFROM $gdi\nCOPY ttf/* /usr/share/fonts/winFonts/ \nEOF\n docker build -t $ttf .\n rm Dockerfile\n popd\n 
fi\ndone\n\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":561,"cells":{"blob_id":{"kind":"string","value":"3413a3b2fd40dad073251fc056a2872f707ce2cc"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"alexalmansa/rbacF2"},"path":{"kind":"string","value":"/rbac-0.3/rbacb/rbac_dir/setup"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":4763,"string":"4,763"},"score":{"kind":"number","value":2.640625,"string":"2.640625"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#!/bin/bash\n\n\n#FUNCIONS\n\nfunction creaDaemonEntorn()\n{\n systemctl stop dimoniRoot\n systemctl disable dimoniRoot\n rm /lib/systemd/system/dimoniRoot.service\n systemctl daemon-reloadg\n systemctl reset-failed\n\n cat <> /lib/systemd/system/dimoniRoot.service\n [Unit]\n Description=daemon root service\n After=network.target\n [Service]\n Type=simple\n Restart=always\n RestartSec=5\n ExecStart=/data/users/config/escolta.sh\n [Install]\n WantedBy=multi-user.target\nEOT\n\n systemctl start dimoniRoot\n systemctl enable dimoniRoot\n}\n\nfunction creaDaemonMail()\n{\n systemctl stop dimoniMail\n systemctl disable dimoniMail\n rm /lib/systemd/system/dimoniMail.service\n systemctl daemon-reload\n systemctl reset-failed\n\n cat <> /lib/systemd/system/dimoniMail.service\n [Unit]\n Description=daemon mail service\n After=network.target\n [Service]\n User=root\n Type=simple\n Restart=always\n RestartSec=5\n ExecStart=/data/users/config/repMail.sh\n [Install]\n WantedBy=multi-user.target\nEOT\n\n systemctl start dimoniMail\n systemctl enable dimoniMail\n}\n\nfunction creaConfigs()\n{\n cp /usr/bin/rbac/rbac_dir/enviroment /data/users/config/\n chmod 755 /data/users/config/enviroment\n cp /usr/bin/rbac/rbac_dir/escolta.sh /data/users/config/\n chmod 755 /data/users/config/escolta.sh\n cp /usr/bin/rbac/rbac_dir/.envia.sh /data/users/config/\n chmod 755 /data/users/config/.envia.sh\n cp /usr/bin/rbac/rbac_dir/repMail.sh /data/users/config/\n chmod 755 /data/users/config/repMail.sh\n cp /usr/bin/rbac/rbac_dir/gestioEntorn /data/users/config/\n chmod 755 /data/users/config/gestioEntorn\n cp /usr/bin/rbac/rbac_dir/removeEnviroment /data/users/config/\n chmod 755 /data/users/config/removeEnviroment\n\n\n cd /data/users/config\n \n cat < datastore\n\n /etc/skel\n 0\n 0\n x\n x\n x\n 0.5\n 512\n 50m\n 100m\nEOT\n\n cat < visitor\n bash,touch,mkdir,rm,ls,vim,nano\n /etc/skel\n 1 day\n 1 day\n x\n x\n visitor\n 0.75\n 512\n 150m\n 300m\nEOT\n\n cat < basic\n bash,touch,mkdir,rm,ls,vim,nano,gcc,make,kill\n /etc/skel\n 1 day\n persistent\n 6000\n 30000\n compartida\n 1\n 1024\n 200m\n 400m\nEOT\n\n cat < medium\n bash,touch,mkdir,rm,ls,vim,nano,gcc,make,kill,java,ln,ps,python,pip,valgrind,grep,awk,sed\n /etc/skel\n 1 day\n persistent\n 6000\n 30000\n compartida\n 1.5\n 1536\n 500m\n 1g\nEOT\n\n #cal afegir dos mes al advanced\n\n cat < advanced\n bash,touch,mkdir,rm,ls,vim,nano,gcc,make,kill,java,ln,ps,python3,pip,valgrind,grep,awk,sed,chmod,chown,strace,cat,mv,rm,rmdir,clear\n /etc/skel\n persistent\n persistent\n 6000\n 30000\n compartida\n 2\n 2048\n 750m\n 1.5g\nEOT\n}\n\nfunction creaFitxerBase()\n{\n cd /data/users\n cat < configuracio\n /data/users/config\n 30000\n 30000\n mac12llm@gmail.com \n /data/docker\n user.info\n \nEOT\n} \n\n\nfunction creaSSH()\n{\n mkdir -p 
/data/users/config/ssh/$admin/\n cat < /data/users/config/ssh/$admin/authorized_keys\n ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDSVBbuiFnNig/DOY5hbV+XjDAaSwEv/+JXpWK/CyyjRrYmLbUG1SzYqoqs5GnX2QBUf1zVBrCvnFcos37uAHXQpeTXnnMi1wzREO7mW6XTRt4TY3rusPwuQkcD3+RT14Xm5f9Nw1wY0fDAy5wBAoCe8ir4VUkWWBkaeQ4Mb0Wh2ecspwgg5I4nY24qJIhX01DqEvzP1LQY8/lKn57HUmzFLpRQipfNDcx/4krGdgIeDdwJv8vOK03o8razVMKs11Af+lhcHsBWwDSFgo1owfsvdMLQX9THy9XqpgvQXC7rAr9C/99eXcBPizAKIpnxytjiU3pIAV/ZTTsT7v3RHnq5 alexalmansa@alex.local\n\nEOT\n\n chmod 755 /data/users/config/ssh\n chmod 755 /data/users/config/ssh/authorized_keys\n}\n\n\n\n#CONSTANTS\n\nuserhome=\"$1\"\n#admin=\"alex\"\nadmin=\"$1\"\n#SCRIPT\n\n#Afegim grups dels diferents rols\ngroupadd datastore\ngroupadd visitor\ngroupadd basic\ngroupadd medium\ngroupadd advanced\n\nrm -rf /data/users/config\nmkdir -p /data/users/config/googleauth\n\n#Crea el fitxer de configuracio que te el mail i direccio\ncreaFitxerBase\n\n#Crea els diferents fitxers de configuració per cada grup, i copia els programes necessaris\ncreaConfigs\n\n#Crea el dimoni encarregat de executar els borrats de usuaris, homes i entorns\ncreaDaemonEntorn\n\n#Crea el dimoni encarregat d'enviar el mail amb el request command\ncreaDaemonMail\n\ncreaSSH\n\nmkdir /var/log/rbac\n\ntouch /var/log/rbac/docker.log\nchmod 666 /var/log/rbac/docker.log\ntouch /var/log/rbac/user.log\nchmod 666 /var/log/rbac/user.log\ntouch /var/log/rbac/request.log\nchmod 666 /var/log/rbac/request.log\n\n\necho \"Copiant fitxers\"\ncp -r /usr/bin/rbac/rbac_dir/.bashrc /etc/skel\ncp -r /usr/bin/rbac/rbac_dir/sshd_config /etc/ssh\ncp -r /usr/bin/rbac/rbac_dir/sshd /etc/pam.d/\n\ncp -r /usr/bin/rbac/carpeta /data/\n\ndocker network create -d bridge compartida\n\necho \"Restarting ssh service\"\nservice ssh restart\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":562,"cells":{"blob_id":{"kind":"string","value":"d2cc9fa3e390d1e510429add93084974b2e0538b"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"uyirex/AzuraCast"},"path":{"kind":"string","value":"/update.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":564,"string":"564"},"score":{"kind":"number","value":3.203125,"string":"3.203125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["Apache-2.0","LicenseRef-scancode-warranty-disclaimer"],"string":"[\n \"Apache-2.0\",\n \"LicenseRef-scancode-warranty-disclaimer\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"#!/usr/bin/env bash\n\nfunction phpuser {\n sudo -u azuracast php $@\n}\n\nexport www_base=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\nexport app_base=`realpath $www_base/..`\nexport util_base=$www_base/util\nexport tmp_base=$app_base/www_tmp\n\n# Stop system tasks\nservice nginx stop\nservice cron stop\n\n# Pull down update\ngit reset --hard\ngit pull\n\nchmod a+x ./update.sh\n\n# Clear cache\nrm -rf $tmp_base/cache/*\n\ncd $util_base\nphpuser cli.php cache:clear\nphpuser doctrine.php orm:schema-tool:update --force\n\n# Restart services\nservice cron start\nservice nginx 
start"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":563,"cells":{"blob_id":{"kind":"string","value":"cfa36423f55d347e9b111d015af286d37b230b12"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"ConorMcFeelyQUB/cloud-scripts"},"path":{"kind":"string","value":"/big-test/deploy-all.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":4928,"string":"4,928"},"score":{"kind":"number","value":3.296875,"string":"3.296875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"set -ex\n\nADVERT_DB_INSTANCE_NAME=\"advert-db-instance\"\nPAGE_DB_INSTANCE_NAME=\"page-db-instance\"\nDB_PASSWORD=\"QUBccProject\"\nDB_TEIR=\"db-n1-standard-2\"\nREGION=\"europe-west2\"\nZONE=\"europe-west2-a\"\n\n#creating sql instance for advert db\ngcloud sql instances create $ADVERT_DB_INSTANCE_NAME \\\n --tier=\"db-n1-standard-2\" \\\n --region=\"europe-west2\" \n\n#set the rootpassword for the new sql instance\ngcloud sql users set-password root --host=% --instance $ADVERT_DB_INSTANCE_NAME --password $DB_PASSWORD\n\n\n# \\\n# --availability-type= regional\n#creating sql instance for page db\ngcloud sql instances create $PAGE_DB_INSTANCE_NAME \\\n --tier=\"db-n1-standard-2\" \\\n --region=\"europe-west2\"\n\n#set the rootpassword for the new sql instance\ngcloud sql users set-password root --host=% --instance $PAGE_DB_INSTANCE_NAME --password $DB_PASSWORD\n\n#enable sqladmin service\ngcloud services enable sqladmin.googleapis.com\n\n#allow http traffic\ngcloud compute firewall-rules create default-allow-http-8080 \\\n --allow tcp:8080 \\\n --source-ranges 0.0.0.0/0 \\\n --target-tags http-server \\\n --description \"Allow port 8080 access to http-server\"\n\n\n############################################################\n#Creating db tables and putting initial data\n\nSTATIC_IP_SQL_SETUP_INSTANCE=\"static-sql-setup\"\n\n#Creating a static IP for the sqlsetup vm\ngcloud compute addresses create $STATIC_IP_SQL_SETUP_INSTANCE \\\n --region $REGION \\\n\n#Storing the newly created static ip \nSTATIC_IP_SQL_SETUP=\"$(gcloud compute addresses describe $STATIC_IP_SQL_SETUP_INSTANCE --region $REGION --format='get(address)')\"\n\n\n#Add setup IP to authorised list for advert sql instance\ngcloud --quiet sql instances patch $ADVERT_DB_INSTANCE_NAME --authorized-networks=\"${STATIC_IP_SQL_SETUP}\",\n\n#Add setup IP to authorised list for page sql instance\ngcloud --quiet sql instances patch $PAGE_DB_INSTANCE_NAME --authorized-networks=\"${STATIC_IP_SQL_SETUP}\",\n\n#create vm instance to run mysql commands\n\nSQL_SETUP_INSTANCE_NAME=\"sql-setup-vm-instance\"\n\ngcloud compute instances create $SQL_SETUP_INSTANCE_NAME \\\n --image-family=debian-9 \\\n --image-project=debian-cloud \\\n --machine-type=g1-small \\\n --scopes userinfo-email,cloud-platform \\\n --metadata-from-file startup-script=startup-script-sql-setup.sh \\\n --zone $ZONE \\\n --tags http-server \\\n --address ${STATIC_IP_SQL_SETUP}\n\n\n#################################\n#Creating static ips for 3 VM instances and store ips\nSTATIC_IP_ADVERT_INSTANCE=\"static-advert\"\nSTATIC_IP_SEARCH_INSTANCE=\"static-search\"\nSTATIC_IP_INDEXER_INSTANCE=\"static-indexer\"\n\n#advert\ngcloud compute addresses create $STATIC_IP_ADVERT_INSTANCE \\\n --region $REGION \\\n\nSTATIC_IP_ADVERT=\"$(gcloud compute addresses describe 
$STATIC_IP_ADVERT_INSTANCE --region $REGION --format='get(address)')\"\n\n#search\ngcloud compute addresses create $STATIC_IP_SEARCH_INSTANCE \\\n --region $REGION \\\n\nSTATIC_IP_SEARCH=\"$(gcloud compute addresses describe $STATIC_IP_SEARCH_INSTANCE --region $REGION --format='get(address)')\"\n\n#indexer\ngcloud compute addresses create $STATIC_IP_INDEXER_INSTANCE \\\n --region $REGION \\\n\nSTATIC_IP_INDEXER=\"$(gcloud compute addresses describe $STATIC_IP_INDEXER_INSTANCE --region $REGION --format='get(address)')\"\n\n\n#authorise advert and search for the advert sql instance\ngcloud --quiet sql instances patch $ADVERT_DB_INSTANCE_NAME --authorized-networks=\"${STATIC_IP_SQL_SETUP}\",\"${STATIC_IP_ADVERT}\",\"${STATIC_IP_SEARCH}\",\n\n\n#authorise indexer and search for the page sql instance\ngcloud --quiet sql instances patch $PAGE_DB_INSTANCE_NAME --authorized-networks=\"${STATIC_IP_SQL_SETUP}\",\"${STATIC_IP_INDEXER}\",\"${STATIC_IP_SEARCH}\",\n\n#Now create the 3 VM instances giving the static ips\n\nADVERT_VM_INSTANCE_NAME=\"advert-vm-instance\"\nSEARCH_VM_INSTANCE_NAME=\"search-vm-instance\"\nINDEXER_VM_INSTANCE_NAME=\"indexer-vm-instance\"\n\n\n#advert\ngcloud compute instances create $ADVERT_VM_INSTANCE_NAME \\\n --image-family=debian-9 \\\n --image-project=debian-cloud \\\n --machine-type=g1-small \\\n --scopes userinfo-email,cloud-platform \\\n --metadata-from-file startup-script=startup-script-advert.sh \\\n --zone $ZONE \\\n --tags http-server \\\n --address ${STATIC_IP_ADVERT}\n\n#search\ngcloud compute instances create $SEARCH_VM_INSTANCE_NAME \\\n --image-family=debian-9 \\\n --image-project=debian-cloud \\\n --machine-type=g1-small \\\n --scopes userinfo-email,cloud-platform \\\n --metadata-from-file startup-script=startup-script-search.sh \\\n --zone $ZONE \\\n --tags http-server \\\n --address ${STATIC_IP_SEARCH}\n\n#indexer\ngcloud compute instances create $INDEXER_VM_INSTANCE_NAME \\\n --image-family=debian-9 \\\n --image-project=debian-cloud \\\n --machine-type=g1-small \\\n --scopes userinfo-email,cloud-platform \\\n --metadata-from-file startup-script=startup-script-indexer.sh \\\n --zone $ZONE \\\n --tags http-server \\\n --address ${STATIC_IP_INDEXER}\n\n\n#FIN\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":564,"cells":{"blob_id":{"kind":"string","value":"b132c37c36d05f9ab46a0f2c36eaff9abb98f1db"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"pklepikov/SRLinux_basics"},"path":{"kind":"string","value":"/00.prepare_host.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":419,"string":"419"},"score":{"kind":"number","value":2.96875,"string":"2.96875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#! /bin/bash\n\n# Copy the SRL inmage X.Y.Z-N.tar.xz file into Project directory on Centos 8 Host Machine.\n # TBD \n\n# Copy the license.key into Project directory.\n # TBD\n\n# Load the docker image. 
\n# - To load the image, the user must have root privilege, or be part of the docker group.\n docker image load -i 20.6.1-286.tar.xz\n\n# Turn off the Docker0 Tx checksum offload:\n ethtool --offload docker0 tx off\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":565,"cells":{"blob_id":{"kind":"string","value":"fe2d8d8990f15bf4c484774b933718796d21a85a"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"prariehill/sushi-card-gitbook-theme"},"path":{"kind":"string","value":"/src/build.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":979,"string":"979"},"score":{"kind":"number","value":2.765625,"string":"2.765625"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"#!/bin/bash\n\n# Install fonts\n\nbower install;\n\n# Build global Sushi Card CSS\n\nlessc \"src/css/web.less\" \"_assets/website/sushi.css\";\n\nlessc \"src/css/print.less\" \"_assets/ebook/sushi.css\";\n\n# Put fonts where GitBook can find them\n\n# Lato\nmkdir -p _assets/website/fonts/lato;\ncp -R bower_components/lato/font/ _assets/website/fonts/lato/;\n\ncp bower_components/lato/README.md _assets/website/fonts/lato;\n\nmkdir -p _assets/ebook/fonts/lato;\ncp -R bower_components/lato/font/ _assets/ebook/fonts/lato/;\n\n# League Gothic\nmkdir -p _assets/website/fonts/league-gothic;\ncp -R bower_components/league-gothic/webfonts/leaguegothic-regular* _assets/website/fonts/league-gothic/;\n\ncp bower_components/league-gothic/*.markdown _assets/website/fonts/league-gothic/;\n\nmkdir -p _assets/ebook/fonts/league-gothic;\ncp -R bower_components/league-gothic/webfonts/leaguegothic-regular* _assets/ebook/fonts/league-gothic/;\n\ncp bower_components/league-gothic/*.markdown _assets/ebook/fonts/league-gothic/;"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":566,"cells":{"blob_id":{"kind":"string","value":"434be69408f27267e7b3fe5732d2e1d46e0e0be8"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"m-lab/epoxy-images"},"path":{"kind":"string","value":"/configs/stage3_ubuntu/opt/mlab/bin/generate_network_config.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":2657,"string":"2,657"},"score":{"kind":"number","value":3.90625,"string":"3.90625"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"#!/bin/bash\n#\n# generate_network_config.sh finds the epoxy.ip= kernel parameter, parses it and\n# writes a networkd configuration file for the static IP to the named file.\n# generate_network_config also sets the machine hostname.\n\nOUTPUT=${1:?Please provide the name for writing config file}\n\n# TODO: Modify ePoxy to recognize both IPv4 and IPv6 addresses when\n# authenticating requests from nodes. 
For nodes in an environment where an\n# upstream device may have IPv6 autoconfiguration/discovery turned on, the node\n# may get an autoconf address which is not the one we use for the node.\n# Additionally, when we finally configure IPv6 on nodes, if ePoxy is not\n# configured to recognize both IPv4 and IPv6 addresses, then requests from\n# legitimate nodes from IPv6 addresses will fail.\n#\n# Disable IPv6 autoconf.\necho \"0\" > /proc/sys/net/ipv6/conf/all/accept_ra\necho \"0\" > /proc/sys/net/ipv6/conf/all/autoconf\n\n# Extract the epoxy.hostname parameter from /proc/cmdline\nif [[ `cat /proc/cmdline` =~ epoxy.hostname=([^ ]+) ]]; then\n HOSTNAME=${BASH_REMATCH[1]}\nelse\n HOSTNAME=\"localhost\"\nfi\n\n# IPv4\n#\n# Extract the epoxy.ipv4= parameter from /proc/cmdline.\n#\n# For example:\n# epoxy.ipv4=4.14.159.86/26,4.14.159.65,8.8.8.8,8.8.4.4\nif [[ `cat /proc/cmdline` =~ epoxy.ipv4=([^ ]+) ]]; then\n FIELDS_IPv4=${BASH_REMATCH[1]}\nelse\n # Use default values for VM testing.\n FIELDS_IPv4=\"192.168.0.2,192.168.0.1,8.8.8.8,8.8.4.4\"\nfi\n\n# Extract all helpful IPv4 fields.\nADDR_IPv4=$( echo $FIELDS_IPv4 | awk -F, '{print $1}' )\nGATEWAY_IPv4=$( echo $FIELDS_IPv4 | awk -F, '{print $2}' )\nDNS1_IPv4=$( echo $FIELDS_IPv4 | awk -F, '{print $3}' )\nDNS2_IPv4=$( echo $FIELDS_IPv4 | awk -F, '{print $4}' )\n\n# IPv6\n#\n# Extract the epoxy.ipv6= parameter from /proc/cmdline.\n#\n# For example:\n# epoxy.ipv6=2001:1900:2100:2d::86/64,2001:1900:2100:2d::1,2001:4860:4860::8888,2001:4860:4860::8844\nif [[ `cat /proc/cmdline` =~ epoxy.ipv6=([^ ]+) ]]; then\n FIELDS_IPv6=${BASH_REMATCH[1]}\nfi\n\n# Extract all helpful IPv6 fields.\nADDR_IPv6=$( echo $FIELDS_IPv6 | awk -F, '{print $1}' )\nGATEWAY_IPv6=$( echo $FIELDS_IPv6 | awk -F, '{print $2}' )\nDNS1_IPv6=$( echo $FIELDS_IPv6 | awk -F, '{print $3}' )\nDNS2_IPv6=$( echo $FIELDS_IPv6 | awk -F, '{print $4}' )\n\n\n# Note, we cannot set the hostname via networkd. Use hostnamectl instead.\nhostnamectl set-hostname ${HOSTNAME}\n\n# TODO: do not hardcode /26.\n# TODO: do not hardcode eth0.\ncat > ${OUTPUT} <&1 | awk -F '\"' '/version/ {print $2}')\necho \"java version $version\"\nif [[ \"$version\" < \"1.8\" ]]; then\n\terror_all \"Error: version JAVA < 1.8\"\nelse\n\techo \"JAVA_HOME=$JAVA_HOME . OK!\"\nfi\n\n# Останавливаем сервисы xvfb если они по какой-то причине остались с прошлой сборки\n# для инфомраци: не обладаем правами закрыть процессы другого пользователя\necho \"Getting rid of traces left by Xvfb\"\nkillall Xvfb || true\nrm -Rf $HOME/xvfb/*\n\necho \" --- Setting up build environment --- \"\n# Setting up local maven repo for this specific version (branch).\nexport MAVEN_OPTS=\"-Xmx768m -Dfile.encoding=UTF-8 -Dmaven.repo.local=${HOME}/maven_repo/${branch}\"\n\n# Prepare INSTALL_PATH\nwildflyVars=(wildflyVersion wildflyPackage installPath)\n# TASK-86433, v.semchenko (25.08.2017): версию берем из work.properties настройки argus.teamcity.server-package\nwildflyVersion=`sed -n 's/argus\\.teamcity\\.server\\-package=wildfly\\-//p' ${WORKSPACE}/work.properties`\nwildflyPackage=\"wildfly-${wildflyVersion}\"\ninstallPath=\"${HOME}/servers/$branch/$wildflyPackage/\"\n\n# Check wildfly var for installpath\nfor var in ${wildflyVars[@]}; do\n if [ -z \"${!var}\" ]; then\n error_all \"Variable $var is not set\"\n fi\ndone\n\ncd $BUILDDIR\nif [ ! -d $installPath ]; then\n echo \"WildFly directory does not exist, will create when install distrib.\"\nelse\n\t# на всяки случай чистим каталог будущей установки\n\techo \"WildFly directory exists. 
Cleaning directory.\"\n\trm -rf $installPath/*\n\t[ $? -ne 0 ] && error_all \"Unknown error\"\nfi\n\n# Prepare configurations workspace/my.properties\necho \"Prepare configuration $WORKSPACE/my.properties\"\necho \"INSTALL_PATH=$installPath\" | cat > $WORKSPACE/my.properties\necho 'argus.app.memory.max-size=3600' | cat >> $WORKSPACE/my.properties\necho 'argus.app.debug-mode.enabled=true' | cat >> $WORKSPACE/my.properties\n# на хосте также есть агент teamcity с СП под ui-теcты, для перестраховки взял смещение 10\necho \"jboss.socket.binding.port-offset=10\" | cat >> $WORKSPACE/my.properties\nmajorVersion=`sed -n 's/ops\\.app\\.version=//p' ${WORKSPACE}/work.properties`\necho \"argus.app.build-number=$majorVersion.$CI_PIPELINE_ID\" | cat >> $WORKSPACE/my.properties\necho \"jboss.bind.address=127.0.0.1\" | cat >> $WORKSPACE/my.properties\necho \"argus.app.admin.user=developer\" | cat >> $WORKSPACE/my.properties\necho \"argus.app.admin.pass=developer\" | cat >> $WORKSPACE/my.properties\necho \"argus.mail.enabled=true\" | cat >> $WORKSPACE/my.properties\necho \"argus.mail.smtp.user=ops.noreply@argustelecom.ru\" | cat >> $WORKSPACE/my.properties\necho \"argus.mail.smtp.pass=DutyFr33!\" | cat >> $WORKSPACE/my.properties\necho \"argus.mail.smtp.port=25\" | cat >> $WORKSPACE/my.properties\necho \"argus.mail.smtp.host=mail.argustelecom.ru\" | cat >> $WORKSPACE/my.properties\necho \"argus.mail.smtp.ssl.enabled=false\" | cat >> $WORKSPACE/my.properties\necho \"argus.mail.smtp.starttls.enabled=false\" | cat >> $WORKSPACE/my.properties\necho \"argus.mail.smtp.auth.enabled=true\" | cat >> $WORKSPACE/my.properties\n\nif [ $mode == \"ui-tests\" ] || [ $mode == \"ui-tests-pa\" ]; then\n\t# переменую с именем БД Ops объявляем в .gitlab-ci.yml\n\techo \"Using database DB_NAME: $DB_NAME\"\n\techo \"Prepare configuration argus.db.*\"\n\t# переменую с адресом хоста баз Box объявляем в .gitlab-ci.yml\n\techo \"argus.db.address=$HostDB\" | cat >> $WORKSPACE/my.properties\n\techo \"argus.db.port=5432\" | cat >> $WORKSPACE/my.properties\n\techo \"argus.db.name=$DB_NAME\" | cat >> $WORKSPACE/my.properties\n\techo \"argus.db.user=argus_sys\" | cat >> $WORKSPACE/my.properties\n\techo \"argus.db.pass=vk38gwwm\" | cat >> $WORKSPACE/my.properties\n\n\tif [ $mode == \"ui-tests-pa\" ]; then\n\n \t\techo ' -- Install server Ops -- '\n\t\tbuild_number=`sed -n 's/argus\\.app\\.build-number=//p' ${WORKSPACE}/my.properties`\n\t\tname_distr=\"ops-dist-${build_number}.jar\"\n\t\tif [ ! -f \"$WORKSPACE/server-conf/ops-dist/target/$name_distr\" ]; then\n \t\techo \"Not found $name_distr in directory $WORKSPACE/server-conf/ops-dist/target\"\n\t\t\terror_all \"Install application server: not found distrib\"\n\t\telse\n\t\t\techo \"Found distrib $name_distr\"\n\t\tfi\n\t\tcd $BUILDDIR/$WORKSPACE/server-conf/ops-dist/target\n\t\tjava -jar $name_distr -options $BUILDDIR/$WORKSPACE/my.properties\n\t\t[ $? -ne 0 ] && error_all \"Install application server failed\"\n\n\t\techo ' -- Start server Ops -- '\n\t\tcd $BUILDDIR/$WORKSPACE/server-app/inf-modules/webui\n\t\tmvn pre-integration-test -Pbefore-ui-tests-build-start-appserver\n\t\t[ $? 
-ne 0 ] && error_all \"Start server Ops\"\n\t\tcd $BUILDDIR\n\n\t\techo \" -- Prepare configuration for Personal Area -- \"\n\t\tinstallPath=\"${HOME}/servers/${branch}/${wildflyPackage}-PA/\";\n\t\tsed -i -e \"s|INSTALL_PATH=.*|INSTALL_PATH=${installPath}|1\" $WORKSPACE/my.properties\n\t\t# >> на хосте также есть агент teamcity с СП под ui-теcты, для перестраховки взял смещение 10,\n\t\t# а для СП личного кабинета port-offset=20\n\t\tsed\t-i 's/jboss.socket.binding.port-offset=.*/jboss.socket.binding.port-offset=20/1' $WORKSPACE/my.properties\n\t\techo \"argus.security.login-module=ru.argustelecom.ops.inf.login.PersonalAreaLoginModule\" | cat >> $WORKSPACE/my.properties\n\t\techo \"argus.test.ui.remotearm=true\" | cat >> $WORKSPACE/my.properties\n\t\techo \"contextRoot=\" | cat >> $WORKSPACE/my.properties\n\t\t# ! незабывай учитывать в настройке argus.test.provider.address смещение порта \"обыного\" СП Ops\n\t\techo \"argus.test.provider.address=127.0.0.1:8090\" | cat >> $WORKSPACE/my.properties\n\tfi\nfi\n\necho \"Result configuration $WORKSPACE/my.properties: \"\ncat $WORKSPACE/my.properties | grep -Ev \"(^#|^$)\"\n\n# selenium тесты только в сборке ui-tests\nif [ $mode == \"ui-tests\" ] || [ $mode == \"ui-tests-pa\" ]; then\n\t# We use xvfb to run selenium tests on headless environment\n\techo \"Starting up Xvfb server\";\n\t/etc/init.d/xvfb_start_script.sh start $buildcounter $branch\n\t[ $? -ne 0 ] && error_all \"error: xvfb_start_script.sh start $buildcounter $branch\"\n\texport DISPLAY=:$buildcounter\nfi\n\n# Deprecated. Convert shell scripts from dos to unix format\ndos2unix make_all.sh && ./make_all.sh $mode\n[ $? -ne 0 ] && error_all \"error execute make_all.sh\"\n\nend_build\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":569,"cells":{"blob_id":{"kind":"string","value":"58749a71f0a80ef838c6a893aa7604bee978d73c"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"minjun-jang/ocpinstall"},"path":{"kind":"string","value":"/00.prepare/05_chrony_setting.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1188,"string":"1,188"},"score":{"kind":"number","value":2.953125,"string":"2.953125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#!/bin/sh\n\nABSOLUTE_PATH=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\nsource \"${ABSOLUTE_PATH}/config/openshift.env\"\n\necho -e \"\\033[32m[S]=============================================================================\\033[0m\"\necho -e \"\\033[46m@@@[S]_[YUM INSTALL CHRONY] ==> ${CHRONY_SERVER}\\033[0m\"\nAsystemctl stop ntpd\nsystemctl disable ntpd\ntimedatectl set-timezone Asia/Seoul\ntimedatectl status\nyum -y remove ntp\nyum -y install chrony\nsed -i \"s/^server/#server/g\" /etc/chrony.conf\nsed -i \"s/^allow/#allow/g\" /etc/chrony.conf\nsed -i \"s/^local/#local/g\" /etc/chrony.conf\nsed -i'' -r -e \"/#server\\ 3.rhel.pool.ntp.org\\ iburst/a\\server\\ bastion.${DNS_DOMAIN}\\ iburst\" /etc/chrony.conf\nsed -i'' -r -e \"/^#allow\\ 192.168.0.0\\/16/a\\allow\\ ${CHRONY_ALLOW}\" /etc/chrony.conf\nsed -i'' -r -e \"/^#local\\ stratum\\ 10/a\\local\\ stratum\\ ${CHRONY_STRATUM}\" /etc/chrony.conf\nfirewall-cmd --permanent --add-port=123/udp\nfirewall-cmd --reload\nsystemctl enable chronyd\nsystemctl restart chronyd\nchronyc sources -v\nchronyc tracking\necho -e 
\"\\033[36m@@@[E]_[YUM INSTALL CHRONY] ==> ${CHRONY_SERVER}\\033[0m\"\necho -e \"================================================================================[E]\"\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":570,"cells":{"blob_id":{"kind":"string","value":"90dfbea16a8fc624736a5c61c6522ff59515b442"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"ajsalminen/dotfiles"},"path":{"kind":"string","value":"/shell/functions/ag_truncate_lines.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":577,"string":"577"},"score":{"kind":"number","value":3.40625,"string":"3.40625"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT","BSD-2-Clause"],"string":"[\n \"MIT\",\n \"BSD-2-Clause\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"# page only when needed, set term to truncate lines for minified code etc.\n# default to insensitive matches and include a global agignore for minified exts.\n# Some info http://unix.stackexchange.com/questions/109211/preserving-color-output-with-cut\nag_truncate_lines() {\n tput rmam # turn off automatic margin mode.\n # reset colors at the beginning of line because rmam can cut some seqs out.\n ag --color -i --path-to-ignore=~/.grepignore \"$@\" | sed \"s/^/$(tput sgr0)/\" | less -XFr\n local ret=\"$?\"\n tput smam # turn on automatic margin mode.\n return \"$ret\"\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":571,"cells":{"blob_id":{"kind":"string","value":"316593ccee6d1cb7cce06ad2302e1024726d5d17"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"Sunil2914/network"},"path":{"kind":"string","value":"/mirror/create_mirror_misc.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1701,"string":"1,701"},"score":{"kind":"number","value":3.25,"string":"3.25"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"#!/bin/bash -ev\nexport DISTRO=$(cat /etc/*-release|grep ^ID\\=|awk -F\\= {'print $2'}|sed s/\\\"//g)\n\nif [[ \"${DISTRO}\" == \"ubuntu\" ]]; then\n apt-get install -y apt-transport-https curl\nfi\n\n[[ -z ${MIRROR_BUILD_DIR} ]] && export MIRROR_BUILD_DIR=${PWD}\n[[ -z ${MIRROR_OUTPUT_DIR} ]] && export MIRROR_OUTPUT_DIR=${PWD}/mirror-dist\n\nSTATIC_FILE_LIST=$(<${MIRROR_BUILD_DIR}/dependencies/pnda-static-file-dependencies.txt)\nPLUGIN_LIST=$(<${MIRROR_BUILD_DIR}/dependencies/pnda-logstash-plugin-dependencies.txt)\n\nSTATIC_FILE_DIR=$MIRROR_OUTPUT_DIR/mirror_misc\nmkdir -p $STATIC_FILE_DIR\ncd $STATIC_FILE_DIR\necho \"$STATIC_FILE_LIST\" | while read STATIC_FILE\ndo\n echo $STATIC_FILE\n curl -LOJf --retry 5 --retry-max-time 0 $STATIC_FILE\ndone\ncat SHASUMS256.txt | grep node-v6.10.2-linux-x64.tar.gz > node-v6.10.2-linux-x64.tar.gz.sha1.txt\nsha512sum je-5.0.73.jar > je-5.0.73.jar.sha512.txt\nsha512sum Anaconda2-4.0.0-Linux-x86_64.sh > Anaconda2-4.0.0-Linux-x86_64.sh.sha512.txt\n\nif [ \"x$DISTRO\" == \"xrhel\" -o \"x$DISTRO\" == \"xcentos\" ]; then\n yum install -y java-1.7.0-openjdk\nelif [ \"x$DISTRO\" == \"xubuntu\" ]; then\n apt-get install -y default-jre\nfi\n\ncd /tmp\ncurl -LOJf --retry 5 --retry-max-time 0 
https://artifacts.elastic.co/downloads/logstash/logstash-5.2.2.tar.gz\ntar zxf logstash-5.2.2.tar.gz\nrm logstash-5.2.2.tar.gz\ncd logstash-5.2.2\n# work around bug introduced in 5.1.1: https://discuss.elastic.co/t/5-1-1-plugin-installation-behind-proxy/70454\nJARS_SKIP='true' bin/logstash-plugin install $PLUGIN_LIST\nbin/logstash-plugin prepare-offline-pack $PLUGIN_LIST\nchmod a+r logstash-offline-plugins-5.2.2.zip\nmv logstash-offline-plugins-5.2.2.zip $STATIC_FILE_DIR/logstash-offline-plugins-5.2.2.zip\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":572,"cells":{"blob_id":{"kind":"string","value":"60687989a9899faf88ddbe93c4d98b960a657be1"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"josh43/RestBuilder"},"path":{"kind":"string","value":"/RunAndUpdateScript.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":417,"string":"417"},"score":{"kind":"number","value":2.625,"string":"2.625"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#!/bin/bash\n#make sure to install and include express and multer in your package.json\nmake\n./main\ntheSource=${PWD}/Rest/\nobjcTarget=\"/Users/josh/Documents/CS Projects/RestExample/RestExample/\"\njsTarget=\"/Users/josh/Documents/CS Projects/RestGeneratorTester/routes/\"\necho \"Copying files at $theSource to $objcTarget\"\n\ncp -f -R \"${theSource}/OBJC\" \"${objcTarget}\"\ncp -f -R \"${theSource}/JS/\" \"${jsTarget}\"\n\nmake clean"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":573,"cells":{"blob_id":{"kind":"string","value":"265334cd9cfebe6c90c6dfb6d4103701008b1bfa"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"neh/myconf"},"path":{"kind":"string","value":"/bin/polybar.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":873,"string":"873"},"score":{"kind":"number","value":3.296875,"string":"3.296875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#!/usr/bin/env sh\n\nlaptop_display='eDP-1'\n\n# Terminate already running bar instances\nkillall -q polybar\n\n# Wait until the processes have been shut down\nwhile pgrep -x polybar >/dev/null; do sleep 1; done\n\n# Get network interface names\nexport INTERFACE_WIRED=$(ip link show | cut -d' ' -f2 | tr -d ':' | grep '^en')\nexport INTERFACE_WIRELESS=$(ip link show | cut -d' ' -f2 | tr -d ':' | grep '^wl')\n\nprimary=$(xrandr | grep ' connected primary' | cut -d' ' -f1)\n\nfor m in $(polybar --list-monitors | cut -d\":\" -f1); do\n if [[ \"$m\" == \"$primary\" ]]; then\n # the --reload option doesn't seem to work under i3, but\n # doesn't hurt either\n if [[ \"$m\" == \"$laptop_display\" ]]; then\n MONITOR=$m polybar primary-laptop &\n else\n MONITOR=$m polybar default &\n fi\n else\n MONITOR=$m polybar secondary &\n 
fi\ndone\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":574,"cells":{"blob_id":{"kind":"string","value":"ceb4786295f1fb9dd1e0aa4567fbfdcef1b6f0c0"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"mrstepanovic/bradmci"},"path":{"kind":"string","value":"/tools/bak/old.fc_analysis"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":7941,"string":"7,941"},"score":{"kind":"number","value":3.546875,"string":"3.546875"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#!/bin/bash\n\n# source matlab\nsource /autofs/cluster/animal/scripts/matlab/matlab_74\n\n#\n# For each subject, extracts time courses for each run\n# and computes correlation coefficients using ROIs.\n# Also generates a corresponding matrix graphic. \n#\n\nfunction usage {\n\n echo \" ______ _____ _ _ _ __ _______ _____ _____ \"\n echo \"| ____/ ____| /\\ | \\ | | /\\ | | \\ \\ / / ____|_ _|/ ____|\"\n echo \"| |__ | | / \\ | \\| | / \\ | | \\ \\_/ / (___ | | | (___ \"\n echo '| __|| | / /\\ \\ | . ` | / /\\ \\ | | \\ / \\___ \\ | | \\___ \\ ~~~ version 1.2.1'\n echo \"| | | |____ / ____ \\| |\\ |/ ____ \\| |____| | ____) |_| |_ ____) |\"\n echo \"|_| \\_____| /_/ \\_\\_| \\_/_/ \\_\\______|_| |_____/|_____|_____/ \"\n \n echo \"\"\n echo \"REQUIRED:\"\n echo \" -i Subject names\"\n echo \" -r ROI file locations\"\n echo \" -n ROI names, default ROI file name\"\n echo \" -l Analysis label, e.g. 'DMN' (no spaces)\"\n echo \"\"\n echo \" Note: when using multiple arguments, place in quotes:\"\n echo \" fc_analysis -i \\\"SUBJ1 SUBJ2 SUBJ3\\\" -r \\\"ROI1 ROI2\\\" -n \\\"PCC HIPPO\\\" -l 'PCC_HIPPO_FC'\"\n echo \"\"\n echo \"OR:\"\n echo \" -I File containing list of subjects\"\n echo \" -R File containing list of ROI files\"\n echo \" -N File containing list of ROI names\"\n echo \" -l Analysis label, e.g. 'DMN' (no spaces)\"\n echo \"\"\n echo \"MORE OPTIONS:\"\n echo \" -u Filename of run list (as found in /)\"\n echo \" -k Gaussian smoothing BOLD to process, default 6mm\"\n echo \" -e Force timecourse extraction\"\n echo \" -t Do not calculate subject averages\"\n echo \" -s Do not calculate group averages\"\n echo \" -m Make images of coefficent matrices\"\n echo \" -p Only show coefficients where p < arg. (default is .05).\"\n echo \" All other values are converted to 0.000 on matrix plots\" \n echo \" -o Output directory, default ./GROUP_STATS//FC\"\n echo \" -h Help\"\n echo \"\" \n}\n\nROINAMES=\"\"\nRD=\"$PWD/GROUP_STATS/$(whoami)/FC\";\nPVAL=\"0.050\";\nSMOOTH=6;\n\nwhile getopts \"i:r:I:R:n:N:u:l:o:p:k:e h t s m\" o ; do\n case $o in\n i ) SUBJECTS=$OPTARG;;\n\t I ) SUBJECTS=$(cat $OPTARG | xargs);;\n\t r ) ROIS=$OPTARG;;\n\t R ) ROIS=$(cat $OPTARG | xargs);;\n\t n ) ROINAMES=$OPTARG;;\n\t N ) ROINAMES=$(cat $OPTARG | xargs);;\n\t k ) SMOOTH=$OPTARG;;\n\t e ) FORCEEXTRACT=1;;\n\t p ) PVAL=$OPTARG;;\n\t l ) AL=$OPTARG;;\n\t o ) RD=$OPTARG;;\n\t t ) SKIPSUB=1;;\n\t s ) SKIPGRP=1;;\n\t m ) MAKEIMG=1;;\n\t u ) RUNLISTFILE=$OPTARG;;\n\t h ) usage;\n\t exit 0;;\n esac\ndone\n\nif [ $# -eq 0 ]; then\n usage; exit 0;\nfi\n\nresultsdir=${RD}\n\nif [ ${#SUBJECTS} -lt 1 ]; then\n echo \"Error: Please provide subjects.\";\n usage; exit 0;\nfi\n\nif [ ${#ROIS} -lt 1 ]; then\n echo \"Error: Please provide at least 2 ROIs.\"\n usage; exit 0;\nfi\n\nif [ ! 
$AL ]; then\n echo \"Error: Please specify an analysis label, using -l.\";\n exit 0;\nfi\n\nif [ $SMOOTH -eq 0 ]; then\n SMOOTH_HALF_F=0\nelse\n SMOOTH_HALF_F=$( echo 'scale=15;(.4412712/('$SMOOTH'/100))' | bc | awk '{print int($1)}' );\nfi\n\necho \"SUBJECTS: \"$SUBJECTS;\necho \"\"\necho \"ROIS: \"$ROIS;\necho \"\"\necho \"ROI NAMES: $ROINAMES\";\necho \"\"\n\n\nfunction extract {\n n=1;\n comm=\"addpath('/cluster/animal/scripts/matlab/'); all=[]; \"\n \n mkdir -p ${resultsdir}/${AL}\n mkdir -p ${resultsdir}/${AL}/timecourses\n for subject in $SUBJECTS; do\n comm=\"${comm} subj=[];\"\n\tfc=$( ls -d ${subject}/bold/0*/ | wc | awk '{print $1}' )\n\tif [ $fc == 0 ]; then\n\t echo -e \"Error: No BOLD runs found for subject ${subject}.\"\n\t exit\n\tfi\n\t\n\t### CHANGES FOR 1.2 MADE BELOW ###\n\t## concatenate the timecourses.... UGH, WHY DID I NOT DO THIS BEFORE?!? I surprise myself with my stupidity\n\t\n\tppstr=$(/cluster/animal/scripts/NRG-fc/functions/fc_ppstr $subject)\n\tRUNLISTtxt=${subject}/fcMRI/${subject}_${ppstr}_g${SMOOTH_HALF_F}_bpss_resid.txt\n\n\tif [ \"$RUNLISTFILE\" != \"\" ]; then\n\t if [ ! -r $subject/$RUNLISTFILE ]; then\n\t \techo \"*** No runlist file ( $RUNLISTFILE ) found for ${subject}! Skipping subject... ***\"\n\t\tcontinue;\n\t else\n\t \techo \"using specific runlist file... $subject/$RUNLISTFILE\"\n\t \tt=$(cat $subject/$RUNLISTFILE | xargs);\n\t\techo \" $t\"\n\t\tfor x in $t; do\n\t\t ga=\"$ga-e /bold/$x/ \"\n\t\tdone\n\t\tcat $RUNLISTtxt | grep -i $ga > /tmp/${subject}.${RUNLISTFILE}.$$.txt\n\t\tRUNLIST=/tmp/${subject}.${RUNLISTFILE}.$$.txt\n\t\t\n\t fi\n\telse\n\t RUNLIST=$RUNLISTtxt\n\tfi\n\t\n\tformat=\"\"\n\tfor x in $(cat $RUNLIST); do\n\t if [ ! -e $x\".nii.gz\" ]; then\n\t\techo \"Error: cannot find file $x.nii.gz\"\n\t\texit;\n\t else\n\t\ttp=$(fslnvols $x\".nii.gz\")\n\t\tformat=\"${format}0x${tp}+\"\n\t fi\n\tdone\n\n\n\tcomm=\"${comm} tc=[];\"\n\tseednames=\"\"\n\tfor seed in $ROIS; do\n\t seedname=$(basename ${seed%%.nii*})\n\t seednames=\"$seednames '$seedname', \"\n\t fileloc=${resultsdir}/${AL}/timecourses/${subject}_${seedname}\n\t if [ ! -r ${fileloc}.voxt.dat ] || [ $FORCEEXTRACT ]; then\n\t \techo \"Extracting timecourse for subject: $subject, ROI: $seedname\"\n\t\tqnt_nifti -s -list $RUNLIST ${seed} | awk '$1 !~/#/ {print $2}' > ${fileloc}.voxt.dat\n\t\tif [ ! -r ${fileloc}.voxt.dat ]; then\n\t\t echo \"Error: Cannot find ${fileloc}.voxt.dat. Trying again...\";\n\t\t qnt_nifti -s -list $RUNLIST ${seed} | awk '$1 !~/#/ {print $2}' > ${fileloc}.voxt.dat\n\t \tfi\n\t else\n\t \techo \"Using previous timecourse file -- subject: $subject, ROI: $seedname\"\n\t fi\n\t if [ ! -r ${fileloc}.voxt.dat ]; then\n\t\techo \"Error: Cannot find ${fileloc}.voxt.dat. Try forcing timecourse extraction using -e.\"; exit;\n\t fi\n\n \t if [ \"$(cat ${fileloc}.voxt.dat | wc -w)\" == \"0\" ]; then\n\t \techo \"Error: ${fileloc}.voxt.dat is empty. Forcing extraction...\"; \n\t\tqnt_nifti -s -list $RUNLIST ${seed} | awk '$1 !~/#/ {print $2}' > ${fileloc}.voxt.dat\n\t fi\n\t comm=\"${comm} tcf= load('${fileloc}.voxt.dat'); tc=[tc tcf];\\n\"\n done\n\tif [ $(echo ${ROINAMES} | wc -w) -eq $(echo ${ROIS} | wc -w) ]; then\n\t seednames=\"'${ROINAMES// /', '}'\";\n\tfi\n\t# calculate coef\n\tcomm=\"${comm} [fc, pval] = corrcoef(tc); fcz = real(r_2_fisher_z(fc)); \\n\"\n \tcomm=\"${comm} all(:,:,${n}) = fcz; allr(:,:,${n}) = fc; \\n\"\n\n \t### END CHANGES TO 1.2 ###\n\n\t\n\tif [ ! 
$SKIPSUB ]; then\n\t fname=\"${resultsdir}/${AL}/${subject}.dat\"\n\t comm=\"${comm} save ${fname} fcz -ascii;\\n\"\n\t comm=\"${comm} fprintf('writing: ${fname}\\\\\\n'); \\n\"\n\t \n\t if [ $MAKEIMG ]; then\n\t\tcomm=\"${comm} cfig = fcmatrix(fcz, {${seednames}}, '${AL}: ${subject}');\\n\"\n\t\tfname=\"${resultsdir}/${AL}/${subject}.png\"\n\t\tcomm=\"${comm} saveas(cfig, '${fname}', 'png');\\n\"\n\t\tcomm=\"${comm} fprintf('writing: ${fname}\\\\\\n'); \\n\"\n\t fi\n\tfi\n\tn=$(($n+1))\n done\n if [ ! $SKIPGRP ]; then\n fname=\"${resultsdir}/${AL}/${AL}.dat\"\n\tfnamer=\"${resultsdir}/${AL}/${AL}_r.dat\"\n\tfmat=\"${resultsdir}/${AL}/${AL}.mat\"\n\tcomm=\"${comm} save('${fmat}', 'all'); \\n\"\n\tcomm=\"${comm} allm = mean(all,3); save ${fname} allm -ascii;\\n\"\n\tcomm=\"${comm} allmr = mean(allr,3); save ${fnamer} allmr -ascii;\\n\"\n\tcomm=\"${comm} fprintf('writing: ${fname}\\\\\\n'); \\n\"\n\tfname=\"${resultsdir}/${AL}/${AL}_sd.dat\"\n\tcomm=\"${comm} allstd = std(all,0,3); save ${fname} allstd -ascii;\\n\"\n\tcomm=\"${comm} fprintf('writing: ${fname}\\\\\\n'); \\n\"\n\tif [ $MAKEIMG ]; then\n\t comm=\"${comm} cfig = fcmatrix(allm, {${seednames}}, '${AL}');\\n\"\n\t fname=\"${resultsdir}/${AL}/${AL}.png\"\n\t comm=\"${comm} saveas(cfig, '${fname}', 'png');\\n\"\n\t comm=\"${comm} fprintf('writing: ${fname}\\\\\\n'); quit; \\n\"\n\tfi\n fi\n echo -e $comm\n cmdfile=\"$(whoami)$$.m\"\n echo -e $comm > $cmdfile\n run_matlab -nosplash -nodesktop -nojvm -r \"${cmdfile%%.m}; exit\"\n rm $cmdfile\n}\n\nextract;\n\nif [ -e ${resultsdir}/${AL}/ ]; then\n cd ${resultsdir}/${AL}/;\n /cluster/animal/scripts/NRG-fc/functions/flatten_matrices \"$SUBJECTS\" \"$ROINAMES\" \"$(basename ${AL})\"\nfi\n\necho \"Finished!\"\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":575,"cells":{"blob_id":{"kind":"string","value":"50fa2c1730aefd8637cd32addbffe4cfbb8a7008"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"mkg20001/nix"},"path":{"kind":"string","value":"/cron/clean-node-modules.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":296,"string":"296"},"score":{"kind":"number","value":3.53125,"string":"3.53125"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#!/bin/bash\n\nset -eo pipefail\n\nif [ -z \"$1\" ]; then\n CMD=$(readlink -f \"$0\")\n\n find /home -iname node_modules -mtime +14 -prune -exec bash $CMD {} \\;\nelse\n if [ -z \"$(echo \"$1\" | tr \"/\" \"\n\" | grep \"^\\\\.\")\" ]; then\n echo \" -- RM $1 -- \" >&2\n rm -rf \"$1\"\n else\n echo \"keep $1\"\n fi\nfi\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":576,"cells":{"blob_id":{"kind":"string","value":"0d6084cbf565237015bb7667fad77edede99bdae"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"JuanDAC/test_others_simple_shell"},"path":{"kind":"string","value":"/cd_multiargs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":371,"string":"371"},"score":{"kind":"number","value":3.046875,"string":"3.046875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#!/bin/bash\n\ncommand1=\"cd /bin /tmp slkfjl\"\n\n# Stop any 
running shells\nstop_shell\n\n# Run command\necho $command$'\\n'pwd | \"$SHELL\" > \"$YOUR_OUTPUT\" 2> \"$YOUR_ERROR\" &\necho $command$'\\n'pwd | sh > \"$EXPECTED_OUTPUT\" 2> \"$EXPECTED_ERROR\"\n\n# Wait for one second\n\"$SLEEP\" \"$SLEEPSECONDS\"\n\n# Check the output\ncheck_output\n\n# Check the errors\ncheck_error\n\n# Clean up\nstop_shell\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":577,"cells":{"blob_id":{"kind":"string","value":"b49e467d4f01b32bfaaa1f2809e1082f4ffd7d24"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"NDari/dotfiles"},"path":{"kind":"string","value":"/scripts/setup_laptop.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1787,"string":"1,787"},"score":{"kind":"number","value":3.734375,"string":"3.734375"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#!/bin/sh\n\n# Modified version of thoughtbots script\n# https://github.com/thoughtbot/laptop/blob/master/mac\n\nfancy_echo() {\n local fmt=\"$1\"; shift\n\n printf \"\\n$fmt\\n\" \"$@\"\n}\n\ntrap 'ret=$?; test $ret -ne 0 && printf \"failed\\n\\n\" >&2; exit $ret' EXIT\n\nset -e\n\nif [ ! -d \"$HOME/bin/\" ]; then\n mkdir \"$HOME/bin\"\nfi\n\nif [ ! -f \"$HOME/.zshrc\" ]; then\n touch \"$HOME/.zshrc\"\nfi\n\nHOMEBREW_PREFIX=\"/usr/local\"\n\nif [ -d \"$HOMEBREW_PREFIX\" ]; then\n if ! [ -r \"$HOMEBREW_PREFIX\" ]; then\n sudo chown -R \"$LOGNAME:admin\" /usr/local\n fi\nelse\n sudo mkdir \"$HOMEBREW_PREFIX\"\n sudo chflags norestricted \"$HOMEBREW_PREFIX\"\n sudo chown -R \"$LOGNAME:admin\" \"$HOMEBREW_PREFIX\"\nfi\n\nif ! command -v brew >/dev/null; then\n fancy_echo \"Installing Homebrew ...\"\n curl -fsS \\\n 'https://raw.githubusercontent.com/Homebrew/install/master/install' | ruby\nfi\n\nfancy_echo \"Installing brew packages\"\nbrew update --force # https://github.com/Homebrew/brew/issues/1151\nbrew bundle --file=$HOME/dotfiles/Brewfile\n\nupdate_shell() {\n local shell_path;\n shell_path=\"$(which zsh)\"\n\n fancy_echo \"Changing your shell to zsh ...\"\n if ! grep \"$shell_path\" /etc/shells > /dev/null 2>&1 ; then\n fancy_echo \"Adding '$shell_path' to /etc/shells\"\n sudo sh -c \"echo $shell_path >> /etc/shells\"\n fi\n sudo chsh -s \"$shell_path\" \"$USER\"\n}\n\ncase \"$SHELL\" in\n */zsh)\n if [ \"$(which zsh)\" != '/usr/local/bin/zsh' ] ; then\n update_shell\n fi\n ;;\n *)\n update_shell\n ;;\nesac\n\ncd $HOME\n\ncurl -L https://github.com/robbyrussell/oh-my-zsh/raw/master/tools/install.sh | sh\n\nif [ ! 
-d \".config/nvim\" ]; then\n mkdir -p \".config/nvim\"\nfi\n\nln -s dotfiles/.ctags\nln -s dotfiles/.tmux.conf\nln -s dotfiles/.zshrc\nln -s dotfiles/.zlogout\nln -s dotfiles/.inputrc\ncd .config/nvim\nln -s ~/dotfiles/init.vim .\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":578,"cells":{"blob_id":{"kind":"string","value":"f03f45e5735d395f886c29f3b6c26cbc489749ea"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"it4ng/sendemail"},"path":{"kind":"string","value":"/sendmail"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":491,"string":"491"},"score":{"kind":"number","value":2.671875,"string":"2.671875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#!/bin/sh\n\n[ -z \"$MAIL_RECIPIENT\" ] && echo \"MAIL_RECIPIENT is not set, exiting\" && exit 1\n[ -z \"$MAIL_SENDER\" ] && echo \"MAIL_SENDER is not set, exiting\" && exit 1\n[ -z \"$MAIL_SUBJECT\" ] && echo \"MAIL_SUBJECT is not set, exiting\" && exit 1\n[ -z \"$MAIL_MESSAGE\" ] && echo \"MAIL_MESSAGE is not set, exiting\" && exit 1\n[ -z \"$MAIL_SERVER\" ] && echo \"MAIL_SERVER is not set, exiting\" && exit 1\n\nsendemail -f $MAIL_SENDER -t $MAIL_RECIPIENT -u \"$MAIL_SUBJECT\" -m \"$MAIL_MESSAGE\" -s $MAIL_SERVER\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":579,"cells":{"blob_id":{"kind":"string","value":"6db955d58df07c34a69cc97d30286ce64453e580"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"prasanthkumar3103/My-AWS-Handy-Scripts"},"path":{"kind":"string","value":"/get_awsInstanceId.sh"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":285,"string":"285"},"score":{"kind":"number","value":2.984375,"string":"2.984375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#!/bin/bash\n# Author: Ajaya Kumar Loya\n# This script will give you the instanceID when executed.\n# Note this need be running on a Instance of which you need Instance ID.\n\nexport INSTANCE_ID=`curl --silent http://169.254.169.254/latest/meta-data/instance-id`\necho \"Instance ID => \"${INSTANCE_ID}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":580,"cells":{"blob_id":{"kind":"string","value":"fe898ea27e375d9f41533b3115089f81617a4d0d"},"language":{"kind":"string","value":"Shell"},"repo_name":{"kind":"string","value":"xianlimei/zhuxianB30"},"path":{"kind":"string","value":"/scripts/build_functions/.svn/text-base/device_cpu.svn-base"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1949,"string":"1,949"},"score":{"kind":"number","value":3.1875,"string":"3.1875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#!/bin/bash\n\ncase $1 
# --- xianlimei/zhuxianB30: scripts/build_functions/device_cpu ---

#!/bin/bash
# Map a product name ($1) to the CPU, architecture and product type used by the build.

case $1 in
    uag|ips|fw|utm|rt|dpx|dlb|bsrg|dpx19k|nsw)
        CURRENT_BUILD_CPU='mips-xlr'
        CURRENT_BUILD_ARCH='mips'
    ;;
    mips)
        CURRENT_BUILD_CPU='mips-xlr'
        CURRENT_BUILD_ARCH='mips'
    ;;
    uag64|ips64|fw64|utm64|dpx64|dlb64)
        CURRENT_BUILD_CPU='mips64-xlr'
        CURRENT_BUILD_ARCH='mips64'
    ;;
    mips64)
        CURRENT_BUILD_CPU='mips64-xlr'
        CURRENT_BUILD_ARCH='mips64'
    ;;
    uagxlp|ipsxlp|fwxlp|utmxlp|dpxxlp|dlbxlp)
        CURRENT_BUILD_CPU='mips64-xlp'
        CURRENT_BUILD_ARCH='mips64'
    ;;
    mips_xlp|mipsxlp|mips64_xlp|mips64xlp)
        CURRENT_BUILD_CPU='mips64-xlp'
        CURRENT_BUILD_ARCH='mips64'
    ;;
    srg|bsw|mips_bcm|mips-bcm)
        CURRENT_BUILD_CPU='mips-bcm'
        CURRENT_BUILD_ARCH='mips_bcm'
    ;;
    lsw|ppc|powerpc)
        CURRENT_BUILD_CPU='powerpc'
        CURRENT_BUILD_ARCH='powerpc'
    ;;
    x86|i386)
        CURRENT_BUILD_CPU='x86'
        CURRENT_BUILD_ARCH='x86'
    ;;
    *)
        echo "Usage: $0 [uag|fw|rt|ips|utm|dpx|dlb|lsw|bsw|nsw|srg|bsrg|x86]"
        echo ""
        exit 1
    ;;
esac

case $1 in
    uag|uag64|uagxlp)
        BUILD_PRODUCT_TYPE='uag'
    ;;
    ips|ips64|ipsxlp)
        BUILD_PRODUCT_TYPE='ips'
    ;;
    fw|fw64|fwxlp)
        BUILD_PRODUCT_TYPE='fw'
    ;;
    utm|utm64|utmxlp)
        BUILD_PRODUCT_TYPE='utm'
    ;;
    rt)
        BUILD_PRODUCT_TYPE='rt'
    ;;
    dpx|dpx64|dpxxlp)
        BUILD_PRODUCT_TYPE='dpx'
    ;;
    dpx19k)
        BUILD_PRODUCT_TYPE='dpx19k'
    ;;
    dlb)
        BUILD_PRODUCT_TYPE='dlb'
    ;;
    lsw)
        BUILD_PRODUCT_TYPE='lsw'
    ;;
    bsw)
        BUILD_PRODUCT_TYPE='bsw'
    ;;
    srg)
        BUILD_PRODUCT_TYPE='srg'
    ;;
    bsrg)
        BUILD_PRODUCT_TYPE='bsrg'
    ;;
    nsw)
        BUILD_PRODUCT_TYPE='nsw'
    ;;
    x86)
        BUILD_PRODUCT_TYPE='x86'
    ;;
    mips|mips64|mipsxlp|mips-xlp|mips64xlp)
        BUILD_PRODUCT_TYPE='fw'
    ;;
    ppc|powerpc)
        BUILD_PRODUCT_TYPE='lsw'
    ;;
    *)
        echo "Usage: $0 [uag|fw|rt|ips|utm|dpx|dlb|lsw|bsw|nsw|srg|bsrg|x86]"
        echo ""
        exit 1
    ;;
esac

if [ -z "$CURRENT_BUILD_ARCH" ]; then
    echo "UNKNOWN PRODUCT_TYPE OR COMPILER"
    echo ""
    exit 1
fi

export CURRENT_BUILD_CPU
export CURRENT_BUILD_ARCH
export BUILD_PRODUCT_TYPE
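The script only exports three variables, so it is presumably meant to be sourced by a larger build script rather than executed on its own. A usage sketch, assuming the file is saved as device_cpu and sourced from bash (which, unlike plain POSIX sh, passes extra arguments of the . builtin through as positional parameters); fw is just an example product name, and the usage branch will exit the sourcing shell on an unrecognised argument:

    . ./device_cpu fw
    echo "CPU=$CURRENT_BUILD_CPU ARCH=$CURRENT_BUILD_ARCH TYPE=$BUILD_PRODUCT_TYPE"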
# --- mnsanghvi/cl-travis: install.sh (MIT license) ---

#!/bin/sh
# cl-travis install script. Don't remove this line.
set -e

# get <url> <destination>
get() {
    url=$1
    destination=$2
    echo "Downloading ${url}..."
    curl --no-progress-bar --retry 10 -o "$destination" -L "$url"
}

# unpack <opt> <file> <destination>
unpack() {
    opt=$1
    file=$2
    destination=$3

    echo "Unpacking tarball $file into $destination..."
    mkdir -p "$destination"
    tar -C "$destination" --strip-components=1 "$opt" -xf "$file"
}

install_i386_arch() {
    # Travis-CI's dpkg doesn't seem to know about --add-architecture.
    #sudo dpkg --add-architecture i386
    sudo apt-get install libc6:i386
}

# add_to_lisp_rc <string>
add_to_lisp_rc() {
    string=$1
    case "$LISP" in
        abcl) rc=".abclrc" ;;
        allegro*) rc=".clinit.cl" ;;
        sbcl|sbcl32) rc=".sbclrc" ;;
        ccl|ccl32) rc=".ccl-init.lisp" ;;
        cmucl) rc=".cmucl-init.lisp" ;;
        clisp|clisp32) rc=".clisprc.lisp" ;;
        ecl) rc=".eclrc" ;;
        *)
            echo "Unrecognised lisp: '$LISP'"
            exit 1
            ;;
    esac
    echo "$string" >> "$HOME/$rc"
}

# version of ASDF known to work with cl-launch (3.0.2)
ASDF_URL="https://raw.githubusercontent.com/sbcl/sbcl/sbcl-1.1.17/contrib/asdf/asdf.lisp"
ASDF_LOCATION="$HOME/asdf"

install_asdf() {
    get "$ASDF_URL" asdf.lisp
    add_to_lisp_rc "(load \"$ASDF_LOCATION\")"
}

compile_asdf() {
    echo "Compiling ASDF..."
    cl-launch -i "(compile-file \"$ASDF_LOCATION.lisp\")"
}

CL_LAUNCH_URL="http://common-lisp.net/project/xcvb/cl-launch/cl-launch-4.0.3.tar.gz"
CL_LAUNCH_DIR="$HOME/cl-launch"
CL_LAUNCH_TARBALL="$HOME/cl-launch.tar.gz"
CL_LAUNCH_SCRIPT="/usr/local/bin/cl-launch"
CL_LAUNCH_RC="$HOME/.cl-launchrc"

download_cl_launch() {
    get "$CL_LAUNCH_URL" "$CL_LAUNCH_TARBALL"
    unpack -z "$CL_LAUNCH_TARBALL" "$CL_LAUNCH_DIR"
}

# install_cl_launch