Dataset: hongliu9903/stack_edu_shell - 4,133,544 rows in total; this page lists 100 rows starting at offset 100.
Columns:

  blob_id             string    length 40-40
  language            string    1 distinct value
  repo_name           string    length 4-115
  path                string    length 2-970
  src_encoding        string    28 distinct values
  length_bytes        int64     31 - 5.38M
  score               float64   2.52 - 5.28
  int_score           int64     3 - 5
  detected_licenses   list      length 0-161
  license_type        string    2 distinct values
  text                string    length 31 - 5.39M
  download_success    bool      1 distinct value
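
Rows in this listing can also be retrieved programmatically. The snippet below is only a sketch, not taken from this page: it assumes the public datasets-server "rows" endpoint is enabled for this dataset, and the config and split names (default, train) are guesses; only the dataset id and the column names come from the listing itself.

# Sketch: fetch one page of rows and print repo_name, path and score for each.
# Assumed: datasets-server "rows" API; config=default and split=train are guesses.
curl -s "https://datasets-server.huggingface.co/rows?dataset=hongliu9903%2Fstack_edu_shell&config=default&split=train&offset=100&length=10" \
  | jq -r '.rows[] | .row | [.repo_name, .path, (.score | tostring)] | @tsv'

Each returned row carries the same fields as the schema above, with the shell source in the text column.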
20a69da341fed2f26c681c60338fb26220e35ac8
Shell
agangzz/jukemir
/reproduce/0_docker.sh
UTF-8
339
2.734375
3
[]
no_license
pushd ..
set -e
HOST_CACHE=$(python -c "from jukemir import CACHE_DIR; print(CACHE_DIR)")
echo $HOST_CACHE
popd

docker run \
  -it \
  --rm \
  -d \
  --name jukemir \
  -u $(id -u):$(id -g) \
  -v $HOST_CACHE:/jukemir/cache \
  -v $(pwd)/../jukemir:/jukemir/jukemir \
  -v $(pwd)/../reproduce:/jukemir/reproduce \
  jukemir/lib \
  bash
true
963890a6a4df4c9d77cf7c0616ff4501737a3d00
Shell
ghuntley/monorepo
/third_party/git/t/t3304-notes-mixed.sh
UTF-8
4,256
3.21875
3
[ "GPL-1.0-or-later", "LGPL-2.0-or-later", "LGPL-2.1-only", "GPL-3.0-only", "GPL-2.0-only", "MIT" ]
permissive
#!/bin/sh test_description='Test notes trees that also contain non-notes' . ./test-lib.sh number_of_commits=100 start_note_commit () { test_tick && cat <<INPUT_END commit refs/notes/commits committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE data <<COMMIT notes COMMIT from refs/notes/commits^0 deleteall INPUT_END } verify_notes () { git log | grep "^ " > output && i=$number_of_commits && while [ $i -gt 0 ]; do echo " commit #$i" && echo " note for commit #$i" && i=$(($i-1)); done > expect && test_cmp expect output } test_expect_success "setup: create a couple of commits" ' test_tick && cat <<INPUT_END >input && commit refs/heads/master committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE data <<COMMIT commit #1 COMMIT M 644 inline file data <<EOF file in commit #1 EOF INPUT_END test_tick && cat <<INPUT_END >>input && commit refs/heads/master committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE data <<COMMIT commit #2 COMMIT M 644 inline file data <<EOF file in commit #2 EOF INPUT_END git fast-import --quiet <input ' test_expect_success "create a notes tree with both notes and non-notes" ' commit1=$(git rev-parse refs/heads/master^) && commit2=$(git rev-parse refs/heads/master) && test_tick && cat <<INPUT_END >input && commit refs/notes/commits committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE data <<COMMIT notes commit #1 COMMIT N inline $commit1 data <<EOF note for commit #1 EOF N inline $commit2 data <<EOF note for commit #2 EOF INPUT_END test_tick && cat <<INPUT_END >>input && commit refs/notes/commits committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE data <<COMMIT notes commit #2 COMMIT M 644 inline foobar/non-note.txt data <<EOF A non-note in a notes tree EOF N inline $commit2 data <<EOF edited note for commit #2 EOF INPUT_END test_tick && cat <<INPUT_END >>input && commit refs/notes/commits committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE data <<COMMIT notes commit #3 COMMIT N inline $commit1 data <<EOF edited note for commit #1 EOF M 644 inline deadbeef data <<EOF non-note with SHA1-like name EOF M 644 inline de/adbeef data <<EOF another non-note with SHA1-like name EOF M 644 inline de/adbeefdeadbeefdeadbeefdeadbeefdeadbeef data <<EOF This is actually a valid note, albeit to a non-existing object. It is needed in order to trigger the "mishandling" of the dead/beef non-note. 
EOF M 644 inline dead/beef data <<EOF yet another non-note with SHA1-like name EOF INPUT_END git fast-import --quiet <input && git config core.notesRef refs/notes/commits ' cat >expect <<EXPECT_END commit #2 edited note for commit #2 commit #1 edited note for commit #1 EXPECT_END test_expect_success "verify contents of notes" ' git log | grep "^ " > actual && test_cmp expect actual ' cat >expect_nn1 <<EXPECT_END A non-note in a notes tree EXPECT_END cat >expect_nn2 <<EXPECT_END non-note with SHA1-like name EXPECT_END cat >expect_nn3 <<EXPECT_END another non-note with SHA1-like name EXPECT_END cat >expect_nn4 <<EXPECT_END yet another non-note with SHA1-like name EXPECT_END test_expect_success "verify contents of non-notes" ' git cat-file -p refs/notes/commits:foobar/non-note.txt > actual_nn1 && test_cmp expect_nn1 actual_nn1 && git cat-file -p refs/notes/commits:deadbeef > actual_nn2 && test_cmp expect_nn2 actual_nn2 && git cat-file -p refs/notes/commits:de/adbeef > actual_nn3 && test_cmp expect_nn3 actual_nn3 && git cat-file -p refs/notes/commits:dead/beef > actual_nn4 && test_cmp expect_nn4 actual_nn4 ' test_expect_success "git-notes preserves non-notes" ' test_tick && git notes add -f -m "foo bar" ' test_expect_success "verify contents of non-notes after git-notes" ' git cat-file -p refs/notes/commits:foobar/non-note.txt > actual_nn1 && test_cmp expect_nn1 actual_nn1 && git cat-file -p refs/notes/commits:deadbeef > actual_nn2 && test_cmp expect_nn2 actual_nn2 && git cat-file -p refs/notes/commits:de/adbeef > actual_nn3 && test_cmp expect_nn3 actual_nn3 && git cat-file -p refs/notes/commits:dead/beef > actual_nn4 && test_cmp expect_nn4 actual_nn4 ' test_done
true
b2dad0bcd9e9d99ed296b39dcefcef5a72deff43
Shell
clim-ability/dockerCRE
/creStart.sh
UTF-8
613
2.828125
3
[ "Apache-2.0" ]
permissive
#!/bin/bash

./creStop.sh

#Update container
echo "Update CRE container"
# docker pull tamboraorg/creproxy:clim16n
# docker pull tamboraorg/crephp:clim16n
# docker pull tamboraorg/crenginxphp:clim16n
# docker pull tamboraorg/crepostgis:clim16n
# docker pull tamboraorg/cremysql:clim16n
# docker pull tamboraorg/creglue:clim16n

#Run containers
echo "Start CRE container"
REMDIR="$PWD"
cd /data/dockerCRE
docker-compose --file dc-clim16n.yml up -d
cd $REMDIR

#Show status
echo "$(docker ps -a)"
echo "$(ls -l /data/dockerCRE/volumes/postgres/dumps/*/*)"

#Cleanup
echo "Cleanup containers"
docker system prune -f
true
5bf5fb23dec7e55d591c265dc36a89f38341d855
Shell
MFornander/kingjelly
/scripts/patchdtb.sh
UTF-8
344
2.609375
3
[ "MIT" ]
permissive
#!/bin/bash
### Patch a Debian DTB to enable the PRUs
set -e

LIVE="/boot/dtbs/3.8.13-bone59/am335x-boneblack"
TEMP="debian-`uname -r`"

### FETCH DTB to DTS
#dtc -O dts -o $TEMP.dts -I dtb $LIVE.dtb

### PATCH DTS
#(sed todo)

### STORE DTS to DTB
dtc -O dtb -o $TEMP.dtb -I dts $TEMP.dts
cp -n $LIVE.dtb $LIVE.dtb.old
cp $TEMP.dtb $LIVE.dtb
true
91deddca24f54b8457eae1bebc27b218460a8b17
Shell
krzykli/dotfiles
/.fzf.zsh
UTF-8
575
2.5625
3
[]
no_license
# Setup fzf
# ---------
if [[ ! "$PATH" == */usr/local/opt/fzf/bin* ]]; then
  export PATH="${PATH:+${PATH}:}/usr/local/opt/fzf/bin"
fi

# Auto-completion
# ---------------
[[ $- == *i* ]] && source "/usr/local/opt/fzf/shell/completion.zsh" 2> /dev/null

# Key bindings
# ------------
source "/usr/local/opt/fzf/shell/key-bindings.zsh"

# fzf theme
export FZF_DEFAULT_OPTS=$FZF_DEFAULT_OPTS'
 --color=fg:-1,bg:-1,hl:#6297cc
 --color=fg+:#e86666,bg+:#262626,hl+:#5fd7ff
 --color=info:#afaf87,prompt:#3dcfff,pointer:#af5fff
 --color=marker:#87ff00,spinner:#af5fff,header:#87afaf'
true
1f84ef3986af78600833d4a0b20b74fa559bdda0
Shell
FauxFaux/debian-control
/i/icecast2/icecast2_2.4.3-3_amd64/postrm
UTF-8
1,086
3.40625
3
[]
no_license
#! /bin/sh
# postrm script for icecast2

set -e

case "$1" in
    purge)
        rm -rf /var/log/icecast2
        deluser --system --quiet icecast2 || true
        delgroup --system --quiet --only-if-empty icecast || true
    ;;
    remove|upgrade|failed-upgrade|abort-install|abort-upgrade|disappear)
    ;;
    *)
        echo "postrm called with unknown argument \`$1'" >&2
        exit 1
esac

# Automatically added by dh_installinit/11.3.5
if [ "$1" = "purge" ] ; then
    update-rc.d icecast2 remove >/dev/null
fi
# In case this system is running systemd, we make systemd reload the unit files
# to pick up changes.
if [ -d /run/systemd/system ] ; then
    systemctl --system daemon-reload >/dev/null || true
fi
# End automatically added section
# Automatically added by dh_installdeb/11.3.5
dpkg-maintscript-helper rm_conffile /etc/icecast2/web/status2.xsl 2.4.0-1\~ icecast2 -- "$@"
# End automatically added section
# Automatically added by dh_installdebconf/11.3.5
if [ "$1" = purge ] && [ -e /usr/share/debconf/confmodule ]; then
    . /usr/share/debconf/confmodule
    db_purge
fi
# End automatically added section

exit 0
true
d6f673b30ea78b56821f18e14f1abf79bbedd51d
Shell
gelisam/dotfiles
/bin-common/grex
UTF-8
1,098
3.78125
4
[]
no_license
#!/usr/bin/env bash
# a wrapper around grep which make it easy to only search within a folder with
# "grex src" or only Haskell files with "grex .hs"

declare -a DIR
declare -a FIND_OPTIONS

while [ -d "$1" ]; do
  DIR[${#DIR[@]}]="$1"; shift
done
if [ "${#DIR[@]}" -eq 0 ]; then
  DIR="."
fi

if [ "$(basename "$0")" = "grepp" ]; then
  FIND_OPTIONS[${#FIND_OPTIONS[@]}]="-name"
  FIND_OPTIONS[${#FIND_OPTIONS[@]}]="*.cpp"
fi
if [ "$(echo "$1" | cut -c1)" = "." ]; then
  FIND_OPTIONS[${#FIND_OPTIONS[@]}]="-name"
  FIND_OPTIONS[${#FIND_OPTIONS[@]}]="*$1"; shift
fi
if [ "$1" = "-name" ]; then
  FIND_OPTIONS[${#FIND_OPTIONS[@]}]="$1"; shift
  FIND_OPTIONS[${#FIND_OPTIONS[@]}]="$1"; shift
fi
if [ "$1" = "-iname" ]; then
  FIND_OPTIONS[${#FIND_OPTIONS[@]}]="$1"; shift
  FIND_OPTIONS[${#FIND_OPTIONS[@]}]="$1"; shift
fi
if [ "$1" = "-newer" ]; then
  FIND_OPTIONS[${#FIND_OPTIONS[@]}]="$1"; shift
  FIND_OPTIONS[${#FIND_OPTIONS[@]}]="$1"; shift
fi

find "${DIR[@]}" -name '\.git' -prune -o -name '\.stack-work' -prune -o -type f "${FIND_OPTIONS[@]}" -print | tr '\n' '\0' | xargs -0 grep "$@"
true
6e6874272c153174ec30da0d6832736d4313c287
Shell
bakaut/glaber
/glaber.sh
UTF-8
7,275
3.546875
4
[]
no_license
#!/usr/bin/env bash set -e # functions apitest () { info "Install hurl for testing glaber" [ -d ".tmp/hurl-$HURL_VERSION" ] || \ curl -sL https://github.com/Orange-OpenSource/hurl/releases/download/\ $HURL_VERSION/hurl-$HURL_VERSION-x86_64-linux.tar.gz | \ tar xvz -C .tmp/ 1>/dev/null info "Testing that glaber-server is runing" .tmp/hurl-$HURL_VERSION/hurl -o .tmp/hurl.log \ --variables-file=.github/workflows/test/.hurl \ --retry --retry-max-count 20 --retry-interval 15000 \ .github/workflows/test/glaber-runing.hurl } diag () { info "Collect glaber logs" docker-compose logs --no-color clickhouse > .tmp/diag/clickhouse.log || true docker-compose logs --no-color mysql > .tmp/diag/mysql.log || true docker-compose logs --no-color glaber-nginx > .tmp/diag/glaber-nginx.log || true docker-compose logs --no-color glaber-server > .tmp/diag/glaber-server.log || true docker-compose ps > .tmp/diag/ps.log info "Collect geneal information about system and docker" uname -a > .tmp/diag/uname.log git log -1 --stat > .tmp/diag/last-commit.log cat /etc/os-release > .tmp/diag/os-release free -m > .tmp/diag/mem.log df -h > .tmp/diag/disk.log docker-compose -version > .tmp/diag/docker-compose-version.log docker --version > .tmp/diag/docker-version.log docker info > .tmp/diag/docker-info.log info "Add diagnostic information to .tmp/diag/diag.zip" zip -r .tmp/diag/diag.zip .tmp/diag/ 1>/dev/null info "Fill free to create issue https://github.com/bakaut/glaber/issues/new" info "And attach .tmp/diag/diag.zip to it" } git-reset-variables-files () { git checkout HEAD -- clickhouse/users.xml git checkout HEAD -- .env } info () { local message=$1 echo $(date --rfc-3339=seconds) $message } wait () { info "Waiting zabbix to start..." apitest && info "Success" && info "$(cat .zbxweb)" && exit 0 || \ docker-compose logs --no-color && \ curl http://127.0.1.1:${ZBX_PORT:-80} || true && \ info "Please try to open zabbix url with credentials:" && \ info "$(cat .zbxweb)" && \ info "If not success, please run diagnostics ./glaber.sh diag" && \ info "Zabbix start failed.Timeout 5 minutes reached" && \ exit 1 } set-passwords() { gen-password() { < /dev/urandom tr -dc _A-Z-a-z-0-9 | head -c12 } make-bcrypt-hash() { htpasswd -bnBC 10 "" $1 | tail -c 55 } if [ ! -f .passwords.created ]; then git-reset-variables-files echo "GLABER_TAG=$GLABER_TAG" >> .env source .env ZBX_CH_PASS=$(gen-password) ZBX_WEB_ADMIN_PASS=$(gen-password) sed -i -e "s/MYSQL_PASSWORD=.*/MYSQL_PASSWORD=$(gen-password)/" \ -e "s/ZBX_CH_PASS=.*/ZBX_CH_PASS=$ZBX_CH_PASS/" \ -e "s/MYSQL_ROOT_PASSWORD=.*/MYSQL_ROOT_PASSWORD=$(gen-password)/" \ .env [[ ! 
-f mysql/create.sql ]] && \ wget -q https://storage.yandexcloud.net/glaber/repo/$GLABER_VERSION-create-mysql.sql.tar.gz -O - | tar -xz && \ mv create.sql mysql/create.sql echo "use MYSQL_DATABASE;" >> mysql/create.sql echo "update users set passwd='\$2y\$10\$ZBX_WEB_ADMIN_PASS' where username='Admin';" >> mysql/create.sql ZBX_WEB_ADMIN_PASS_HASH=$(make-bcrypt-hash $ZBX_WEB_ADMIN_PASS) sed -i -e "s/MYSQL_DATABASE/$MYSQL_DATABASE/" \ -e "s/ZBX_WEB_ADMIN_PASS/$ZBX_WEB_ADMIN_PASS_HASH/" \ mysql/create.sql sed -i -e "s/<password>.*<\/password>/<password>$ZBX_CH_PASS<\/password>/" \ -e "s/10000000000/$ZBX_CH_CONFIG_MAX_MEMORY_USAGE/" \ -e "s/defaultuser/$ZBX_CH_USER/" \ clickhouse/users.xml sed -i -e "s/3G/$MYSQL_CONFIG_INNODB_BUFFER_POOL_SIZE/" \ mysql/etc/my.cnf.d/innodb.conf echo "user=Admin" > .github/workflows/test/.hurl echo "pass=$ZBX_WEB_ADMIN_PASS" >> .github/workflows/test/.hurl echo "port=${ZBX_PORT:-80}" >> .github/workflows/test/.hurl touch .passwords.created echo "Zabbix web access http://127.0.1.1:${ZBX_PORT:-80} Admin $ZBX_WEB_ADMIN_PASS" > .zbxweb fi } usage() { echo "Usage: $0 <action>" echo echo "$0 build - Build docker images" echo "$0 start - Build docker images and start glaber" echo "$0 stop - Stop glaber containers" echo "$0 recreate - Completely remove glaber and start it again" echo "$0 remote - Remote rebuild github glaber images (only admins)" echo "$0 diag - Collect glaber start and some base system info to the file" } build() { [ -d "glaber-server/workers_script/" ] || mkdir -p glaber-server/workers_script/ [ -d ".tmp/diag/" ] || mkdir -p .tmp/diag/ [ -d ".mysql/mysql_data/" ] || \ sudo install -d -o 1001 -g 1001 mysql/mysql_data/ [ -d ".clickhouse/clickhouse_data/" ] || \ sudo install -d -o 101 -g 103 clickhouse/clickhouse_data docker-compose pull 1>.tmp/diag/docker-build.log docker-compose build $args 1>.tmp/diag/docker-build.log } start() { set-passwords build docker-compose up -d wait } stop() { docker-compose down } remove() { docker-compose down read -p "Are you sure to completely remove glaber with database [y/n] ? " -n 1 -r echo if [[ $REPLY =~ ^[Yy]$ ]] then rm .passwords.created .zbxweb .github/workflows/test/.hurl || true sudo rm -rf mysql/mysql_data/ clickhouse/clickhouse_data mysql/create.sql git-reset-variables-files fi } recreate() { remove start } remote-docker() { current_branch=$(git rev-parse --abbrev-ref HEAD) tag=$GLABER_VERSION-$(date '+%Y-%m-%d-%H-%M') git-reset-variables-files git add . git commit -m "build auto commit" git checkout -b build/$tag git push --set-upstream origin build/$tag git checkout $current_branch echo -n "Pushed to remote build branch" } remote-packer() { current_branch=$(git rev-parse --abbrev-ref HEAD) tag=$GLABER_VERSION-$(date '+%Y-%m-%d-%H-%M') git-reset-variables-files git add . git commit -m "build auto commit" git checkout -b packer/$tag git push --set-upstream origin packer/$tag git checkout $current_branch echo -n "Pushed to remote packer branch" } # variables HURL_VERSION="1.8.0" # export ZBX_PORT=8050 # Getting latest tag on git repository (latest stable 2 version of glaber) export GLABER_TAG=$(git ls-remote --refs --sort='version:refname' --tags \ https://gitlab.com/mikler/glaber.git origin '2.*' | \ tail --lines=1 | cut --delimiter='/' --fields=3) export GLABER_VERSION=$(curl -s https://gitlab.com/mikler/glaber/-/raw/${GLABER_TAG}/include/version.h | \ grep GLABER_VERSION | tr -dc 0-9.) 
export args=" --build-arg GLABER_VERSION=$GLABER_VERSION" # main part [ $# -ne 1 ] && (usage && exit 1) # Check whether docker-compose is installed command -v docker-compose >/dev/null 2>&1 || \ { echo >&2 "docker-compose is required, please install it and start over. Aborting."; exit 1; } # Check whether htpasswd is installed command -v htpasswd >/dev/null 2>&1 || \ { echo >&2 "htpasswd is required(apache2-utils), please install it and start over. Aborting."; exit 1; } case $1 in build) build ;; start) start ;; stop) stop ;; recreate) recreate ;; remove) remove ;; remote-docker) remote-docker ;; remote-packer) remote-packer ;; diag) diag ;; test) apitest ;; *) echo -n "unknown command" ;; esac
true
826949aa501b18d99a0bd8850ad810de4e443ca0
Shell
wangzhhe/bcmcontrol
/autobuild.sh
UTF-8
155
2.765625
3
[]
no_license
if [ ! -d "./build" ];then
    echo "Create build folder..."
    mkdir build
else
    rm -r build
    echo "Create build folder..."
    mkdir build
fi
cd build
cmake ..
make
true
95de2127d5848ca8d975d91c1ec55b30d08e07b7
Shell
m-rodin/dst-stu
/src/2/2.2.1.sh
UTF-8
841
2.53125
3
[]
no_license
#!/bin/bash

psql -h 192.168.3.245 -p 5432 -U easycar -W easycar -c "
CREATE OR REPLACE PROCEDURE drop_duplicates()
LANGUAGE plpgsql
AS \$\$
DECLARE
    original record;
    duplicate record;
    completed INT[] DEFAULT '{}';
BEGIN
    for original in (select * from client) loop
        for duplicate in (SELECT * FROM client WHERE client_id != original.client_id AND firstName = original.firstName AND lastName = original.lastName AND birthday = original.birthday AND NOT client_id = ANY(completed)) loop
            UPDATE rent SET client_id = original.client_id WHERE client_id = duplicate.client_id;
            DELETE FROM client WHERE client_id = duplicate.client_id;
        end loop;
        completed := array_append(completed, original.client_id);
    end loop;
END;
\$\$
;"

psql -h 192.168.3.245 -p 5432 -U easycar -W easycar -c "CALL drop_duplicates()"
true
a1615a3361678e7b33def5848767431aaa9cbf51
Shell
demsheng/wxWidgets-example
/chap20/install/maketarball.sh
UTF-8
7,951
3.625
4
[]
no_license
#!/bin/sh # Make a distribution of AcmeApp for Linux and Mac # (c) Julian Smart # Usage: maketarball.sh [ options ] # For example: maketarball.sh --build PROGNAME=$0 CVSUPDATE=0 COPYTOFTP=0 # Clean objects before build CLEANOBJECTS=1 DEBUGBUILD=0 RELEASEBUILD=0 NOTAR=0 CLEANWX=0 # Will be computed VERSION=1.00 # This is i686 for Linux PLATFORM=`arch` # Read Linux or Mac variables if [ "$PLATFORM" = "i686" ]; then . setup_unix.var else . setup_mac.var fi # Find the version from symbols.h find_version() { echo "Finding version..." echo "#include <stdio.h>" > /tmp/appver.c echo "#include \"symbols.h\"" >> /tmp/appver.c echo "int main() { printf(\"%.2f\", VERSION_NUMBER); }" >> /tmp/appver.c gcc /tmp/appver.c -I$SRCDIR -o /tmp/appver VERSION=`/tmp/appver` echo "Version is "$VERSION } # Get the latest from the CVS server update_from_cvs() { # Update your project here cd $PROJECTDIR cvs update -d -P } # Copy to public ftp site copy_to_ftp_site() { if [ "$PLATFORM" = "i686" ]; then echo Copying tarball AcmeApp-$VERSION-i386.tar.gz to ftp site... curl ftp://www.acmecorp.com/public_html/AcmeApp-$VERSION-i386.tar.gz --user "username:password" --upload-file $DESTDIR/AcmeApp-$VERSION-i386.tar.gz else echo Copying tarball AcmeApp-$VERSION.dmg to ftp site... curl ftp://www.anthemion.co.uk/public_html/acmeapp/AcmeApp-$VERSION.dmg --user "username:password" --upload-file $DESTDIR/AcmeApp-$VERSION.dmg fi } # Test out the installation install_app() { echo Installing from $DESTDIR... rm -f -r $DESTDIR/tmp mkdir $DESTDIR/tmp cd $DESTDIR/tmp tar xvfz ../AcmeApp-$VERSION-i386.tar.gz ./installacme } # Make the tarball make_linux_dist() { rm -f -r $DESTDIR/* rm -f $DESTDIR/* mkdir -p $DESTDIR/AcmeApp-$VERSION cd $DESTDIR/AcmeApp-$VERSION cp $APPDIR/docs/readme-unix.txt readme.txt cp $APPDIR/docs/readme-unix.txt ../readme.txt cp $APPDIR/docs/license.txt . cp $APPDIR/src/bitmaps/acmeapp32x32.xpm . cp $APPBUILDDIR/acmeapp . echo Copying manuals etcetera... cp $APPDIR/manual/acmeapp.htb . mkdir -p Samples echo Copying sample files... cp -r $APPDIR/Samples . cd $DESTDIR/AcmeApp-$VERSION echo Zipping up resources... cp -r $APPDIR/resources resources cd $DESTDIR/AcmeApp-$VERSION/resources rm -f ../acmeapp.bin zip -q ../acmeapp.bin * cd .. rm -f -r resources mv acmeapp.bin $DESTDIR/AcmeApp-$VERSION cd $DESTDIR/AcmeApp-$VERSION strip acmeapp if [ "$UPX" != "0" ]; then upx acmeapp fi ###################### COPY SCRIPTS # Modify the installer script to know about the version sed "s/__ACMEVERSION__/$VERSION/g" < $APPDIR/scripts/installacme > $DESTDIR/installacme chmod a+x $DESTDIR/installacme ###################### TAR UP WRITER'S CAFE # We archive this data archive inside the outer archive # _without_ a versioned folder, so it can be put in the # destination folder of choice. cd $DESTDIR/AcmeApp-$VERSION tar zcvf $DESTDIR/AcmeAppData.tar.gz * cd $DESTDIR tar zcvf AcmeApp-$VERSION-i386.tar.gz AcmeAppData.tar.gz installacme rm -f AcmeAppData.tar.gz } make_mac_dist() { mkdir -p $DESTDIR cd $DESTDIR echo Removing old files... rm -f -r $DESTDIR/AcmeApp* rm -f *.dmg echo Copying AcmeApp.app... mkdir -p $DESTDIR/AcmeApp-$VERSION ditto --rsrc $APPBUILDDIR/AcmeApp.app $DESTDIR/AcmeApp-$VERSION/AcmeApp.app strip $DESTDIR/AcmeApp-$VERSION/AcmeApp.app/Contents/MacOS/AcmeApp cd $DESTDIR/AcmeApp-$VERSION echo Copying readme files... cp $APPDIR/docs/readme-mac.txt readme.txt cp $APPDIR/docs/license.txt . echo Copying manuals etcetera... cp $APPDIR/manual/acmeapp.htb AcmeApp.app/Contents/MacOS echo Copying samples... 
cp -r $APPDIR/Samples . echo Zipping binary resource file... cd $DESTDIR/AcmeApp-$VERSION cp -r $APPDIR/resources resources cd $DESTDIR/AcmeApp-$VERSION/resources rm -f ../acmeapp.bin zip -q ../acmeapp.bin * cd .. mv acmeapp.bin $DESTDIR/AcmeApp-$VERSION/AcmeApp.app/Contents/MacOS rm -f -r resources cd $DESTDIR ############################### FINISH OFF THE DISTRIBUTION echo Making a disk image... hdiutil create AcmeApp-$VERSION.dmg -volname AcmeApp-$VERSION -type UDIF -megabytes 50 -fs HFS+ echo Mounting the disk image... MYDEV=`hdiutil attach AcmeApp-$VERSION.dmg | tail -n 1 | awk '{print $1'}` echo Device is $MYDEV echo Copying AcmeApp to the disk image... ditto --rsrc AcmeApp-$VERSION /Volumes/AcmeApp-$VERSION/AcmeApp-$VERSION echo Unmounting the disk image... hdiutil detach $MYDEV echo Compressing the disk image... hdiutil convert AcmeApp-$VERSION.dmg -format UDZO -o AcmeApp-$VERSION-compressed.dmg echo Internet enabling the disk image... hdiutil internet-enable AcmeApp-$VERSION-compressed.dmg echo Renaming compressed image... rm -f AcmeApp-$VERSION.dmg mv AcmeApp-$VERSION-compressed.dmg AcmeApp-$VERSION.dmg } # Build all Acme App applications do_build() { cd $BUILDDIR # Insert build code here } do_build_debug() { cd $BUILDDIR # Insert build code here } usage() { echo "Makes tarballs and optionally rebuilds Acme App applications." echo "Usage: $PROGNAME [ options ]" echo Options: echo " --help Display this help message" echo " --build Cleans and builds Acme App first" echo " --debug-build Cleans and debug-builds Acme App first" echo " --cvs-update Update from CVS first" echo " --ftp Copy to ftp site" echo " --noclean Resume build without cleaning objects" echo " --cleanwx Clean wxWidgets build first" echo " --install Install after building the distribution" echo " --notar Omit the distribution creation" exit 1 } # Process command line options. for i in "$@"; do case "$i" in --build) RELEASEBUILD=1 ;; --debug-build) DEBUGBUILD=1 ;; --cvs-update) CVSUPDATE=1 ;; --ftp) COPYTOFTP=1 ;; --noclean) CLEANOBJECTS=0 ;; --cleanwx) CLEANWX=1 ;; --notar) NOTAR=1 ;; --install) INSTALLWC=1 ;; *) usage exit ;; esac done if [ "$CVSUPDATE" = "1" ]; then update_from_cvs fi find_version if [ ! -d "$DESTDIR" ]; then mkdir -p $DESTDIR fi if [ ! -d "$SRCDIR" ]; then echo Source directory $SRCDIR not found. usage exit 1 fi find_version if [ "$NOTAR" = "0" ]; then echo Creating Version $VERSION distribution in $DESTDIR, using source directory $SRCDIR and acmeapp binary in $APPBUILDDIR. # Remove all existing files if [ ! -d "$DESTDIR/acmeapp" ]; then rm -f -r $DESTDIR/acmeapp fi fi if [ "$CLEANWX" = "1" ]; then clean_wx fi if [ "$RELEASEBUILD" = "1" ] || [ "$BUILDDESKONLY" = "1" ]; then do_build fi if [ "$DEBUGBUILD" = "1" ]; then do_build_debug fi if [ ! -d "$APPBUILDDIR" ]; then echo Location of acmeapp binary $APPBUILDDIR not found. usage exit 1 fi if [ ! -f $APPBINARY ]; then echo "*** Sorry, Acme was not built correctly." exit fi if [ "$NOTAR" = "0" ]; then if [ "$PLATFORM" = "i686" ]; then make_linux_dist else make_mac_dist fi fi if [ "$COPYTOFTP" = "1" ]; then copy_to_ftp_site fi if [ "$INSTALLWC" = "1" ]; then install_app fi echo AcmeApp archived.
true
c3c20ef29d26ec1733e1a0ba1b67448545d5729b
Shell
ebahsini/modules.tf-lambda
/templates/terragrunt-common-layer/template/common/scripts/update_dynamic_values_in_tfvars.sh
UTF-8
2,692
4.34375
4
[ "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
#!/bin/bash

############################
# This script is used by Terragrunt hook to find and replace references in terraform.tfvars with the real values fetched using `terragrunt output`
############################
# 1. check if this script has not been disabled using special flag (@modulestf:disable_values_updates)
# 2. get list of things to replace
# 3. go through the list of things to replace
# 4. get actual value
# 5. replace value in terraform.tfvars:
#    a. When `to_list` is specified the type of the value will be converted to a list - ["sg-1234"]
############################

readonly tfvars_file="$1/terraform.tfvars"
readonly parent_dir="$1/../"
readonly terragrunt_working_dir=$(dirname $(find "$1/.terragrunt-cache" -type d -name ".terraform"))

echo "parent_dir=$parent_dir"
echo "TERRAGRUNT_WORKING_DIR=$terragrunt_working_dir"

readonly modulestf_disable_values_updates_flag="@modulestf:disable_values_updates"
readonly modulestf_terraform_output_prefix="@modulestf:terraform_output"

############################

if $(grep -q "$modulestf_disable_values_updates_flag" "$tfvars_file"); then
  echo "Dynamic update has been disabled in terraform.tfvars"
  exit 0
fi

# Sample keys:
# @modulestf:terraform_output.security-group_5.this_security_group_id - the type of the value will not be modified
# @modulestf:terraform_output.security-group_5.this_security_group_id.to_list - the type of the value will be converted to list
keys_to_replace=($(grep -oh "$modulestf_terraform_output_prefix\.[^ ]*" "$tfvars_file" | sort | uniq))

for key_to_replace in "${keys_to_replace[@]}"; do

  dir_name=$(cut -d "." -f 2 <<< "$key_to_replace")
  output_name=$(cut -d "." -f 3 <<< "$key_to_replace")
  convert_to_type=$(cut -d "." -f 4 <<< "$key_to_replace")

  cd "${parent_dir}/${dir_name}"

  item=$(terragrunt output -json "$output_name")
  exit_code=$?

  if [[ "$exit_code" != "0" ]]; then
    echo "Can't update value of $key_to_replace in $tfvars_file because key "$output_name" does not exist in output"
    continue
  fi

  item_type=$(echo "$item" | jq -rc ".type")
  item_value=$(echo "$item" | jq -rc ".value")

  if [[ "$item_type" == "string" ]]; then
    item_value="\"$item_value\""
  fi

  if [[ "$convert_to_type" == "to_list" ]]; then
    item_value="[$item_value]"
  fi

  echo "Updating value of $key_to_replace with $item_value in $tfvars_file"

  # set -x # print command: on
  sed -i -r "s|^(.+) =.+(\#)+(.*)${key_to_replace}(.*)|\1 = ${item_value} \2\3${key_to_replace}\4|g" "$tfvars_file"
  # set +x # print command: off

  echo "Copying updated $tfvars_file into $terragrunt_working_dir"
  \cp -f "$tfvars_file" "$terragrunt_working_dir"

done
true
aca48f6f45346cc3c10ce27f19de48e6c07fdafd
Shell
musically-ut/NeuralPointProcess
/code/scripts/synthetic_run.sh
UTF-8
569
2.5625
3
[]
no_license
#!/bin/bash

DATA_ROOT=$HOME/Research/LSTMPointProcess/data/synthetic/hawkes
RESULT_ROOT=$HOME/scratch/results/NeuralPointProcess

H=128
bsize=256
bptt=8
learning_rate=0.0001
max_iter=4000
cur_iter=0

save_dir=$RESULT_ROOT/saved-hidden-$H-bsize-$bsize

if [ ! -e $save_dir ]; then
    mkdir -p $save_dir
fi

dev_id=0

./build/synthetic -event $DATA_ROOT/event.txt -time $DATA_ROOT/time.txt -lr $learning_rate -device $dev_id -maxe $max_iter -svdir $save_dir -hidden $H -b $bsize -int_report 100 -int_test 500 -bptt $bptt -cur_iter $cur_iter 2>&1 | tee $save_dir/log.txt
true
98c1b340987658007766e3d867b45df1ae074b99
Shell
Nico-Curti/shut
/bash/install_g++.sh
UTF-8
3,713
3.765625
4
[]
no_license
#!/bin/bash red='\033[1;31m' green='\033[1;32m' yellow='\033[1;33m' reset='\033[0m' # No Color version="9.2.0" function get_g++ { add2path=$1 postfix=$2 if [[ "$OSTYPE" == "darwin"* ]]; then url_gcc="ftp://ftp.gnu.org/gnu/gcc/gcc-$version/gcc-$version.tar.gz" else url_gcc="ftp://ftp.gnu.org/gnu/gcc/gcc-$version/gcc-$version.tar.gz" fi echo -e "${yellow}Download g++ from ${url_gcc}${reset}" out_dir=$(echo $url_gcc | rev | cut -d'/' -f 1 | rev) out="$(basename $out_dir .tar.gz)" wget $url_gcc echo -e "${yellow}Unzip ${out_dir}${reset}" tar xzf $out_dir mv $out $out-sources cd $out-sources ./contrib/download_prerequisites cd .. mkdir -p objdir cd objdir $PWD/../$out-sources/configure --prefix=$HOME/$out --enable-languages=c,c++ || $PWD/../$out-sources/configure --prefix=$HOME/$out --enable-languages=c,c++ --disable-multilib make make install cd .. if $add2path; then echo "export CC=$HOME/$out/bin/gcc" >> ~/.bashrc echo "export CXX=$HOME/$out/bin/g++" >> ~/.bashrc if $postfix; then echo export PATH='$PATH':$HOME/$out/bin/ >> ~/.bashrc else echo export PATH=$HOME/$out/bin/:'$PATH' >> ~/.bashrc fi fi #export CC=$HOME/$out/bin/gcc #export CXX=$HOME/$out/bin/g++ #export PATH=$PATH:$PWD/$out/bin/ } function install_g++ { add2path=$1 confirm=$2 postfix=$3 printf "${yellow}g++ identification: ${reset}" if [ ! -z $(which g++) ]; then # found a version of g++ GCCVER=$(g++ --version | cut -d' ' -f 3); GCCVER=$(echo $GCCVER | cut -d' ' -f 1 | cut -d'.' -f 1); fi if [ -z $(which g++) ]; then echo -e "${red}NOT FOUND{reset}" # NO g++ if [ ! -z $(which conda) ]; then if [ "$confirm" == "-y" ] || [ "$confirm" == "-Y" ] || [ "$confirm" == "yes" ]; then conda install -y -c conda-forge isl=0.17.1 conda install -y -c creditx gcc-7 conda install -y -c gouarin libgcc-7 else read -p "Do you want install it? [y/n] " confirm if [ "$confirm" == "n" ] || [ "$confirm" == "N" ]; then echo -e "${red}Abort${reset}"; else conda install -y -c conda-forge isl=0.17.1 conda install -y -c creditx gcc-7 conda install -y -c gouarin libgcc-7 fi fi else echo -e "${red}g++ available only with conda${reset}" exit 1 fi # end conda installer elif [[ "$GCCVER" -lt "5" ]]; then # check version of g++ echo -e "${red}sufficient version NOT FOUND${reset}" if [ $(which make) != "" ]; then if [ "$confirm" == "-y" ] || [ "$confirm" == "-Y" ] || [ "$confirm" == "yes" ]; then get_g++ $add2path $postfix else read -p "Do you want install it? [y/n] " confirm if [ "$confirm" == "n" ] || [ "$confirm" == "N" ]; then echo -e "${red}Abort${reset}"; else get_g++ $add2path $postfix fi fi else echo -e "${red}g++ installation without conda available only with make installed${reset}" exit 1 fi else echo -e "${green}FOUND${reset}"; ver=$(echo $(g++ --version) | cut -d' ' -f 4) ver=$(echo "${ver//./}") currver=$(echo "${version//./}") if [ $ver -lt $currver ]; then echo -e "${red}Old g++ version found${reset}" if [ "$confirm" == "-y" ] || [ "$confirm" == "-Y" ] || [ "$confirm" == "yes" ]; then get_g++ $add2path $postfix else read -p "Do you want install it? [y/n] " confirm if [ "$confirm" == "n" ] || [ "$confirm" == "N" ]; then echo -e "${red}Abort${reset}"; else get_g++ $add2path $postfix fi fi fi fi } #install_g++ true -y
true
2c8f649197cc200bf7a45c3ef8b90c03c98897c5
Shell
mrubio-chavarria/mres_project_2
/experiment4_ap.sh
UTF-8
386
2.515625
3
[ "MIT" ]
permissive
#!/bin/bash
#PBS -lselect=1:ncpus=8:mem=48gb:ngpus=2
#PBS -lwalltime=24:00:00

# Load dependencies
module load anaconda3/personal
source activate project2_venv

# Check GPUs
echo "Available GPUs: $CUDA_VISIBLE_DEVICES"

# Launch script
echo "Launch script"
python3 $HOME/project_2/experiment4_ap.py $CUDA_VISIBLE_DEVICES $HOME/project_2/databases/working_ap $HOME/project_2/ap_exp4.tsv
true
cbbd91419fba838fa030d9a3eac5a0313f840e24
Shell
egpbos/prezto
/runcoms/zshrc
UTF-8
3,322
2.96875
3
[ "MIT" ]
permissive
#
# Executes commands at the start of an interactive session.
#
# Authors:
#   Sorin Ionescu <[email protected]>
#

# for powerlevel9k theme
# some things, like the _MODE variable, need to be set before loading the theme through prezto!
export DEFAULT_USER=pbos
export POWERLEVEL9K_LEFT_PROMPT_ELEMENTS=(os_icon context ssh dir anaconda vcs)
export POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS=(status root_indicator background_jobs time)
export POWERLEVEL9K_MODE='nerdfont-complete'

# needed to suppress warnings at stoomboot:
export TERM="xterm-256color"
export LANG="en_US.UTF-8"

# anaconda
POWERLEVEL9K_ANACONDA_LEFT_DELIMITER=""
POWERLEVEL9K_ANACONDA_RIGHT_DELIMITER=""
#POWERLEVEL9K_ANACONDA_BACKGROUND="navy"
POWERLEVEL9K_ANACONDA_FOREGROUND="white"

# path formatting
POWERLEVEL9K_SHORTEN_DIR_LENGTH=2
POWERLEVEL9K_SHORTEN_DELIMITER=""
POWERLEVEL9K_SHORTEN_STRATEGY="truncate_from_right"

# Source Prezto.
if [[ -s "${ZDOTDIR:-$HOME}/.zprezto/init.zsh" ]]; then
  source "${ZDOTDIR:-$HOME}/.zprezto/init.zsh"
fi

# Customize to your needs...

# Aliases
#alias rm='nocorrect rm -I'
alias rm='nocorrect rm'

# turn off noclobber
setopt clobber

# added by Miniconda3 4.3.11 installer
# (in .bash_profile, moved here by me)
#export PATH="/Users/pbos/sw/miniconda3/bin:$PATH"
# moved paths to zprofile path list!

# macports
#export PATH=/opt/local/bin:/opt/local/sbin:$PATH

# host dependent things
case $HOST in
  (ESLT*)
    # ROOT loading
    alias apcocsm_root_debug='source /Users/pbos/projects/apcocsm/roofit-dev/root/cmake-build-debug/bin/thisroot.sh'
    # Custom Python paths
    export PYTHONPATH="/Users/pbos/code/python_modules"
    alias pybarcode='export PYTHONPATH="/Users/pbos/code/barcoding/python:$PYTHONPATH"'
  ;;
  (flappi.local)
    DEFAULT_USER=patrick
  ;;
  (stbc-i*)
    # lsetup stuff on interactive stoomboot nodes
    source /project/atlas/nikhef/cvmfs/setup.sh
    # setupATLAS (the alias doesn't seem to work from .zshrc):
    source ${ATLAS_LOCAL_ROOT_BASE}/user/atlasLocalSetup.sh > /dev/null
    # ... apparently this is not necessary here, we just want git, so do below
    # ... correction, it is necessary after all, otherwise it says the platform is not compatible or something
    # git
    source $ATLAS_LOCAL_ROOT_BASE/packageSetups/localSetup.sh "git" > /dev/null # this is what `lsetup git` will actually do for you (lsetup is a function)
  ;;
  (mlp*)
  ;;
  (*)
    echo "How did I get in the middle of nowhere?"
    echo $HOST
  ;;
esac

# The next line updates PATH for the Google Cloud SDK.
if [ -f '/Users/pbos/sw/google-cloud-sdk/path.zsh.inc' ]; then . '/Users/pbos/sw/google-cloud-sdk/path.zsh.inc'; fi

# The next line enables shell command completion for gcloud.
if [ -f '/Users/pbos/sw/google-cloud-sdk/completion.zsh.inc' ]; then . '/Users/pbos/sw/google-cloud-sdk/completion.zsh.inc'; fi

# >>> conda initialize >>>
# !! Contents within this block are managed by 'conda init' !!
__conda_setup="$('/Users/pbos/sw/miniconda3/bin/conda' 'shell.zsh' 'hook' 2> /dev/null)"
if [ $? -eq 0 ]; then
    eval "$__conda_setup"
else
    if [ -f "/Users/pbos/sw/miniconda3/etc/profile.d/conda.sh" ]; then
        . "/Users/pbos/sw/miniconda3/etc/profile.d/conda.sh"
    else
        export PATH="/Users/pbos/sw/miniconda3/bin:$PATH"
    fi
fi
unset __conda_setup
# <<< conda initialize <<<
true
ff3b3d810d53fcca0f3c3a400808389b96204a06
Shell
WebFlight/cf-mendix-buildpack
/healthcheck/healthcheck.sh
UTF-8
401
2.96875
3
[]
no_license
#!/bin/bash

# min_live_time=3600
# total_live_time=$(($min_live_time + ${CF_INSTANCE_INDEX} * 200))
# counter=0

function killAllVcapProcesses {
  killall -u vcap
}

while :
do
  if ! pgrep -x "java" > /dev/null
  then
    killAllVcapProcesses
    break
  fi
  # if [ $total_live_time -lt $counter ]
  # then
  #   killAllVcapProcesses
  #   break
  # fi
  # let counter=counter+1
  sleep 1
done
true
f0ca7d0338cb6c3058b987055097bde43c1eb087
Shell
yindian/myutils
/scripts/shell/hlsdown
UTF-8
3,794
3.640625
4
[]
no_license
#!/bin/bash get_ts_from_list() { grep -v "^#" <<< "$1" | while read; do if [ "${REPLY:0:1}" = "/" ]; then URL_SCHEMA="${URL%%://*}" URL_WO_SCHEMA="${URL#$URL_SCHEMA://}" URL_HOST="${URL_WO_SCHEMA%%/*}" echo "$URL_SCHEMA://$URL_HOST$REPLY" elif [ "${REPLY:0:4}" != "http" ]; then URL_BASE="${URL%%\?*}" echo "${URL_BASE%/*}/$REPLY" else echo "$REPLY" fi done | wget -T 30 -t 0 -q -i - } SED=gsed if ! which $SED > /dev/null; then SED=sed fi CURL_W_OPT="curl --retry 3 -m 3 -s" DURATION=1 OUTDIR=out while [ -n "$1" -a "${1:0:1}" = "-" ]; do if [ "$1" = "-t" ]; then DURATION=$2 shift 2 elif [ "$1" = "-d" ]; then OUTDIR=$2 shift 2 fi done if [ -z "$1" ]; then echo "Usage: $0 [-t duration] [-d directory] m3u8_url" exit fi INDEX="$($CURL_W_OPT "$1")" URL="$1" LIST= if [ -z "$INDEX" -a -n "$URL" -a ${#URL} -le 2 ]; then URL="$(xxd -ps -c 70 -r <<< 687474703a2f2f76646e2e6c6976652e636e74762e636e2f617069322f6c69766548746d6c352e646f3f6368616e6e656c3d70613a2f2f636374765f7032705f686463637476)$URL&client=html5" URL="${URL/cctvah/anhui}" echo "$URL" INDEX="$($CURL_W_OPT "$URL")" fi if [ -z "$INDEX" -a -n "$($CURL_W_OPT -L "$1")" ]; then URL="$($CURL_W_OPT -L -o /dev/null -w %{url_effective} "$1")" INDEX="$($CURL_W_OPT "$URL")" fi if [ -n "$INDEX" -a "${INDEX:0:18}" = "var html5VideoData" ]; then URL="${INDEX#*\"hls2\":\"}" if [ "$URL" != "$INDEX" ]; then echo "Resolving html5VideoData" URL="${URL%%\"*}" URL="$($SED 's/wscdns.com/cloudcdn.net/' <<< "$URL")" INDEX="$($CURL_W_OPT "$URL")" echo "$URL" fi fi if [ -n "$INDEX" -a "${INDEX:0:7}" = "#EXTM3U" ]; then if grep -q "#EXT-X-STREAM-INF" <<< "$INDEX"; then INDEX="$(tr -d '\r' <<< "$INDEX" | $SED -n '/^#EXT-X-STREAM-INF/,/^[^#]/{/^#/H;/^[^#]/{G;s/\n/ /g;s/BANDWIDTH=/& /;p;s/.*//;h}}' | sort -n -k 3 -r)" while read M3U8 _; do if [ "${M3U8:0:1}" = "/" ]; then URL_SCHEMA="${URL%%://*}" URL_WO_SCHEMA="${URL#$URL_SCHEMA://}" URL_HOST="${URL_WO_SCHEMA%%/*}" M3U8="$URL_SCHEMA://$URL_HOST$M3U8" elif [ "${M3U8:0:4}" != "http" ]; then URL_BASE="${URL%%\?*}" M3U8="${URL_BASE%/*}/$M3U8" fi echo "Trying $M3U8" LIST="$($CURL_W_OPT "$M3U8")" if [ -n "$LIST" -a "${LIST:0:7}" = "#EXTM3U" ]; then URL="$M3U8" echo "Selected $M3U8" break fi done <<< "$INDEX" else LIST="$INDEX" fi else echo "Invalid source $1" exit 1 fi LIST="$(tr -d '\r' <<< "$LIST")" STEP=1 if grep -q "#EXT-X-TARGETDURATION" <<< "$LIST"; then STEP="$($SED -n '/^#EXT-X-TARGETDURATION/{s/^.*: *//;p}' <<< "$LIST")" if [ "$STEP" -gt 2 ]; then STEP=$(($STEP - 1)) else STEP=1 fi fi echo "Duration $DURATION, step $STEP" mkdir -p "$OUTDIR" cd "$OUTDIR" if grep -q "#EXT-X-MEDIA-SEQUENCE" <<< "$LIST"; then LASTN="$($SED -n '/^#EXT-X-MEDIA-SEQUENCE/{s/^.*: *//;p}' <<< "$LIST")" OLDLIST= while [ "$DURATION" -gt 0 ]; do THIS_STEP="${LIST#*#EXTINF:}" if [ "$THIS_STEP" != "$LIST" ]; then THIS_STEP=$($SED -n '1{s/[,.].*//;p}' <<< "$THIS_STEP") if [ "$THIS_STEP" -gt 2 ]; then THIS_STEP=$(($THIS_STEP - 1)) else THIS_STEP=1 fi else THIS_STEP= fi echo "Downloading sequence $LASTN, left duration $DURATION, step ${THIS_STEP:-$STEP}" get_ts_from_list "$(diff <(cat <<< "$OLDLIST") <(cat <<< "$LIST") | $SED -n '/^> /{s/^> //;p}')" OLDLIST="$LIST" DURATION=$(($DURATION - ${THIS_STEP:-$STEP})) LIST="$($CURL_W_OPT "$URL" | tr -d '\r')" N="$($SED -n '/^#EXT-X-MEDIA-SEQUENCE/{s/^.*: *//;p}' <<< "$LIST" | head -1)" while [ "$N" = "$LASTN" -o -z "$N" -o "0$N" -le "0$LASTN" ]; do sleep 1 LIST="$($CURL_W_OPT "$URL" | tr -d '\r')" N="$($SED -n '/^#EXT-X-MEDIA-SEQUENCE/{s/^.*: *//;p}' <<< "$LIST")" done LASTN="$N" done else echo "No sequence. 
Download only once." get_ts_from_list "$LIST" fi
true
773bc64556d7eb6d0e73ea9c8c7c862bf160fcd8
Shell
niusmallnan/decrypt-ipsec
/decrypt-ipsec.sh
UTF-8
1,543
3.953125
4
[]
no_license
#!/bin/bash
set -e

source $(dirname $0)/common.sh

config() {
    check_param
    load_ns
    echo "######################################"
    echo "####Get ESP config for Wireshark!####"
    echo "######################################"
    BASE_CMD="ip netns exec ${ROUTER_CONTAINER_ID} ip xfrm stat"
    SPI_LIST=$(${BASE_CMD} | grep "spi " | awk '{print $4}')
    for SPI in $SPI_LIST
    do
        data=$(${BASE_CMD} list spi ${SPI})
        echo -n "${data}\n" | head -1
        echo "SPI: ${SPI}"
        echo "Encryption: $(echo -n "${data}\n" | grep "rfc" | awk '{print $2}')"
        echo "Encryption key: $(echo -n "${data}\n" | grep "rfc" | awk '{print $3}')"
        echo "Authentication: Any $(echo -n "${data}\n" | grep "rfc" | awk '{print $4}') bit authentication"
        echo "========================="
    done
}

dump() {
    check_param
    echo "Dump traffic packets by tcpdump!"
    echo "You can use like: ip netns exec ${ROUTER_CONTAINER_ID} tcpdump -i eth0 -venn -w ~/test.cap"
}

load_ns() {
    NETNS_BASE="/var/run/netns/"
    if [ ! -d ${NETNS_BASE} ]; then
        mkdir -p ${NETNS_BASE}
    fi
    CONTAINER_NETNS=${NETNS_BASE}${ROUTER_CONTAINER_ID}
    if [ ! -f $CONTAINER_NETNS ]; then
        ln -sv /proc/$(docker inspect -f '{{.State.Pid}}' ${ROUTER_CONTAINER_ID})/ns/net ${CONTAINER_NETNS}
    fi
    echo "Get Container NameSpace: ${CONTAINER_NETNS}"
}

check_param() {
    if [ -z "$ROUTER_CONTAINER_ID" ]; then
        print_error "There is no ipsec-router container"
    fi
}

main "$@"
true
bc9485c3e638868007aa3b17757ac9fdb3280720
Shell
cmsd2/recon
/archive/recon-gossip/gossip.sh
UTF-8
366
2.796875
3
[ "Apache-2.0" ]
permissive
#!/bin/bash
export RUST_LOG="recon=trace,gossip=trace"

trap 'kill $(jobs -pr)' SIGINT SIGTERM EXIT

truncate -s 0 peers.txt
for PORT in `seq 9000 9008`; do
    echo "127.0.0.1:$PORT" >> peers.txt
done

cargo build --example gossip

for PORT in `seq 9000 9008`; do
    ./target/debug/examples/gossip 127.0.0.1:$PORT peers.txt > $PORT.txt 2>&1 &
done

tail -f 9000.txt
true
078039ae6b361933f9321a04fc773c1bd23b17dd
Shell
ryanpetrello/akanda-rug
/devstack/plugin.sh
UTF-8
15,557
2.625
3
[ "Apache-2.0" ]
permissive
# -*- mode: shell-script -*- # Set up default directories AKANDA_RUG_DIR=${AKANDA_RUG_DIR:-$DEST/akanda-rug} AKANDA_NEUTRON_DIR=${AKANDA_NEUTRON_DIR:-$DEST/akanda-neutron} AKANDA_NEUTRON_REPO=${AKANDA_NEUTRON_REPO:-http://github.com/stackforge/akanda-neutron.git} AKANDA_NEUTRON_BRANCH=${AKANDA_NEUTRON_BRANCH:-master} AKANDA_APPLIANCE_DIR=${AKANDA_APPLIANCE_DIR:-$DEST/akanda-appliance} AKANDA_APPLIANCE_REPO=${AKANDA_APPLIANCE_REPO:-http://github.com/stackforge/akanda-appliance.git} AKANDA_APPLIANCE_BRANCH=${AKANDA_APPLIANCE_BRANCH:-master} BUILD_AKANDA_APPLIANCE_IMAGE=${BUILD_AKANDA_APPLIANCE_IMAGE:-False} AKANDA_DEV_APPLIANCE_URL=${AKANDA_DEV_APPLIANCE_URL:-http://akandaio.objects.dreamhost.com/akanda_cloud.qcow2} AKANDA_DEV_APPLIANCE_FILE=${AKANDA_DEV_APPLIANCE_FILE:-$TOP_DIR/files/akanda.qcow2} AKANDA_DEV_APPLIANCE_BUILD_PROXY=${AKANDA_DEV_APPLIANCE_BUILD_PROXY:-""} AKANDA_HORIZON_DIR=${AKANDA_HORIZON_DIR:-$DEST/akanda-horizon} AKANDA_HORIZON_REPO=${AKANDA_HORIZON_REPO:-http://github.com/stackforge/akanda-horizon} AKANDA_HORIZON_BRANCH=${AKANDA_HORIZON_BRANCH:-master} AKANDA_CONF_DIR=/etc/akanda-rug AKANDA_RUG_CONF=$AKANDA_CONF_DIR/rug.ini # Router instances will run as a specific Nova flavor. These values configure # the specs of the flavor devstack will create. ROUTER_INSTANCE_FLAVOR_ID=${ROUTER_INSTANCE_FLAVOR_ID:-135} # NOTE(adam_g): This can be auto-generated UUID once RUG supports non-int IDs here ROUTER_INSTANCE_FLAVOR_RAM=${ROUTER_INSTANCE_FLAVOR_RAM:-512} ROUTER_INSTANCE_FLAVOR_DISK=${ROUTER_INSTANCE_FLAVOR_DISK:-5} ROUTER_INSTANCE_FLAVOR_CPUS=${ROUTER_INSTANCE_FLAVOR_CPUS:-1} PUBLIC_INTERFACE_DEFAULT='eth0' AKANDA_RUG_MANAGEMENT_PREFIX=${RUG_MANGEMENT_PREFIX:-"fdca:3ba5:a17a:acda::/64"} AKANDA_RUG_MANAGEMENT_PORT=${AKANDA_RUG_MANAGEMENT_PORT:-5000} AKANDA_RUG_API_PORT=${AKANDA_RUG_API_PORT:-44250} HORIZON_LOCAL_SETTINGS=$HORIZON_DIR/openstack_dashboard/local/local_settings.py # Path to public ssh key that will be added to the 'akanda' users authorized_keys # within the appliance VM. AKANDA_APPLIANCE_SSH_PUBLIC_KEY=${AKANDA_APPLIANCE_SSH_PUBLIC_KEY:-/home/$STACK_USER/.ssh/id_rsa.pub} function colorize_logging { # Add color to logging output - this is lifted from devstack's functions to colorize the non-standard # akanda format iniset $AKANDA_RUG_CONF DEFAULT logging_exception_prefix "%(color)s%(asctime)s.%(msecs)03d TRACE %(name)s [01;" iniset $AKANDA_RUG_CONF DEFAULT logging_debug_format_suffix "from (pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d" iniset $AKANDA_RUG_CONF DEFAULT logging_default_format_string "%(asctime)s.%(msecs)03d %(color)s%(levelname)s %(name)s:%(process)s:%(processName)s:%(threadName)s [-%(color)s] %(color)s%(message)s" iniset $AKANDA_RUG_CONF DEFAULT logging_context_format_string "%(asctime)s.%(msecs)03d %(color)s%(levelname)s %(name)s:%(process)s:%(processName)s:%(threadName)s [%(request_id)s %(user)s %(tenant)s%(color)s] %(color)s%(message)s" } function configure_akanda() { if [[ ! 
-d $AKANDA_CONF_DIR ]]; then sudo mkdir -p $AKANDA_CONF_DIR fi sudo chown $STACK_USER $AKANDA_CONF_DIR cp $AKANDA_RUG_DIR/etc/rug.ini $AKANDA_RUG_CONF iniset $AKANDA_RUG_CONF DEFAULT verbose True iniset $AKANDA_RUG_CONF DEFAULT admin_user $Q_ADMIN_USERNAME iniset $AKANDA_RUG_CONF DEFAULT admin_password $SERVICE_PASSWORD iniset $AKANDA_RUG_CONF DEFAULT rabbit_userid $RABBIT_USERID iniset $AKANDA_RUG_CONF DEFAULT rabbit_host $RABBIT_HOST iniset $AKANDA_RUG_CONF DEFAULT rabbit_password $RABBIT_PASSWORD iniset $AKANDA_RUG_CONF DEFAULT amqp_url "amqp://$RABBIT_USERID:$RABBIT_PASSWORD@$RABBIT_HOST:$RABBIT_PORT/" iniset $AKANDA_RUG_CONF DEFAULT control_exchange "neutron" iniset $AKANDA_RUG_CONF DEFAULT router_instance_flavor $ROUTER_INSTANCE_FLAVOR iniset $AKANDA_RUG_CONF DEFAULT boot_timeout "6000" iniset $AKANDA_RUG_CONF DEFAULT num_worker_processes "2" iniset $AKANDA_RUG_CONF DEFAULT num_worker_threads "2" iniset $AKANDA_RUG_CONF DEFAULT reboot_error_threshold "2" iniset $AKANDA_RUG_CONF DEFAULT management_prefix $AKANDA_RUG_MANAGEMENT_PREFIX iniset $AKANDA_RUG_CONF DEFAULT akanda_mgt_service_port $AKANDA_RUG_MANAGEMENT_PORT iniset $AKANDA_RUG_CONF DEFAULT rug_api_port $AKANDA_RUG_API_PORT if [[ "$Q_AGENT" == "linuxbridge" ]]; then iniset $AKANDA_RUG_CONF DEFAULT interface_driver "akanda.rug.common.linux.interface.BridgeInterfaceDriver" fi iniset $AKANDA_RUG_CONF DEFAULT ssh_public_key $AKANDA_APPLIANCE_SSH_PUBLIC_KEY iniset $AKANDA_RUG_CONF database connection `database_connection_url akanda` if [ "$LOG_COLOR" == "True" ] && [ "$SYSLOG" == "False" ]; then colorize_logging fi } function configure_akanda_nova() { iniset $NOVA_CONF DEFAULT service_neutron_metadata_proxy True iniset $NOVA_CONF DEFAULT use_ipv6 True } function configure_akanda_neutron() { iniset $NEUTRON_CONF DEFAULT core_plugin akanda.neutron.plugins.ml2_neutron_plugin.Ml2Plugin iniset $NEUTRON_CONF DEFAULT service_plugins akanda.neutron.plugins.ml2_neutron_plugin.L3RouterPlugin iniset $NEUTRON_CONF DEFAULT api_extensions_path $AKANDA_NEUTRON_DIR/akanda/neutron/extensions # Use rpc as notification driver instead of the default no_ops driver # We need the RUG to be able to get neutron's events notification like port.create.start/end # or router.interface.start/end to make it able to boot akanda routers iniset $NEUTRON_CONF DEFAULT notification_driver "neutron.openstack.common.notifier.rpc_notifier" } function configure_akanda_horizon() { # _horizon_config_set depends on this being set local local_settings=$HORIZON_LOCAL_SETTINGS for ext in $(ls $AKANDA_HORIZON_DIR/openstack_dashboard_extensions/*.py); do local ext_dest=$HORIZON_DIR/openstack_dashboard/local/enabled/$(basename $ext) rm -rf $ext_dest ln -s $ext $ext_dest # if horizon is enabled, we assume lib/horizon has been sourced and _horizon_config_set # is defined _horizon_config_set $HORIZON_LOCAL_SETTINGS "" RUG_MANAGEMENT_PREFIX \"$AKANDA_RUG_MANAGEMENT_PREFIX\" _horizon_config_set $HORIZON_LOCAL_SETTINGS "" RUG_API_PORT \"$AKANDA_RUG_API_PORT\" done } function start_akanda_horizon() { restart_apache_server } function install_akanda() { git_clone $AKANDA_NEUTRON_REPO $AKANDA_NEUTRON_DIR $AKANDA_NEUTRON_BRANCH setup_develop $AKANDA_NEUTRON_DIR setup_develop $AKANDA_RUG_DIR # temp hack to add blessed durring devstack installs so that rug-ctl browse works out of the box pip_install blessed if [ "$BUILD_AKANDA_APPLIANCE_IMAGE" == "True" ]; then git_clone $AKANDA_APPLIANCE_REPO $AKANDA_APPLIANCE_DIR $AKANDA_APPLIANCE_BRANCH fi if is_service_enabled horizon; then git_clone 
$AKANDA_HORIZON_REPO $AKANDA_HORIZON_DIR $AKANDA_HORIZON_BRANCH setup_develop $AKANDA_HORIZON_DIR fi } function _auth_args() { local username=$1 local password=$2 local tenant_name=$3 local auth_args="--os-username $username --os-password $password --os-auth-url $OS_AUTH_URL" if [ "$OS_IDENTITY_API_VERSION" -eq "3" ]; then auth_args="$auth_args --os-project-name $tenant_name" else auth_args="$auth_args --os-tenant-name $tenant_name" fi echo "$auth_args" } function create_akanda_nova_flavor() { nova flavor-create akanda $ROUTER_INSTANCE_FLAVOR_ID \ $ROUTER_INSTANCE_FLAVOR_RAM $ROUTER_INSTANCE_FLAVOR_DISK \ $ROUTER_INSTANCE_FLAVOR_CPUS iniset $AKANDA_RUG_CONF DEFAULT router_instance_flavor $ROUTER_INSTANCE_FLAVOR_ID } function _remove_subnets() { # Attempt to delete subnets associated with a network. # We have to modify the output of net-show to allow it to be # parsed properly as shell variables, and we run both commands in # a subshell to avoid polluting the local namespace. (eval $(neutron $auth_args net-show -f shell $1 | sed 's/:/_/g'); neutron $auth_args subnet-delete $subnets || true) } function pre_start_akanda() { # Create and init the database recreate_database akanda akanda-rug-dbsync --config-file $AKANDA_RUG_CONF upgrade local auth_args="$(_auth_args $Q_ADMIN_USERNAME $SERVICE_PASSWORD $SERVICE_TENANT_NAME)" if ! neutron $auth_args net-show $PUBLIC_NETWORK_NAME; then neutron $auth_args net-create $PUBLIC_NETWORK_NAME --router:external fi # Remove the ipv6 subnet created automatically before adding our own. # NOTE(adam_g): For some reason this fails the first time and needs to be repeated? _remove_subnets $PUBLIC_NETWORK_NAME ; _remove_subnets $PUBLIC_NETWORK_NAME typeset public_subnet_id=$(neutron $auth_args subnet-create --ip-version 4 $PUBLIC_NETWORK_NAME 172.16.77.0/24 | grep ' id ' | awk '{ print $4 }') iniset $AKANDA_RUG_CONF DEFAULT external_subnet_id $public_subnet_id neutron $auth_args subnet-create --ip-version 6 $PUBLIC_NETWORK_NAME fdee:9f85:83be::/48 # Point neutron-akanda at the subnet to use for floating IPs. This requires a neutron service restart (later) to take effect. iniset $NEUTRON_CONF akanda floatingip_subnet $public_subnet_id # setup masq rule for public network sudo iptables -t nat -A POSTROUTING -s 172.16.77.0/24 -o $PUBLIC_INTERFACE_DEFAULT -j MASQUERADE neutron $auth_args net-show $PUBLIC_NETWORK_NAME | grep ' id ' | awk '{ print $4 }' typeset public_network_id=$(neutron $auth_args net-show $PUBLIC_NETWORK_NAME | grep ' id ' | awk '{ print $4 }') iniset $AKANDA_RUG_CONF DEFAULT external_network_id $public_network_id neutron $auth_args net-create mgt typeset mgt_network_id=$(neutron $auth_args net-show mgt | grep ' id ' | awk '{ print $4 }') iniset $AKANDA_RUG_CONF DEFAULT management_network_id $mgt_network_id # Remove the ipv6 subnet created automatically before adding our own. 
_remove_subnets mgt typeset mgt_subnet_id=$(neutron $auth_args subnet-create mgt fdca:3ba5:a17a:acda::/64 --ip-version=6 --ipv6_address_mode=slaac --enable_dhcp | grep ' id ' | awk '{ print $4 }') iniset $AKANDA_RUG_CONF DEFAULT management_subnet_id $mgt_subnet_id # Remove the private network created by devstack neutron $auth_args subnet-delete $PRIVATE_SUBNET_NAME neutron $auth_args net-delete $PRIVATE_NETWORK_NAME local akanda_dev_image_src="" if [ "$BUILD_AKANDA_APPLIANCE_IMAGE" == "True" ]; then if [[ $(type -P disk-image-create) == "" ]]; then pip_install "diskimage-builder<0.1.43" fi # Point DIB at the devstack checkout of the akanda-appliance repo DIB_REPOLOCATION_akanda=$AKANDA_APPLIANCE_DIR \ DIB_REPOREF_akanda="$(cd $AKANDA_APPLIANCE_DIR && git rev-parse HEAD)" \ DIB_AKANDA_APPLIANCE_DEBUG_USER=$ADMIN_USERNAME \ DIB_AKANDA_APPLIANCE_DEBUG_PASSWORD=$ADMIN_PASSWORD \ http_proxy=$AKANDA_DEV_APPLIANCE_BUILD_PROXY \ ELEMENTS_PATH=$AKANDA_APPLIANCE_DIR/diskimage-builder/elements \ DIB_RELEASE=jessie DIB_EXTLINUX=1 disk-image-create debian vm akanda debug-user \ -o $TOP_DIR/files/akanda akanda_dev_image_src=$AKANDA_DEV_APPLIANCE_FILE else akanda_dev_image_src=$AKANDA_DEV_APPLIANCE_URL fi env TOKEN=$(openstack token issue -c id -f value) die_if_not_set $LINENO TOKEN "Keystone fail to get token" upload_image $akanda_dev_image_src $TOKEN local image_name=$(basename $akanda_dev_image_src | cut -d. -f1) typeset image_id=$(glance $auth_args image-list | grep $image_name | get_field 1) die_if_not_set $LINENO image_id "Failed to find akanda image" iniset $AKANDA_RUG_CONF DEFAULT router_image_uuid $image_id # NOTE(adam_g): Currently we only support keystone v2 auth so we need to # hardcode the auth url accordingly. See (LP: #1492654) iniset $AKANDA_RUG_CONF DEFAULT auth_url $KEYSTONE_AUTH_PROTOCOL://$KEYSTONE_AUTH_HOST:5000/v2.0 if is_service_enabled horizon; then # _horizon_config_set depends on this being set local local_settings=$HORIZON_LOCAL_SETTINGS _horizon_config_set $HORIZON_LOCAL_SETTINGS "" ROUTER_IMAGE_UUID \"$image_id\" fi create_akanda_nova_flavor } function start_akanda_rug() { screen_it ak-rug "cd $AKANDA_RUG_DIR && akanda-rug-service --config-file $AKANDA_RUG_CONF" echo '************************************************************' echo "Sleeping for a while to make sure the tap device gets set up" echo '************************************************************' sleep 10 } function post_start_akanda() { echo "Creating demo user network and subnet" local auth_args="$(_auth_args demo $OS_PASSWORD demo)" neutron $auth_args net-create thenet neutron $auth_args subnet-create thenet $FIXED_RANGE # Restart neutron so that `akanda.floatingip_subnet` is properly set if [[ "$USE_SCREEN" == "True" ]]; then screen_stop_service q-svc else stop_process q-svc fi start_neutron_service_and_check # Open all traffic on the private CIDR set_demo_tenant_sec_group_private_traffic } function stop_akanda_rug() { echo "Stopping the rug..." screen_stop_service ak-rug stop_process ak-rug } function set_neutron_user_permission() { # Starting from juno services users are not granted with the admin role anymore # but with a new `service` role. # Since nova policy allows only vms booted by admin users to attach ports on the # public networks, we need to modify the policy and allow users with the service # to do that too. 
local old_value='"network:attach_external_network": "rule:admin_api"' local new_value='"network:attach_external_network": "rule:admin_api or role:service"' sed -i "s/$old_value/$new_value/g" /etc/nova/policy.json } function set_demo_tenant_sec_group_private_traffic() { local auth_args="$(_auth_args demo $OS_PASSWORD demo)" neutron $auth_args security-group-rule-create --direction ingress --remote-ip-prefix $FIXED_RANGE default } function check_prereqs() { # Fail devstack as early as possible if system does not satisfy some known # prerequisites if [ ! -e "$AKANDA_APPLIANCE_SSH_PUBLIC_KEY" ]; then die $LINENO "Public SSH key not found at $AKANDA_APPLIANCE_SSH_PUBLIC_KEY. Please copy one there or " \ "set AKANDA_APPLIANCE_SSH_PUBLIC_KEY accordingly." fi } if is_service_enabled ak-rug; then if [[ "$1" == "source" ]]; then check_prereqs elif [[ "$1" == "stack" && "$2" == "install" ]]; then echo_summary "Installing Akanda" set_neutron_user_permission install_akanda elif [[ "$1" == "stack" && "$2" == "post-config" ]]; then echo_summary "Installing Akanda" configure_akanda configure_akanda_nova configure_akanda_neutron if is_service_enabled horizon; then configure_akanda_horizon fi cd $old_cwd elif [[ "$1" == "stack" && "$2" == "extra" ]]; then echo_summary "Initializing Akanda" pre_start_akanda start_akanda_rug if is_service_enabled horizon; then start_akanda_horizon fi post_start_akanda fi if [[ "$1" == "unstack" ]]; then stop_akanda_rug fi if [[ "$1" == "clean" ]]; then # no-op : fi fi
true
381a4b42e681b8e3ee031fdcf63cda62828b57d1
Shell
youngminz/OperatingSystem
/Lab/Lab0_BuildNachos/build_modified_nachos.sh
UTF-8
231
2.703125
3
[]
no_license
#!/bin/bash -ex

# Clean up
function cleanup {
    rm -r nachos
}
trap cleanup EXIT

# Copy from local
cp -r ../../Nachos nachos

# Build docker
docker build --tag nachos:${1:-0.1} --tag nachos:latest --file original.Dockerfile .
true
3570b2507edb444acce89b092dd66f373b429c05
Shell
vsmcloud/terraform-puppet-env-deployment
/terraform/master_userdata.tpl
UTF-8
2,970
3.8125
4
[ "Apache-2.0" ]
permissive
#!/bin/bash # Initialize the variables # declare -x INPUT_JSON=$(cat <<EOF '{ "HostedZoneId": "${hosted_zone_id}", "ChangeBatch": { "Comment": "Update the A record set", "Changes": [ { "Action": "UPSERT", "ResourceRecordSet": { "Name": "${master_hostname}", "Type": "A", "TTL": 300, "ResourceRecords": [ { "Value": "$(curl --silent --show-error --retry 3 http://169.254.169.254/latest/meta-data/local-ipv4)" } ] } } ] } }' EOF ) function mountefs { yum install -y amazon-efs-utils mkdir /etc/puppetlabs mount -t efs fs-de0ab596:/ ${efs_id}:/ /etc/puppetlabs } function installpuppet { rpm -Uvh ${puppet_repo} yum -y install puppetserver export PATH=/opt/puppetlabs/bin:$PATH ### Configure the puppet master ### puppet config set certname ${master_hostname} --section main puppet config set dns_alt_names puppet,${master_hostname} --section master puppet config set autosign true --section master echo "puppet is installed." } function backupmaster { echo "backing up puppetlabs folder" mkdir /tmp/puppetbackup rm -rf /tmp/puppetbackup/* cp -a /etc/puppetlabs/. /tmp/puppetbackup } function restoremaster { rm -rf /etc/puppetlabs/* cp -a /tmp/puppetbackup/. /etc/puppetlabs echo "puppet is recovered." } function generater10kconfig { if [ ! -f /etc/puppetlabs/r10k/r10k.yaml ]; then echo -e "\nGenerating a r10k.yaml file" # Generate default r10k.yaml mkdir /etc/puppetlabs/r10k cat > /etc/puppetlabs/r10k/r10k.yaml <<EOL --- :cachedir: '/var/cache/r10k' :sources: :base: remote: '${r10k_repo}' basedir: '/etc/puppetlabs/code/environments' EOL fi } function installr10k { yum -y install git export PATH=/opt/puppetlabs/puppet/bin:$PATH /opt/puppetlabs/puppet/bin/gem install r10k } export LC_ALL=C # Set up the host name of the master node # hostname ${master_hostname} # Update the system # yum -y update # Create/Update DNS record of the puppet master node # eval aws route53 change-resource-record-sets --cli-input-json $INPUT_JSON # Mount EFS Volume # mountefs # Install Puppet# ## Folder /etc/puppetlabs is not empty, use existing puppet ## if find /etc/puppetlabs -mindepth 1 -print -quit | grep -q .; then backupmaster installpuppet installr10k restoremaster ## Folder /etc/puppetlabs is empty, install and configure puppet master ## else installpuppet installr10k generater10kconfig fi # Start the puppet master and add the service to start up # systemctl start puppetserver systemctl enable puppetserver /opt/puppetlabs/puppet/bin/r10k deploy environment puppet cert list --all
true
373b6272a9fa52b23e6f2d71c376160497d0da1c
Shell
itsdanielg/CSE-337-Scripting-Languages
/HW 3/q4_p2.sh
UTF-8
247
3.40625
3
[]
no_license
#!/bin/bash
# q4_p2.sh
counter=1
while [ $counter -le 10 ]
do
    var=$(($counter%2))
    if [ $var -eq 0 ]; then
        file="even$counter"
        touch $file
        chmod 764 $file
    else
        file="odd$counter"
        touch $file
        chmod 554 $file
    fi
    ((counter++))
done
true
7b3227075d13dc44a390805102b562d86b2ec859
Shell
Mailaender/aur-alucryd
/personal/python-pies/PKGBUILD
UTF-8
824
2.625
3
[]
no_license
# Maintainer: Maxime Gauduin <[email protected]>
# Contributor: Yaron de Leeuw <[email protected]>

pkgname=python-pies
pkgver=2.6.7
pkgrel=1
pkgdesc='The simplest (and tastiest) way to write one program that runs on both Python 2.6+ and Python 3'
arch=('any')
url='https://github.com/timothycrosley/pies'
license=('MIT')
depends=('python')
makedepends=('python-setuptools')
source=("pies-${pkgver}.tar.gz::https://github.com/timothycrosley/pies/archive/${pkgver}.tar.gz")
sha256sums=('ffb67104f2b4f2e5abf4f0ebb6e59420be4bacd9bb4b89ff47dfe6b535f51308')

build() {
  cd pies-${pkgver}
  python setup.py build
}

package() {
  cd pies-${pkgver}
  python setup.py install --root="${pkgdir}" --optimize='1'
  install -dm 755 "${pkgdir}"/usr/share/licenses/python-pies
  install -m 644 LICENSE "${pkgdir}"/usr/share/licenses/python-pies/
}

# vim: ts=2 sw=2 et:
true
e2bb30b314a3042296e0d9dcf17e2157dbb2efce
Shell
realinternetbox/angry-bee-agent
/angry-bee-agent-1652.sh
UTF-8
311
2.75
3
[]
no_license
#code_server = 1652
#sleep_time = 30
for i in {1..30}
do
    echo "angry-bee-agent started"
    NOW=$(date +"%m-%d-%Y %r")
    echo "Date: " $NOW
    #speedtest-cli --server 1652 --simple
    speedtest-cli --server 1652
    echo "angry-bee-agent executed"
    #sleep $sleep_time
    sleep 15
    echo "Place: UPM"
done
true
8ab98846a8f85925cd362cca02cf25e58dd98609
Shell
hubitor/dotfiles-12
/i3/playpause.sh
UTF-8
98
2.59375
3
[]
no_license
#!/bin/bash
if [ $(playerctl status) = "Playing" ]
then
    playerctl pause
else
    playerctl play
fi
true
586e6a3fe6e4053ef4be0e77e25f602bce8a18f5
Shell
han-hongyuan/k8s-charts
/mysqlBackup/backup.sh
UTF-8
1,193
3.78125
4
[]
no_license
#!/bin/bash
set -e
if [[ -z ${storagepath} || -z ${mysqlhost} || -z ${mysqlport} || -z ${mysqlpassword} ]]; then
    echo \"storagepath mysqlhost mysqlport mysqlpassword must be offered.\";
    exit -1;
fi
nowtime=`date +'%Y-%m-%d_%H_%M_%S'`
# mysql
for simpleDb in `mysql -h${mysqlhost} -P${mysqlport} -uroot -p${mysqlpassword} -e "show databases;" -B -N|grep -v schema`
do
    echo "Backing up ${simpleDb}"
    mysqldump --force -h${mysqlhost} -P${mysqlport} -uroot -p${mysqlpassword} --databases ${simpleDb} --lock-tables=false --master-data=2 --single-transaction |gzip > ${storagepath}/mysqlbackup_${simpleDb}-${nowtime}.gz
    echo "${storagepath}/mysqlbackup_${simpleDb}-${nowtime}.gz" >> ${storagepath}/${nowtime}
done
echo "${nowtime}" >> ${storagepath}/mysql_backup.state
if [[ `cat ${storagepath}/mysql_backup.state |wc -l` -gt 3 ]]; then
    file_delete=`head -n 1 ${storagepath}/mysql_backup.state`
    if [ -f ${storagepath}/${file_delete} ];then
        for delete_mysql in `cat ${storagepath}/${file_delete}`
        do
            echo "Deleting expired backup ${delete_mysql}"
            rm -rf ${delete_mysql}
        done
        rm -f ${storagepath}/${file_delete}
    fi
    sed -i "/${file_delete}/d" ${storagepath}/mysql_backup.state
fi
true
df5142e8b696b6a57269134da2ad25ffa1ce3379
Shell
scylladb/scylla-monitoring
/start-thanos.sh
UTF-8
3,399
4.03125
4
[ "Apache-2.0" ]
permissive
#!/usr/bin/env bash . versions.sh if [ -f env.sh ]; then . env.sh fi function usage { __usage="Usage: $(basename $0) [-h] [-S ip:port] Options: -h print this help and exit -S sidecart address - A side cart address:port multiple side cart can be comma delimited The script starts Thanos query, it connect to external Thanos side carts and act as a grafana data source " echo "$__usage" } function update_data_source { THANOS_ADDRESS="$(docker inspect --format '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' thanos)" if [[ $THANOS_ADDRESS = "" ]]; then THANOS_ADDRESS=`hostname -I | awk '{print $1}'` fi THANOS_ADDRESS="$THANOS_ADDRESS:10904" __datasource="# config file version apiVersion: 1 datasources: - name: thanos type: prometheus url: http://$THANOS_ADDRESS access: proxy basicAuth: false " echo "$__datasource" > grafana/provisioning/datasources/thanos.yaml } LIMITS="" VOLUMES="" PARAMS="" for arg; do shift if [ -z "$LIMIT" ]; then case $arg in (--limit) LIMIT="1" ;; (--volume) LIMIT="1" VOLUME="1" ;; (--param) LIMIT="1" PARAM="1" ;; (*) set -- "$@" "$arg" ;; esac else DOCR=`echo $arg|cut -d',' -f1` VALUE=`echo $arg|cut -d',' -f2-|sed 's/#/ /g'` NOSPACE=`echo $arg|sed 's/ /#/g'` if [ "$PARAM" = "1" ]; then if [ -z "${DOCKER_PARAMS[$DOCR]}" ]; then DOCKER_PARAMS[$DOCR]="" fi DOCKER_PARAMS[$DOCR]="${DOCKER_PARAMS[$DOCR]} $VALUE" PARAMS="$PARAMS --param $NOSPACE" unset PARAM else if [ -z "${DOCKER_LIMITS[$DOCR]}" ]; then DOCKER_LIMITS[$DOCR]="" fi if [ "$VOLUME" = "1" ]; then SRC=`echo $VALUE|cut -d':' -f1` DST=`echo $VALUE|cut -d':' -f2-` SRC=$(readlink -m $SRC) DOCKER_LIMITS[$DOCR]="${DOCKER_LIMITS[$DOCR]} -v $SRC:$DST" VOLUMES="$VOLUMES --volume $NOSPACE" unset VOLUME else DOCKER_LIMITS[$DOCR]="${DOCKER_LIMITS[$DOCR]} $VALUE" LIMITS="$LIMITS --limit $NOSPACE" fi fi unset LIMIT fi done SIDECAR=() DOCKER_PARAM="" while getopts ':hlp:S:D:' option; do case "$option" in l) DOCKER_PARAM="$DOCKER_PARAM --net=host" ;; h) usage exit ;; S) IFS=',' ;for s in $OPTARG; do SIDECAR+=(--store=$s) done ;; D) DOCKER_PARAM="$DOCKER_PARAM $OPTARG" ;; :) printf "missing argument for -%s\n" "$OPTARG" >&2 echo "$usage" >&2 exit 1 ;; \?) printf "illegal option: -%s\n" "$OPTARG" >&2 echo "$usage" >&2 exit 1 ;; esac done docker run ${DOCKER_LIMITS["thanos"]} -d $DOCKER_PARAM -i --name thanos -- docker.io/thanosio/thanos:$THANOS_VERSION \ query \ "--debug.name=query0" \ "--grpc-address=0.0.0.0:10903" \ "--grpc-grace-period=1s" \ "--http-address=0.0.0.0:10904" \ "--http-grace-period=1s" \ "--query.replica-label=prometheus" \ ${DOCKER_PARAMS["thanos"]} \ ${SIDECAR[@]} update_data_source
true
5b3137adf5c8f8ff0cd77e64d1f62e89020d3b93
Shell
simonstead/shell-scripts
/api-go-scaffold.sh
UTF-8
2,934
3.46875
3
[]
no_license
if [ "$#" -ne 1 ]; then echo "usage: $0 <app_name>"; exit 1; fi function scaffold() { local name=$1 echo "creating directory $name" mkdir $name cd $name mkdir handlers structs touch README.md touch $name"_test.go" # main echo "package main import ( \"fmt\" \"github.com/simonstead/$name/handlers\" \"net/http\" \"os\" ) func determineListenAddress() (string, error) { port := os.Getenv(\"PORT\") if port == \"\" { return \"\", fmt.Errorf(\"$PORT not set\") } return \":\" + port, nil } func main() { PORT, err := determineListenAddress() if err != nil { PORT = \":4000\" } http.HandleFunc(\"/\", handlers.RootHandler) // server fmt.Printf(\"-\n- Listening on port %v\n-\n\", PORT) if err := http.ListenAndServe(PORT, nil); err != nil { panic(err) } }" > $name.go echo "package main import ( \"encoding/json\" \"github.com/simonstead/$name/handlers\" \"github.com/simonstead/$name/structs\" \"net/http\" \"net/http/httptest\" \"testing\" ) func TestIndexRoute(t *testing.T) { req, err := http.NewRequest(\"GET\", \"/\", nil) if err != nil { t.Fatal(err) } rr := httptest.NewRecorder() handler := http.HandlerFunc(handlers.RootHandler) handler.ServeHTTP(rr, req) if status := rr.Code; status != http.StatusOK { t.Errorf(\"Handler returned wrong status code, got %v but wanted %v\", status, http.StatusOK) } result := structs.JsonRepsonse{} if err := json.NewDecoder(rr.Body).Decode(&result); err != nil { t.Errorf(\"body did not return correct json response: %v\", err) } if result.Msg != \"success\" { t.Errorf(\"json response returned but it errored:\n %v\", result.Error) } }" > $name"_test.go" # git git init echo ".env" > .gitignore # handlers echo "package handlers import ( \"encoding/json\" \"github.com/simonstead/$name/structs\" \"net/http\" ) func RootHandler(w http.ResponseWriter, r *http.Request) { w.Header().Set(\"Content-Type\", \"application/json\") response := &structs.JsonRepsonse{Msg: \"success\", Data: \"this is some data for you\"} if err := json.NewEncoder(w).Encode(response); err != nil { panic(err) } }" > handlers/handlers.go # structs echo "package structs type JsonRepsonse struct { Msg string \`json:\"msg\"\` Data string \`json:\"data\"\` Error string \`json:\"error\"\` }" > structs/structs.go go fmt go build dep init dep ensure # heroku stuff echo "[metadata.heroku] root-package = \"github.com/simonstead/$name\" [prune] go-tests = true unused-packages = true " > Gopkg.toml echo "web: $name" > Procfile git add . git commit -m "first commit of a simple api for $name" } scaffold $1
true
f106ad91a9dc7242c3286f9fb83da8ed50c919dc
Shell
95rade/docker-robot-framework
/test.sh
UTF-8
300
3.15625
3
[ "MIT" ]
permissive
#!/usr/bin/env bash
set -e
pwd
ls -ltr
which robot
exit 0

# Run an individual test suite if the TEST_SUITE environmental variable is set.
#if [ -z "$TEST_SUITE" ]; then
# TEST_SUITE=""
#fi
#CMD="robot --console verbose --outputdir -d /reports -s /suites/$TEST_SUITE"
#echo ${CMD}
#``${CMD}``
true
a5770dec4df8ecb19cfbd20814d37ae954cdaf6f
Shell
dwtj/spring-native
/scripts/compileWithMaven.sh
UTF-8
785
3.765625
4
[ "Apache-2.0", "LicenseRef-scancode-generic-cla" ]
permissive
#!/usr/bin/env bash

GREEN='\033[0;32m'
RED='\033[0;31m'
BLUE='\033[0;34m'
NC='\033[0m'

printf "=== ${BLUE}Building %s sample${NC} ===\n" "${PWD##*/}"
rm -rf target
mkdir -p target/native
echo "Packaging ${PWD##*/} with Maven"
# Only run Petclinic tests to speedup the full build while still testing a complex testing scenario
if [[ ${PWD##*/} == petclinic* || ${PWD##*/} == *agent* ]]
then
    echo "Performing native testing for ${PWD##*/}"
    mvn -ntp -Pnative package $* &> target/native/output.txt
else
    mvn -ntp -DskipTests -Pnative package $* &> target/native/output.txt
fi
if [[ -f target/${PWD##*/} ]]
then
    printf "${GREEN}SUCCESS${NC}\n"
else
    cat target/native/output.txt
    printf "${RED}FAILURE${NC}: an error occurred when compiling the native image.\n"
    exit 1
fi
true
495b761e259bdd02d312bea968cef14c508d0f86
Shell
JaiHariOm/LaunchdAgent-ShellScript
/bin/brew-update.sh
UTF-8
500
3.078125
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
#!/bin/sh
launchctl setenv PATH /usr/local/bin:$PATH

log_dir=$HOME/Library/Logs/Homebrew/update
log_name=${log_dir}/`date "+%Y%m%d"`.log

if [ ! -d ${log_dir} ]; then
    mkdir -p ${log_dir}
fi

brew doctor >> ${log_name} 2>&1
if [ $? -eq 0 ]; then
    brew file update >> ${log_name} 2>&1
    brew file cask_upgrade -C >> ${log_name} 2>&1
fi

#[brew-update.sh]
#Copyright (c) 2016 JaiHariOm (Hayashi Sakihara)
#This software is released under the MIT License.
#http://opensource.org/licenses/mit-license.php
true
60c8fda8500897b30c26435b01e0a8ec93342b1e
Shell
JaneChoi8/test1
/sum.sh
UTF-8
163
3.421875
3
[]
no_license
#!/bin/bash
sum=0
for var in "$@"
do
    if [ $var -gt 10 ]
    then
        if [ `expr $var % 2` -eq 0 ]
        then
            var=-$var
        fi
        sum=`expr $sum + $var`
    fi
done
echo $sum
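A quick sanity check of the logic above, with made-up arguments (not part of the source): values of 10 or less are skipped, even values above 10 are negated, and odd ones are added as-is.

    $ ./sum.sh 5 12 15 20    # 5 skipped, 12 -> -12, 15 -> +15, 20 -> -20
    -17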
true
83e69c58ce8abf3d7f066b3d90922a66fa93a7ed
Shell
spring-cloud/spring-cloud-connectors
/ci/scripts/sync-to-maven-central.sh
UTF-8
357
2.703125
3
[ "LicenseRef-scancode-generic-cla", "Apache-2.0", "LGPL-2.0-or-later", "GPL-1.0-or-later", "LicenseRef-scancode-unknown-license-reference" ]
permissive
#!/bin/bash
set -euo pipefail

readonly BUILD_INFO_LOCATION="$(pwd)/artifactory-repo/build-info.json"
readonly CONFIG_DIR="$(pwd)/git-repo/ci/config"

java -jar /opt/concourse-release-scripts*.jar \
  --spring.config.location="${CONFIG_DIR}/release-scripts.yml" \
  publishToCentral 'RELEASE' "$BUILD_INFO_LOCATION" "artifactory-repo"

echo "Sync complete"
true
c8b0221fd1a91375209e0b0b7865115717cb681f
Shell
sudeepb02/Linuxication-mcug
/shell-script/scripts/forValentine.sh
UTF-8
2,099
3.53125
4
[]
no_license
#!/bin/zsh
. `pwd`/poetryAndStuff.sh

if [ ! -d Crushes ]
then
    mkdir Crushes
fi

case $1 in
a)
    echo -n "How many crushes do you want to add? : "
    read numberOfCrushes
    echo "Enter first names only, its polite :)"
    for i in {1..$numberOfCrushes}
    do
        echo -n "Enter name :) - "
        read name
        echo -n "Enter email ID : - "
        read emailid
        touch Crushes/"$name":"$emailid"
    done
    exit 0
    ;;
d)
    echo "Such heartbreak.. "
    ls -1 Crushes > crushesList
    cut -d ":" -f1 crushesList
    echo -n "Which one's gone? : "
    read toughLuck
    rm Crushes/`ls -1 Crushes | grep $toughLuck`
    echo "Better luck next time, mate.."
    rm crushesList
    exit 0
    ;;
e)
    ls -1 Crushes > crushesList    #only for the cut command
    #Recipients Info
    RECIPIENT_NAME=(`cut -d ":" -f1 crushesList`)
    RECIPIENT_EMAILID=(`cut -d ":" -f2 crushesList`)
    rm crushesList    #not required anymore
    #Sender Info
    SENDER_NAME="MCUG"
    SENDER_EMAILID="[email protected]"
    SENDER_PASSWORD="mischiefmanaged"    #geeky reference
    #echo "Sending Email to : ${RECIPIENT_NAME[*]}"
    LOOPCOUNTER=${#RECIPIENT_EMAILID[@]}    #counter for number of recipients
    #Create mail for each of the recipient
    for i in {1..$LOOPCOUNTER}
    do
        createMail $SENDER_NAME $SENDER_EMAILID ${RECIPIENT_NAME[$i]} ${RECIPIENT_EMAILID[$i]}
    done
    #Now send the email
    for j in {1..$LOOPCOUNTER}
    do
        curl --url "smtps://smtp.gmail.com:465" --ssl-reqd --mail-from "$SENDER_EMAILID" --mail-rcpt "${RECIPIENT_EMAILID[$j]}" \
        --upload-file Crushes/"${RECIPIENT_NAME[$j]}":"${RECIPIENT_EMAILID[$j]}" --user "$SENDER_EMAILID:$SENDER_PASSWORD" --insecure
    done
    exit 0
    ;;
*)
    #default case
    if [ "$1" = h ] || [ $# -eq 0 ]
    then
        echo "GOD BLESS SAINT VALENTINE"
        echo "a - add new crushes"
        echo "d - delete crushes"
        echo "e - send email"
    else
        echo "No such option, dumbass. Correct options are : "
        echo "a - add new crushes"
        echo "d - delete crushes"
        echo "e - send email"
    fi
    exit 0
    ;;
esac
true
34d7cc4e7310ecc9861cc345a205360bf7049a51
Shell
puppyofkosh/bug-finding-tools
/daikon-differ/get_invariants.sh
UTF-8
644
3.203125
3
[]
no_license
#! /bin/bash

cd $1
javac -g Main.java
if [ $? -ne 0 ]; then
    exit 1
fi

main_class=Main

command="java daikon.Chicory --daikon-args='--no_text_output --config=../config.txt' --daikon $main_class"
print_invariants="java daikon.PrintInvariants --format java $main_class.inv.gz"

# First run the program with just passing test cases (pass in "pass")
# nohierarchy means to process calls who get entered, but not exited (maybe due to an exception)
eval $command pass
eval $print_invariants > pass_invariants

# Now run the program with both passing and testing cases (pass in "fail")
eval $command both
eval $print_invariants > fail_invariants
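A hypothetical invocation sketch (the directory name and layout below are assumptions, not taken from the source): the target directory needs a Main.java whose main method accepts the "pass"/"both" argument, plus a Daikon config.txt one level above that directory.

    $ ./get_invariants.sh my-subject/
    # afterwards my-subject/ should contain Main.class, Main.inv.gz,
    # pass_invariants and fail_invariants (file names taken from the script)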
true
f1695ea2c41c54ded3acdd7bbe3731c07e126149
Shell
CNJJC/lterouter
/tr069/tr069/InternetGatewayDevice.WEB_GUI.Network.LTE_Setting.Status.UL_Mcs
UTF-8
325
2.734375
3
[]
no_license
#!/bin/bash
. /opt/tr069/common.sh

if [ "$ACTION"x = "get"x ];then
    cpin=`uci -c /opt/config get status.module.sim_stat 2>/dev/null`
    if [ "$cpin"x != "true"x ];then
        echo '--'
        exit 0
    fi
    ul_mcs=`uci -c /opt/config get tr069.radio.ulMcs 2>/dev/null`
    echo $ul_mcs
fi
exit 0
true
d43a0aa6189cb0adee8efd1e3d83b109e3c6913d
Shell
GChalony/DocsBackup
/on_plugged.sh
UTF-8
1,338
4.1875
4
[]
no_license
#!/bin/bash

LOG_FILE=/home/gregnix/Documents/Informatique/bash/DocsBackup/backups.log
DAYS_THRESHOLD=1
BACKUP_FOLDER=/media/gregnix/GregWD/Backups
NOTIFICATION_PIPE=/home/gregnix/Documents/Informatique/bash/DocsBackup/pipe

log() {
    echo $(date +'%Y-%m-%d %H:%M:%S') $1 >> $LOG_FILE
}

sendNotif(){
    echo $1 > $NOTIFICATION_PIPE
}

datediff() {
    d1=$(date -d "$1" +%s)
    d2=$(date -d "$2" +%s)
    echo $(( (d2 - d1) / 86400 ))
}

computeDateDelta(){
    d1=$(date -I --date=${f#*_})    # Parsed date from folder name
    d2=$(date -I)    # Current date
    delta=$(datediff "$d1" "$d2")    # Number of days separating the two
    echo $delta
}

computeNbDifferentFiles(){
    # Compute number of different files
    n=0;
    while read p; do
        folder=$(basename $p);
        d=$(diff -q $p $f/$folder | wc -l);
        n=$(( n + d ));
    done < folders_to_backup.txt
    echo $n
}

log "Starting backup scan"

f=$(echo $BACKUP_FOLDER/Back* | tail -n 1)    # Last backup folder
delta=$(computeDateDelta)
log "$delta jours depuis la dernière backup"
n=$(computeNbDifferentFiles)
log "$n fichiers differents"

if [ $delta -ge $DAYS_THRESHOLD ]
then
    log "Should do backup, sending notification"
    msg="$delta jours depuis la dernière backup ($n fichiers differents)"
    log "Notification: $msg"
    sendNotif "$msg"
else
    log "No need for backup"
fi
log "Scan done"
true
4d5c2f5d7201ddbf08433abe392dcb02ed98b9b7
Shell
HU-TI-V1EOS/v1eos-Alex-D-G
/maakLogin
UTF-8
575
3.859375
4
[]
no_license
#!/bin/bash
echo Geef username:
read name;
if [ -z "$name" ]
then
    name=$(whoami)
fi
echo Welkom $name!
index=0
while [ "$index" -eq 0 ]
do
    echo Geef wachtwoord "(min 8 characters):"
    read password
    if [ "${#password}" -gt 7 ]
    then
        echo Confirm password:
        read newPassword
        if [ "$newPassword" = "$password" ]
        then
            echo Gegevens opgeslagen
            echo Username: $name >> $1
            password=$(basename "$password" | md5sum)
            echo Password: $password >> $1
            index=1
        else
            echo Incorrect wachtwoord
        fi
    else
        echo Password te klein,
    fi
done
echo All "done!"
true
bd228cb43f9631f8f34b65210d4c46331565d3b6
Shell
kyldvs/jsphysics
/scripts/build
UTF-8
347
3.03125
3
[]
no_license
#! /bin/bash -e

# directory this script is located in
SCRIPTS=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )

# some convenient aliases
COMMONER="$SCRIPTS/commoner"
ROOT="$SCRIPTS/.."
BUILD="$ROOT/build"

# for now build everything into the build directory
/usr/bin/env node "$COMMONER" --relativize --use-provides-module "$ROOT/src" "$BUILD"
true
85430e098f5141deeb9ab1800ed2dd40cae7bc63
Shell
KBNLresearch/spatio-temporal-topics
/st/preprocess/set_years.sh
UTF-8
91
2.859375
3
[]
no_license
#!/bin/bash
start=1914
end=1940
for (( i=$start; i<=$end; i++ ))
do
    echo $i
done
true
9fe00349bd1f85f1ee5375c746df265b747ac283
Shell
toddgator/tiberius
/library/rhel7/S22-nessus-agent.sh
UTF-8
2,162
2.984375
3
[]
no_license
#!/bin/bash
#
# Installs/Configures Nessus agent.

. /etc/sdi/thig-settings

rpm -Uvh http://thirdparty.thig.com/tenable/agents/${OSANDVERSION}/NessusAgent-${OSANDVERSION}-latest.rpm

# Remove any old associations
# CMH: fairly certain this doesn't work though
/opt/nessus_agent/sbin/nessuscli agent unlink --force
sleep 5

# Install new agent with new UUID based on network location
export ROLE=$(echo ${HOSTNAME:6:3} | tr [:upper:] [:lower:])
export ENVIRONMENT=$(echo ${HOSTNAME:0:1} | tr [:upper:] [:lower:])
export SUBENVIRONMENT=$(echo ${HOSTNAME:9:1} | tr [:upper:] [:lower:])

echo $ROLE
echo $ENVIRONMENT
echo $SUBENVIRONMENT

case $ENVIRONMENT in
    S|s)
        agent_group="Linux Servers - Prod - Internal"
        ;;
    X|x)
        agent_group="Linux Servers - Prod - DMZ"
        ;;
    B|b)
        case $ROLE in
            web)
                agent_group="Linux Servers - Beta - DMZ"
                ;;
            web)
                agent_group="Linux Servers - Beta - DMZ"
                ;;
            *)
                agent_group="Linux Servers - Beta - Internal"
                ;;
        esac
        ;;
    D|d)
        case $SUBENVIRONMENT in
            O|o)
                agent_group="Linux Servers - Dev - Orange"
                ;;
            Y|y)
                agent_group="Linux Servers - Dev - Yellow"
                ;;
            *)
                agent_group="Linux Servers - Dev - Unknown"
                ;;
        esac
        ;;
    Q|q)
        case $SUBENVIRONMENT in
            U|u)
                agent_group="Linux Servers - QA - UAT"
                ;;
            Q|q)
                agent_group="Linux Servers - QA - UnitQA"
                ;;
            R|r)
                agent_group="Linux Servers - QA - Red"
                ;;
            G|g)
                agent_group="Linux Servers - QA - Green"
                ;;
            P|p)
                agent_group="Linux Servers - QA - Purple"
                ;;
            0)
                agent_group="Linux Servers - QA - Unknown"
                ;;
            *)
                ;;
        esac
        ;;
    *)
        agent_group="Linux Servers - Unknown - Unknown"
        ;;
esac

/opt/nessus_agent/sbin/nessuscli agent link --key=2b1a12aab851259dd90a315f88a5543584ebd4733892788fbccb1560a7b74737 --name=$(hostname) --groups="${agent_group}" --host=nflgnvtnm01.thig.com --port=8834

/sbin/service nessusagent start
chkconfig nessusagent on
true
ab653ecfcad56f8f13962efbeebda8758494e167
Shell
uomsystemsbiology/hbgm
/mtt/bin/trans/ibg2abg_m
UTF-8
2,878
3.875
4
[]
no_license
#! /bin/sh ###################################### ##### Model Transformation Tools ##### ###################################### # Bourne shell script: ibg2abg_m # Based on rbg2abg_m while [ -n "`echo $1 | grep '^-'`" ]; do case $1 in -I ) info=info ;; -nounits ) units=no ;; *) echo "$1 is an invalid argument - ignoring" ;; esac shift done # Set up some vars sys=$1 lbl_file=${sys}_lbl.txt abg_file=${sys}_abg.m err=mtt_error.txt log=ibg2abg_m_${sys}.log # Remove the old log file rm -f ibg2abg_m.log rm -f ${abg_file} # Inform user echo Creating ${abg_file} # Use matrix manipulation to accomplish the transformation ${MATRIX} > ${log} 2> ${err} <<EOF name = '$1' infofile = fopen('mtt_info.txt', 'wt'); errorfile = fopen('mtt_error.txt', 'wt'); ## Interpret data from the ibg representation ibonds = $1_ibg; bonds = ibonds.bonds; [bonds,components,n_vector_bonds] = ibg2abg(name,bonds, \ infofile,errorfile) ## Write the acausal bond graph m-file write_abg(name,bonds,components,n_vector_bonds); EOF # Set the units for the abg file test_units() { grep '^[\s]*[#|%]UNITS' < ${lbl_file} >/dev/null if [ $? = "0" ]; then grep '^[\s]*[#|%]UNITS' < ${lbl_file} |\ gawk '{ printf("mtt_units.sh %s %s %s %s %s\n", sys, $2, $3, $4, $5) }' sys=${sys} | sh | grep ERROR if [ $? = "0" ]; then echo " " *MTT_ERROR: domains and units are not OK - exiting exit 1 else echo " " domains and units are OK fi else echo " no domains or units declared" fi } check_ports_exist() { declared_ports=`grep '^[\s]*[#|%]UNITS' < ${lbl_file} | gawk '{print $2}'` for declared_port in $declared_ports; do grep "${sys}\.ports\.${declared_port}\.type" ${abg_file} >/dev/null if [ $? = "1" ]; then echo "*MTT_ERROR: Units declared for non-existent port ${declared_port}" exit 1 fi done } set_units() { grep '^[\s]*[#|%]UNITS' < ${lbl_file} |\ gawk '{ printf(" %s.ports.%s.domain = \"%s\";\n", sys, $2, $3); printf(" %s.ports.%s.units.effort = \"%s\";\n", sys, $2, $4); printf(" %s.ports.%s.units.flow = \"%s\";\n", sys, $2, $5); }' sys=${sys} } if [ -z "$units" ]; then echo Checking port domains and units check_ports_exist; test_units; echo "## Port domain and units" >> ${abg_file} set_units >> ${abg_file} fi # Append any VAR declarations if [ -f "$1_lbl.txt" ]; then echo "## Explicit variable declarations" >> ${abg_file} grep '^[\s]*[%|#][V|P]AR' $1_lbl.txt | tr '%' '#' >> ${abg_file} grep '^[\s]*[%|#]NOT[V|P]AR' $1_lbl.txt | tr '%' '#' >> ${abg_file} fi # Close off the function echo "endfunction" >> ${abg_file} # Errors and info if [ "$info" = "info" ]; then cat mtt_info.txt fi if mtt_error mtt_error.txt; then exit 0 else exit 1 fi
true
7aa2c53b9a50781b191c539a72da84febe02daff
Shell
iiwoai/php
/7.0/php_nginx/start.sh.bak
UTF-8
1,942
3.3125
3
[ "MIT" ]
permissive
#!/bin/bash
# Disable Strict Host checking for non interactive git clones

# Enable custom nginx config files if they exist
if [ -f /var/www/html/conf/nginx/nginx.conf ]; then
    cp /var/www/html/conf/nginx/nginx.conf /etc/nginx/nginx.conf
fi
if [ -f /var/www/html/conf/nginx/default.conf ]; then
    cp /var/www/html/conf/nginx/default.conf /etc/nginx/conf.d/default.conf
fi

# Display PHP error's or not
if [[ "$ERRORS" != "1" ]] ; then
    echo php_flag[display_errors] = off >> /usr/local/etc/php-fpm.conf
else
    echo php_flag[display_errors] = on >> /usr/local/etc/php-fpm.conf
fi

# Display Version Details or not
if [[ "$HIDE_NGINX_HEADERS" == "0" ]] ; then
    sed -i "s/server_tokens off;/server_tokens on;/g" /etc/nginx/nginx.conf
else
    sed -i "s/expose_php = On/expose_php = Off/g" /usr/local/etc/php-fpm.conf
fi

#Display errors in docker logs
if [ ! -z "$PHP_ERRORS_STDERR" ]; then
    echo "log_errors = On" >> /usr/local/etc/php/php.ini
    echo "error_log = /dev/stderr" >> /usr/local/etc/php/php.ini
fi

# Increase the memory_limit
if [ ! -z "$PHP_MEM_LIMIT" ]; then
    sed -i "s/memory_limit = 128M/memory_limit = ${PHP_MEM_LIMIT}M/g" /usr/local/etc/php/php.ini
fi

# Increase the post_max_size
if [ ! -z "$PHP_POST_MAX_SIZE" ]; then
    sed -i "s/post_max_size = 100M/post_max_size = ${PHP_POST_MAX_SIZE}M/g" /usr/local/etc/php/php.ini
fi

# Increase the upload_max_filesize
if [ ! -z "$PHP_UPLOAD_MAX_FILESIZE" ]; then
    sed -i "s/upload_max_filesize = 100M/upload_max_filesize= ${PHP_UPLOAD_MAX_FILESIZE}M/g" /usr/local/etc/php/php.ini
fi

if [ ! -z "$PUID" ]; then
    if [ -z "$PGID" ]; then
        PGID=${PUID}
    fi
    deluser www-data
    addgroup -g ${PGID} www-data
    adduser -D -S -h /var/cache/www-data -s /sbin/nologin -G www-data -u ${PUID} www-data
else
    if [ -z "$SKIP_CHOWN" ]; then
        chown -Rf www-data.www-data /var/www/html
    fi
fi

# Start supervisord and services
exec /usr/bin/supervisord -n -c /etc/supervisord.conf
true
5d959f01683cf101819573ad155526bf94d92ff1
Shell
hao810/shell
/cut_nginx_logs.sh
UTF-8
795
3.3125
3
[]
no_license
#!/bin/bash
# Initialization
LOGS_PATH=/usr/local/nginx/logs
YESTERDAY=$(date -d "yesterday" +%Y%m%d)

# Rotate the log by day
mv ${LOGS_PATH}/access.log ${LOGS_PATH}/access_${YESTERDAY}.log

# Compress the rotated log
cd ${LOGS_PATH}
tar -zcvf access_${YESTERDAY}.log.tar.gz access_${YESTERDAY}.log --remove-files

# Send the USR1 signal to the nginx master process so it reopens its log file; otherwise it would keep
# writing to the file we just moved. The reason: on Linux the kernel locates a file by its file
# descriptor, so skipping this step makes the log rotation fail.
kill -USR1 `ps axu | grep "nginx: master process" | grep -v grep | awk '{print $2}'`

# Delete logs older than 7 days
cd ${LOGS_PATH}
find . -mtime +30 -name "*20[1-9][3-9]*" | xargs rm -f
# or
#find . -mtime +30 -name "ilanni.com_*" | xargs rm -f
exit 0
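A rotation script like this is normally driven from cron; the entry below is illustrative only and the install path is an assumption, not from the source.

    # hypothetical crontab entry: rotate the nginx access log every night at 00:00
    0 0 * * * /root/scripts/cut_nginx_logs.sh >/dev/null 2>&1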
true
36e7bcfa2b07210d796e7793113c0379a4894f50
Shell
co0p/drone-heroku-container-plugin
/plugin.sh
UTF-8
1,128
3.328125
3
[]
no_license
#!/bin/bash

# needed variables taken from environment
TOKEN=${PLUGIN_TOKEN}
CONTAINER=${PLUGIN_CONTAINER}
APP=${PLUGIN_APP}

# will be used to release app to heroku
PAYLOAD='{ "updates": [ { "type": "web", "docker_image": "IMAGE_ID_TO_REPLACE" } ] }'

echo "using Container=${CONTAINER}"
echo "using APP=${APP}"

# download the container image first
echo "downloading docker image ..."
docker pull ${CONTAINER}

# tag and push docker to heroku registry
echo ${TOKEN} | docker login --username=_ --password-stdin registry.heroku.com
docker tag ${CONTAINER} registry.heroku.com/${APP}/web
docker push registry.heroku.com/${APP}/web

# get the image id and adjust the payload
IMAGE_ID=$(docker inspect ${CONTAINER} --format {{.Id}})
echo "using imageid=${IMAGE_ID}"
PATCH_PAYLOAD=${PAYLOAD/IMAGE_ID_TO_REPLACE/$IMAGE_ID}

# deploy (release) the image to the specified heroku app via api call
curl -u _:${TOKEN} -n -X PATCH https://api.heroku.com/apps/${APP}/formation \
    -d "${PATCH_PAYLOAD}" \
    -H "Content-Type: application/json" \
    -H "Accept: application/vnd.heroku+json; version=3.docker-releases"
true
fba3e40159fa979c57fcc4b25f34dd17ebd5f37c
Shell
jadia/gvisor_analysis
/configs/import_config.sh
UTF-8
3,333
3.15625
3
[]
no_license
#!/bin/bash #### Constants #### # TEST_IMPORT_LIST=() TEST_FILE="test.sh" #### import #### # import django DJANGO_FOLDER_PATH="experiments/import/django/" DJANGO_APP_NAME="django" DJANGO_NUM_TRAILS=5 # import flask FLASK_FOLDER_PATH="experiments/import/flask/" FLASK_APP_NAME="flask" FLASK_NUM_TRAILS=5 # import jinja2 JINJA2_FOLDER_PATH="experiments/import/jinja2/" JINJA2_APP_NAME="jinja2" JINJA2_NUM_TRAILS=5 # import matplotlib MATPLOTLIB_FOLDER_PATH="experiments/import/matplotlib/" MATPLOTLIB_APP_NAME="matplotlib" MATPLOTLIB_NUM_TRAILS=5 # import numpy NUMPY_FOLDER_PATH="experiments/import/numpy/" NUMPY_APP_NAME="numpy" NUMPY_NUM_TRAILS=5 # import pip PIP_FOLDER_PATH="experiments/import/pip/" PIP_APP_NAME="pip" PIP_NUM_TRAILS=5 # import requests REQUESTS_FOLDER_PATH="experiments/import/requests/" REQUESTS_APP_NAME="requests" REQUESTS_NUM_TRAILS=5 # import setuptools SETUPTOOLS_FOLDER_PATH="experiments/import/setuptools/" SETUPTOOLS_APP_NAME="setuptools" SETUPTOOLS_NUM_TRAILS=5 # import sqlalchemy SQLALCHEMY_FOLDER_PATH="experiments/import/sqlalchemy/" SQLALCHEMY_APP_NAME="sqlalchemy" SQLALCHEMY_NUM_TRAILS=5 # import Standard STANDARD_FOLDER_PATH="experiments/import/Standard/" STANDARD_APP_NAME="requests" STANDARD_NUM_TRAILS=5 # import werkzeug WERKZEUG_FOLDER_PATH="experiments/import/werkzeug/" WERKZEUG_APP_NAME="werkzeug" WERKZEUG_NUM_TRAILS=5 # check python version CHECK_VERSION_FOLDER_PATH="experiments/import/pyversion/" CHECK_VERSION_APP_NAME="pyversion" CHECK_VERSION_NUM_TRAILS=1 # Generate list of tests generate_cmds() { RUNTIME=$1 # import django TEST_IMPORT_LIST+=("$DJANGO_FOLDER_PATH$TEST_FILE $DJANGO_FOLDER_PATH $DJANGO_APP_NAME $RUNTIME $DJANGO_NUM_TRAILS") # import flask TEST_IMPORT_LIST+=("$FLASK_FOLDER_PATH$TEST_FILE $FLASK_FOLDER_PATH $FLASK_APP_NAME $RUNTIME $FLASK_NUM_TRAILS") # import jinja2 TEST_IMPORT_LIST+=("$JINJA2_FOLDER_PATH$TEST_FILE $JINJA2_FOLDER_PATH $JINJA2_APP_NAME $RUNTIME $JINJA2_NUM_TRAILS") # import matplotlib TEST_IMPORT_LIST+=("$MATPLOTLIB_FOLDER_PATH$TEST_FILE $MATPLOTLIB_FOLDER_PATH $MATPLOTLIB_APP_NAME $RUNTIME $MATPLOTLIB_NUM_TRAILS") # import numpy TEST_IMPORT_LIST+=("$NUMPY_FOLDER_PATH$TEST_FILE $NUMPY_FOLDER_PATH $NUMPY_APP_NAME $RUNTIME $NUMPY_NUM_TRAILS") # import pip TEST_IMPORT_LIST+=("$PIP_FOLDER_PATH$TEST_FILE $PIP_FOLDER_PATH $PIP_APP_NAME $RUNTIME $PIP_NUM_TRAILS") # import requests TEST_IMPORT_LIST+=("$REQUESTS_FOLDER_PATH$TEST_FILE $REQUESTS_FOLDER_PATH $REQUESTS_APP_NAME $RUNTIME $REQUESTS_NUM_TRAILS") # import setuptools TEST_IMPORT_LIST+=("$SETUPTOOLS_FOLDER_PATH$TEST_FILE $SETUPTOOLS_FOLDER_PATH $SETUPTOOLS_APP_NAME $RUNTIME $SETUPTOOLS_NUM_TRAILS") # import sqlalchemy TEST_IMPORT_LIST+=("$SQLALCHEMY_FOLDER_PATH$TEST_FILE $SQLALCHEMY_FOLDER_PATH $SQLALCHEMY_APP_NAME $RUNTIME $SQLALCHEMY_NUM_TRAILS") # import Standard TEST_IMPORT_LIST+=("$STANDARD_FOLDER_PATH$TEST_FILE $STANDARD_FOLDER_PATH $STANDARD_APP_NAME $RUNTIME $STANDARD_NUM_TRAILS") # import werkzeug TEST_IMPORT_LIST+=("$WERKZEUG_FOLDER_PATH$TEST_FILE $WERKZEUG_FOLDER_PATH $WERKZEUG_APP_NAME $RUNTIME $WERKZEUG_NUM_TRAILS") # check python version TEST_IMPORT_LIST+=("$CHECK_VERSION_FOLDER_PATH$TEST_FILE $CHECK_VERSION_FOLDER_PATH $CHECK_VERSION_APP_NAME $RUNTIME $CHECK_VERSION_NUM_TRAILS") }
true
96609e89f40e6068ff3786bc1be3b9b9609b8849
Shell
JoeAndrew/raspap-tools
/install_wlan_drivers_8812au_88x2bu.sh
UTF-8
1,443
3.40625
3
[]
no_license
#!/bin/bash
# Install Realtek wlan drivers
# - only drivers for rtl8812au and rtl88x2bu devices are installed
# - on older single core Raspis the compilation can take a long time (hours)!
# - the drivers are installed using DKMS and will automatically be recompiled after a kernel update
#   which again might take a long time
#
# CZ 2020

echo "Install essential packages to compile the drivers"
sudo apt --yes install git dkms build-essential raspberrypi-kernel-headers bc

echo "Get the driver packages from Github"
git clone https://github.com/aircrack-ng/rtl8812au.git
git clone https://github.com/cilynx/rtl88x2bu.git

# to avoid compiler error from __DATE__ macros -> comment these lines
find . -name "*.c" -exec grep -li __date__ {} \; | xargs sed -i '/^[^\/]/ s/\(.*__DATE__.*$\)/\/\/\ \1/'

echo "Compile the 8812au driver ..."
cd rtl8812au
sed -i 's/CONFIG_PLATFORM_I386_PC = y/CONFIG_PLATFORM_I386_PC = n/g' Makefile
sed -i 's/CONFIG_PLATFORM_ARM_RPI = n/CONFIG_PLATFORM_ARM_RPI = y/g' Makefile
sudo make dkms_install
cd ~/

echo "Compile the 88x2bu driver ..."
cd rtl88x2bu
sed -i 's/I386_PC = y/I386_PC = n/' Makefile
sed -i 's/ARM_RPI = n/ARM_RPI = y/' Makefile
VER=$(sed -n 's/\PACKAGE_VERSION="\(.*\)"/\1/p' dkms.conf)
sudo rsync -rqhP ./ /usr/src/rtl88x2bu-${VER}
sudo dkms add -m rtl88x2bu -v ${VER}
sudo dkms build -m rtl88x2bu -v ${VER}
sudo dkms install -m rtl88x2bu -v ${VER}
sudo modprobe 88x2bu

echo "Thats it ..."
true
740acf0ce48cf316a5b2d8c891a24b3ea20b357b
Shell
ilia92/small_scripts
/tok_energo.sh
UTF-8
567
2.984375
3
[]
no_license
#!/bin/bash printf "\ntok.sh [old_day] [old_night] [new_day] [new_night]\n\n" day_tarif=0.14184 night_tarif=0.05442 other_plus=0.05398 day_kwh=$(($3-$1)) night_kwh=$(($4-$2)) printf "Day kWh:\t$day_kwh\nNight kWh:\t$night_kwh\n " day_pr=`echo "$day_kwh * ($day_tarif + $other_plus) * 1.2" | bc -l` night_pr=`echo "$night_kwh * ($night_tarif + $other_plus) * 1.2" | bc -l` end_pr=`echo "$day_pr + $night_pr" | bc -l` printf "\n" printf "Day price:\t$day_pr\n" printf "Night price:\t$night_pr\n" printf "\n==========\n" printf "\nEnd price: $end_pr BGN\n\n"
true
db593ec949c1c00afdba754b80c31a6e7ffad24f
Shell
BCD-Metalproducts/packer
/packer.sh
UTF-8
273
2.84375
3
[]
no_license
#! /bin/bash
cd $(dirname "$0")

image="$(echo $1 | cut -d '.' -f1)"
checksum="$(md5sum iso/$image.iso | cut -d ' ' -f1)"

packer build -var "iso_checksum=$checksum" "$image.json"

mkdir -p "images/$image"
mv output-qemu/packer-qemu "images/$image.qcow2"
rm -rf output-qemu
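A hypothetical run (file names assumed, not from the source): because the image name is taken as everything before the first dot of the argument, the ISO base name should avoid extra dots, and a matching Packer template must sit next to the script.

    $ ls iso/
    debian11.iso
    $ ./packer.sh debian11.iso    # uses debian11.json, writes images/debian11.qcow2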
true
8c949714b6aaafc710a4e16a9b629873304eaf89
Shell
marXtevens/CMS-370-GCCLIB
/cp2vm370.sh
UTF-8
979
3
3
[ "LicenseRef-scancode-public-domain", "Unlicense" ]
permissive
#!/bin/sh
# Copy and build source files to VM370

# STOP MSYS2 rewriting directory paths in the docker container
export MSYS2_ARG_CONV_EXCL="vm370:;/opt"

docker kill vm370
docker pull adriansutherland/vm370:builder
docker run --rm -d -p 3270:3270 -p 8038:8038 -p 3505:3505 --name vm370 adriansutherland/vm370:builder
# docker run --rm -d -p 3270:3270 -p 8038:8038 -p 3505:3505 --name vm370 adriansutherland/vm370local:latest

yata -c -f tmp.txt
echo "USERID CMSUSER\n:READ YATA TXT " > yata.txt
cat tmp.txt >> yata.txt
docker cp yata.txt vm370:/opt/hercules/vm370/io
rm yata.txt
rm tmp.txt
docker exec vm370 bash -c "cd /opt/hercules/vm370/io && yata -x"
docker exec vm370 bash -c "rm /opt/hercules/vm370/io/yata.txt"

docker cp cmsbuild.sh vm370:/opt/hercules/vm370/io
docker cp cmsinstall.sh vm370:/opt/hercules/vm370/io
docker exec vm370 bash -c "cd /opt/hercules/vm370/io && ./cmsbuild.sh"
docker exec vm370 bash -c "cd /opt/hercules/vm370/io && ./cmsinstall.sh"
true
a8d75dab0e93bed18688e8f0da40a18b2119cee3
Shell
shineit/iFlyQA
/zookeeper_kafka/script/kafka_check_online.sh
UTF-8
392
3.03125
3
[]
no_license
#!/bin/bash
source ~/.bashrc

DATE=$(date)
LogName=$(date +"%Y%m%d")
WORK_DIR=/opt/research/third_party/kafka
cd $WORK_DIR

#redis check
MM=$(ps -C kafka-server-start.sh --no-headers|wc -l)
if [ $MM = "0" ]; then
    echo "$DATE kafka is not ok" >> $WORK_DIR/kafka_check.$LogName.logs
    ./bin/kafka-server-start.sh config/server_test.properties >> $WORK_DIR/kafka_check.$LogName.logs &
fi
true
cff71571acf2d4e093f99ad82d3fe03ac91fa91d
Shell
tobinsouth/covid19-forecasting-aus
/scenario_modelling/scenario_pipeline.sh
UTF-8
1,215
3.015625
3
[ "MIT" ]
permissive
#!/bin/bash
DATADATE=$1 # Date of NNDSS data file
NSIMS=$2 # Total number of simulations to run

SCENARIO='no_reversion'
SCENARIODATE='2021-07-05' # This doesn't matter for a no-reversion scenario

# Assumes you've already run an EpyReff for the date. If not, uncomment the following line.
# jid_estimator=$(sbatch --parsable sbatch_run_scripts/phoenix_run_estimator.sh ${DATADATE})

# We split the scenario params into the type and the date. It will apply the sec
jid_posteriors_a=$(sbatch --parsable sbatch_run_scripts/phoenix_run_posteriors.sh ${DATADATE} ${SCENARIO} ${SCENARIODATE})

# Here the scenario parameter is just a filename extention.
jid_simulate_a=$(sbatch --parsable --dependency=afterok:$jid_posteriors_a sbatch_run_scripts/phoenix_all_states.sh ${NSIMS} ${DATADATE} Delta "${SCENARIO}${SCENARIODATE}")

# You need to make sure every state has a results/STATEDATE_sim_R_L_daysDelta[SCENARIO].parquet file.
# You can just rename parquets for states that aren't relevant. collate states doesn't work with a single state.
jid_savefigs_and_csv=$(sbatch --parsable --dependency=afterok:$jid_simulate_a sbatch_run_scripts/phoenix_final_plots_csv.sh ${NSIMS} ${DATADATE} Delta "${SCENARIO}${SCENARIODATE}")
true
84299fad7d206e4c4e51e5ab989a71dd95d7a66c
Shell
brianhlin/docker-frontier-squid
/start-frontier-squid.sh
UTF-8
425
3.296875
3
[]
no_license
#!/bin/bash
# Wrapper script for starting & stopping frontier squid from supervisord

# stop squid if supervisord sends a TERM signal
trap "/etc/init.d/frontier-squid stop" TERM

# we tell squid to run in the foreground, but by telling the
# shell to start it in the background and waiting for it we
# prevent the shell from ignoring signals
export SQUID_START_ARGS="--foreground"
/etc/init.d/frontier-squid start &
wait
true
f3389b2b0164aead50fc4c04a0811455f6198d73
Shell
julie777/user_environment
/dot_files/bash/history.sh
UTF-8
1,527
3.90625
4
[]
no_license
#!/bin/bash
## history.sh
##

# get history out of home dir
HISTDIR=${HOME}/.history
[ -d $HISTDIR ] || mkdir --mode=0700 $HISTDIR
[ -d $HISTDIR ] && chmod 0700 $HISTDIR

# each shell has its own history file
HISTFILE=$HISTDIR/history.$$

# don't put duplicate lines or lines starting with space in the history.
# See bash(1) for more options
HISTCONTROL=ignorespace:ignoredups:erasedups

# keep enough history to make it worthwhile
HISTSIZE=100000
HISTFILESIZE=100000

shopt -s histappend
export HISTTIMEFORMAT="%Y-%m-%d %H:%M:%S "

# clean out history more than 30 days old
echo "Removing old history files."
find ${HISTDIR} -type f -mtime +30 -print -delete

# load the previous history
history -r $(ls -t ${HISTDIR} | head -n 5)

# Notes:
# To quote the manpage: If set, the value is executed as a command
# prior to issuing each primary prompt.
# export PROMPT_COMMAND='history -a'
# Save and reload the history after each command finishes
# export PROMPT_COMMAND="history -a;"$PROMPT

# commands
# history -a # append current history to file
# history -c # clear history list
# history -r; $PROMPT_COMMAND" #read the history file and add commands to current history

# So every time my command has finished, it appends the unwritten history
# item to ~/.bash_history before displaying the prompt (only $PS1) again.
#
## close any old history file by zeroing HISTFILESIZE
# HISTFILESIZE=0
#
# history/* | sort | uniq -c |sort -n -r |less
# or
# cut -f1-2 "-d " .history/* | sort | uniq -c |sort -n -r |less
#
true
f96a31f3f0396e08f1672a7f5eb952fef6d440af
Shell
otto-de/queuemock
/examples/transform/receive_response.sh
UTF-8
519
3.078125
3
[ "Apache-2.0" ]
permissive
#!/bin/bash
export AWS_DEFAULT_REGION=eu-central-1

response=$(aws --no-sign-request --endpoint-url http://localhost:9324 sqs receive-message --queue-url http://localhost:9324/queue/Response)

echo "Response from SQS"
echo $response

if [ -z "$response" ]
then
    exit
fi

# use jq -r so the receipt handle is passed to the CLI without surrounding JSON quotes
aws --no-sign-request --endpoint-url http://localhost:9324 sqs delete-message --queue-url http://localhost:9324/queue/Response --receipt-handle $(echo $response | jq -r '.Messages[0].ReceiptHandle')

if [ $? -eq 0 ]
then
    echo "Message deleted"
fi
true
690f41d22dd51b83da2744bf99db244db3425eac
Shell
argsm/miscode
/Shell/rename.sh
UTF-8
228
3.296875
3
[ "MIT" ]
permissive
#!/bin/bash
# rename your files
# If your file contains a space, please remove it before you run this script
for file in `ls | grep .txt`
do
    newfile=`echo $file | sed 's/pattern/replace/g'`
    mv "$file" "${newfile}"
done
true
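A slightly more robust sketch of the same rename loop (not from the original repository; the pattern/replace placeholders are still the user's to fill in). Globbing instead of parsing ls output means filenames containing spaces no longer need to be renamed first:

    for file in *.txt; do
        newfile=$(printf '%s\n' "$file" | sed 's/pattern/replace/g')
        mv -- "$file" "$newfile"
    done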
383ee80285ba4b854d8cb1a8686e6980a4155ec2
Shell
altlinux/girar
/bin/girar-task-run
UTF-8
7,935
4.03125
4
[]
no_license
#!/bin/sh -efu . girar-sh-functions . shell-args PROG0="$PROG" PROG='task run' show_help() { cat <<EOF $PROG - queue a task for build Usage: $PROG [options] [<task id>] If no task id is specified, the latest task created by the current user will be choosen. Options: -m <word> use the given <word> as the reason for this build; -m - read full build reason message from stdin; --dry-run stop right before queueing the task; --fail-early stop building the task after the first error; --fail-late do not stop building the task after the first error; --test-only stop after test, do not commit the task; --commit commit the task after tests; --hurry enable undocumented swift mode; --unhurry disable undocumented swift mode; --help show this text and exit. EOF exit } TEMP="$(getopt -n "$PROG" -o m: -l commit,dry-run,fail-early,fail-late,hurry,test-only,unhurry,help -- "$@")" || show_usage eval set -- "$TEMP" dry_run= fail_mode= test_mode= swift_mode= task_msg= while :; do case "${1-}" in --) shift; break ;; -m) shift [ "$#" -ge 1 ] || show_usage 'not enough arguments.' task_msg="$1" ;; --dry-run) dry_run=1 ;; --fail-early|--fail-late) fail_mode="$1" ;; --commit|--test-only) test_mode="$1" ;; --hurry|--unhurry) swift_mode="$1" ;; --help) show_help ;; *) break; esac shift done if [ "$#" -gt 1 ]; then show_usage 'too many arguments.' fi if [ -n "$task_msg" ]; then task_msg="$(if [ "$task_msg" = '-' ]; then message 'Go ahead and type the text of your message.' cat else printf %s "$task_msg" fi | tr -cd '[:print:]\n' | tr -s '[:space:]' ' ' | head -c 1024)" task_msg="${task_msg% }" printf %s "$task_msg" | tr -cd '[:alpha:]' | grep -qs ^. || fatal 'invalid message' fi cd "$TASKS_DIR" id="$(PROG="$PROG" girar-task-find-current "$@")" cd "$id" # obtain an exclusive lock on the TASKS structure exec <. flock -n 0 || fatal "task #$id is locked" repo="$(cat task/repo)" repo="$(girar-normalize-repo-name "$repo")" # Source per-repository config file. conf="$CONF_DIR/repo/$repo" if [ -s "$conf" ]; then . "$conf" fi owner="$(cat task/owner)" state="$(cat task/state)" case "$state" in NEW|SWEPT|TESTED|EPERM|FAILED) ;; AWAITING|POSTPONED) fatal "task #$id is already scheduled for run" ;; BUILDING|FAILING|PENDING|COMMITTING) fatal "task #$id is a work in progress" ;; DONE) fatal "task #$id is already successfully processed" ;; *) fatal "task #$id is in unrecognized state \"$state\"" ;; esac if [ -z "$test_mode" ]; then if [ -f task/test-only ]; then test_mode='--test-only' else test_mode='--commit' fi fi if [ -z "$swift_mode" ]; then if [ -f task/swift ]; then swift_mode='--hurry' else swift_mode='--unhurry' fi fi if [ "$test_mode $swift_mode" = '--commit --hurry' ]; then fatal 'cannot commit in a hurry' fi [ -n "$(find acl/approved -mindepth 1 -maxdepth 1 -name '[1-7]*' -type d 2>/dev/null)" ] || fatal "cannot run empty task #$id" if [ "$owner" = "$GIRAR_USER" ] || girar-check-superuser "$repo"; then case "$state" in EPERM|SWEPT|TESTED) if [ "$test_mode" = '--commit' ]; then [ -s plan/check_acl ] && girar-check-task-perms "$id" || state=EPERM fi ;; esac else [ "$state" = EPERM ] && [ -s plan/check_acl ] && girar-check-task-perms "$id" || fatal "task #$id belongs to $owner" fi nums=$(gear_nums) delnums= check_copy_del() { local i package action a_repo rc=0 for i in $nums; do [ -s gears/$i/package -a ! 
-s gears/$i/dir ] || continue package="$(cat gears/$i/package)" if [ -s gears/$i/copy_repo ]; then action=copy a_repo="$(cat gears/$i/copy_repo)" else action=delete a_repo=$repo delnums="$delnums $i" fi girar-check-package-in-repo "$package" "$a_repo" || { message "task #$id: subtask #$i: invalid request to $action nonexistent package \`$package' from \`$a_repo'" rc=1 } done return $rc } check_srpm() { local i nevr s_name s_evr r_evr rc=0 for i in $nums; do [ -s gears/$i/srpm -a -s gears/$i/nevr ] || continue nevr="$(cat gears/$i/nevr)" sid="$(cat gears/$i/sid)" GIRAR_ALLOW_SAME_NEVR= if [ -f gears/$i/rebuild ]; then GIRAR_ALLOW_SAME_NEVR=1 fi s_name="${nevr% *}" s_evr="${nevr#* }" r_evr="$(GIRAR_ALLOW_SAME_NEVR=$GIRAR_ALLOW_SAME_NEVR \ girar-check-nevr-in-repo "$s_name" "$s_evr" "$repo")" || { message "task #$id: subtask #$i: package \`$(cat gears/$i/srpm)' is not newer than \`$s_name-$r_evr' in \`$repo'" rc=1 } [ "$rc" -ne 0 ] || girar-check-sid "$s_name" "$s_evr" "$sid" "$repo" || { case $? in 1) message "package \`$s_name' version \`$s_evr' is already built but no source id has been recorded" rc=1 ;; 2) message "package \`$s_name' version \`$s_evr' is already built from a different source" rc=1 ;; esac } done return $rc } check_depends() { [ -f task/depends ] || return 0 { # Obtain an exclusive lock on task/depends file. flock 0 local i k rc=0 for i in $(cat); do k="_$(($i/1024))" if [ -d "$TASKS_DIR/archive/done/$k/$i/task" ]; then girar-task-rmdep "$i" elif [ ! -d "$TASKS_DIR/$i/task" ]; then message "required task #$i not found" rc=1 fi done return $rc } < task/depends } check_copy_del check_srpm check_depends [ -n "$task_msg" ] || [ -s task/message ] || { [ -z "$delnums" ] || show_usage "task #$id contains explicit package removals, please specify a reason for that" [ -z "${GIRAR_ACL_MAINT_GROUP-}" ] || [ "$test_mode" = '--test-only' ] || show_usage "a reason must be specified for '$repo' repository" } try=$(cat task/try 2>/dev/null ||:) if [ -n "$try" ]; then try=$(($try+1)) else try=1 fi iter=1 next_state=AWAITING if [ "$state" = TESTED ] && [ "$test_mode" = '--commit' ]; then next_state=PENDING fi if [ "$next_state" = AWAITING -a -s task/depends ]; then next_state=POSTPONED fi if [ -n "$dry_run" ]; then echo >&2 "task #$id: try #$try could be placed to $next_state queue" exit 0 fi # create group writable directories for build results mkdir -pm3775 install logs mail mail/bugmail mail/cvemail report mkdir -pm2775 arepo build plan # create group writable files required for build (umask 002; touch logs/events.$try.$iter.log task/iter task/try) || false # set/remove fail-early flag if requested case "$fail_mode" in --fail-early) [ -f task/fail-early ] || touch task/fail-early ;; --fail-late) [ ! -f task/fail-early ] || rm task/fail-early ;; esac # set/remove test-only flag if requested case "$test_mode" in --test-only) [ -f task/test-only ] || touch task/test-only ;; --commit) [ ! -f task/test-only ] || rm task/test-only ;; esac # set/remove swift flag if requested case "$swift_mode" in --hurry) [ -f task/swift ] || touch task/swift ;; --unhurry) [ ! 
-f task/swift ] || rm task/swift ;; esac # remove "abort" flag rm -f task/abort # save the message for posterity [ -z "$task_msg" ] || (umask 002; printf '%s\n' "$task_msg" > task/message) || false echo "$GIRAR_USER" > task/run logger -t "$PROG0" "user=$GIRAR_USER task=$id try=$try repo=$repo" trap '' HUP INT QUIT PIPE TERM echo $try > task/try echo 1 > task/iter girar-task-change-state "$id" "$next_state" if [ "$next_state" = POSTPONED ]; then # The status of dependencies listed in task/depends might have changed # since the last check while the task was not in POSTPONED state. # Re-run the check to update task/depends if necessary. check_depends || { next_state=FAILED girar-task-change-state "$id" "$next_state" girar-webapi-task update "$id" exit 1 } [ -s task/depends ] || { next_state=AWAITING girar-task-change-state "$id" "$next_state" } fi girar-webapi-task update "$id" echo >&2 "task #$id: try #$try is $next_state, result will be emailed to $owner@$EMAIL_DOMAIN"
true
3bcf18ef9333725d83ba5b34fd47f83f917f457e
Shell
falconray0704/bak_sysCfg
/ubt/docker/docker.sh
UTF-8
3,037
3.6875
4
[]
no_license
#!/bin/bash

set -o nounset
set -o errexit
#set -x

install_DockerCompose_func() {
    # refer to https://docs.docker.com/compose/install/#install-compose
    sudo curl -L https://github.com/docker/compose/releases/download/1.23.2/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose
    sudo chmod +x /usr/local/bin/docker-compose
    # https://docs.docker.com/compose/completion/#install-command-completion
    sudo curl -L https://raw.githubusercontent.com/docker/compose/1.23.2/contrib/completion/bash/docker-compose -o /etc/bash_completion.d/docker-compose
    # check
    docker-compose --version
}

uninstall_DockerCompose_func() {
    sudo rm /usr/local/bin/docker-compose
}

install_Docker_func() {
    # https://docs.docker.com/install/linux/docker-ce/ubuntu/#install-docker-ce-1
    sudo apt-get update
    sudo apt-get install docker-ce
    sudo docker run hello-world
    # use Docker as a non-root user
    echo "User:$USER"
    sudo usermod -aG docker $USER
    #sudo reboot
}

check_Docker_Env_func() {
    docker info
    docker version
    sudo docker run hello-world
}

uninstall_old_versions_func() {
    sudo apt-get remove docker docker-engine docker.io docker-ce docker-ce-cli
    sudo apt-get purge docker docker-engine docker.io docker-ce docker-ce-cli
    sudo rm -rf /var/lib/docker /var/lib/docker-engine
}

install_repo_func() {
    sudo apt-get install apt-transport-https ca-certificates curl software-properties-common
    curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
    echo "Verify that you now have the key with the fingerprint 9DC8 5822 9FC7 DD38 854A E2D8 8D81 803C 0EBF CD88"
    sudo apt-key fingerprint 0EBFCD88
    sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
    #sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu xenial stable"
}

install_utils_func() {
    sudo apt-get install bridge-utils
}

usage_func() {
    echo "Supported functionalities:"
    echo "[uninstallOldVersions]"
    echo "[installRepo]"
    echo "[installDocker]"
    echo "[checkDocker]"
    echo "[installDockerCompose]"
    echo "[uninstallDockerCompose]"
    echo "[installUtils]"
}

[ $# -lt 1 ] && usage_func && exit

case $1 in
    uninstallOldVersions)
        echo "Uninstalling old versions..."
        uninstall_old_versions_func
        ;;
    installRepo)
        echo "Installing Repo for docker installation..."
        install_repo_func
        ;;
    installDocker)
        echo "Installing Docker-ce ..."
        install_Docker_func
        ;;
    checkDocker)
        echo "Checking docker env..."
        check_Docker_Env_func
        ;;
    installDockerCompose)
        echo "Installing Docker Compose ..."
        install_DockerCompose_func
        ;;
    uninstallDockerCompose)
        echo "Uninstalling Docker Compose ..."
        uninstall_DockerCompose_func
        ;;
    installUtils)
        echo "Installing useful utils ..."
        install_utils_func
        ;;
    *)
        echo "Unknown cmd: $1"
esac
true
a50bea52ccbf0d4fdaa1af5b4394c113ff7d88a9
Shell
cloudfoundry/bosh-system-metrics-server-release
/jobs/system-metrics-server/templates/ctl.erb
UTF-8
682
3.390625
3
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
#!/bin/bash

RUN_DIR=/var/vcap/sys/run/system-metrics-server
LOG_DIR=/var/vcap/sys/log/system-metrics-server
PIDFILE=${RUN_DIR}/system-metrics-server.pid
JOB_DIR=/var/vcap/jobs/system-metrics-server
CONFIG_DIR=${JOB_DIR}/config
CERT_DIR=${JOB_DIR}/config/certs
PACKAGE_DIR=/var/vcap/packages/system-metrics-server

case $1 in
  start)
    mkdir -p $RUN_DIR $LOG_DIR
    chown -R vcap:vcap $RUN_DIR $LOG_DIR

    cd $PACKAGE_DIR

    ulimit -n 8192

    echo $$ > $PIDFILE

    exec chpst -u vcap:vcap ./system-metrics-server \
      --config="${CONFIG_DIR}/config.yml" \
      &>> ${LOG_DIR}/system-metrics-server.log
    ;;

  stop)
    kill `cat $PIDFILE`
    rm -f $PIDFILE
    ;;

  *)
    echo "Usage: ctl {start|stop}"
    ;;
esac
true
68842d7b017285447e05a1c272cd5255b8e6d517
Shell
makenew/serverless-python
/makenew.sh
UTF-8
2,780
4.09375
4
[ "MIT" ]
permissive
#!/usr/bin/env sh set -e set -u find_replace () { git grep --cached -Il '' | xargs sed -i.sedbak -e "$1" find . -name "*.sedbak" -exec rm {} \; } sed_insert () { sed -i.sedbak -e "$2\\"$'\n'"$3"$'\n' $1 rm $1.sedbak } sed_delete () { sed -i.sedbak -e "$2" $1 rm $1.sedbak } check_env () { test -d .git || (echo 'This is not a Git repository. Exiting.' && exit 1) for cmd in ${1}; do command -v ${cmd} >/dev/null 2>&1 || \ (echo "Could not find '$cmd' which is required to continue." && exit 2) done echo echo 'Ready to bootstrap your new project!' echo } stage_env () { echo echo 'Removing origin and tags.' git tag | xargs git tag -d git branch --unset-upstream git remote rm origin echo git rm -f makenew.sh echo echo 'Staging changes.' git checkout poetry.lock git add --all echo echo 'Done!' echo } makenew () { echo 'Answer all prompts.' echo 'There are no defaults.' echo 'Example values are shown in parentheses.' read -p '> Package title (My Package): ' mk_title read -p '> Package name (my-package): ' mk_slug read -p '> Module name (my_package): ' mk_module read -p '> Package description: ' mk_description read -p '> Author name (Linus Torvalds): ' mk_author read -p '> Author email ([email protected]): ' mk_email read -p '> GitHub user or organization name (my-user): ' mk_user read -p '> GitHub repository name (my-repo): ' mk_repo read -p '> Serverless stack name (my-stack): ' mk_stack sed_delete README.rst '18,130d' sed_insert README.rst '18i' 'TODO' old_title="Serverless Python Project Skeleton" old_title_length=${#old_title} new_title_length=${#mk_title} old_title_underline="" new_title_underline="" for ((i=1;i<=old_title_length;i++)); do old_title_underline="${old_title_underline}="; done for ((i=1;i<=new_title_length;i++)); do new_title_underline="${new_title_underline}="; done find_replace "s/^version = \".*/version = \"0.0.0\"/g" find_replace "s/current_version = .*/current_version = 0.0.0/g" find_replace "s/${old_title}/${mk_title}/g" find_replace "s/${old_title_underline}/${new_title_underline}/g" find_replace "s/Package skeleton for a Python Serverless project on AWS Lambda\./${mk_description}/g" find_replace "s/2022 Evan Sosenko/2022 ${mk_author}/g" find_replace "s/Evan Sosenko/${mk_author}/g" find_replace "s/razorx@evansosenko\.com/${mk_email}/g" find_replace "s/makenew\/serverless-python/${mk_user}\/${mk_repo}/g" find_replace "s/makenew-serverless-python/${mk_slug}/g" find_replace "s/makenew_serverless_python/${mk_module}/g" find_replace "s|service: serverless-python|service: ${mk_stack}|g" git mv makenew_serverless_python ${mk_module} echo echo 'Replacing boilerplate.' } check_env 'git read sed xargs' makenew stage_env exit
true
4298358674e424fdff14e39c952b9c42dd5607cc
Shell
webfrogs/ToolKit
/configs/xcode/xcode-configer.sh
UTF-8
431
3.5625
4
[]
no_license
#!/bin/sh

set -e

ShellFolderPath=$(cd $(dirname $0) && pwd)

cd "${ShellFolderPath}"

case "$(uname -s)" in
    Darwin)
        SnippetsFolderPath="$HOME/Library/Developer/Xcode/UserData/CodeSnippets"
        if [[ -d "${SnippetsFolderPath}" ]]; then
            rm -rf "${SnippetsFolderPath}"
        fi
        if [[ -L "${SnippetsFolderPath}" ]]; then
            rm "${SnippetsFolderPath}"
        fi
        ln -s "${ShellFolderPath}/CodeSnippets" "${SnippetsFolderPath}"
        ;;
esac
true
7f65f3eca03568d68bcad5084ff6e6df57107fa6
Shell
activeagenda/aa-patch
/s2a-pomo
UTF-8
303
2.546875
3
[]
no_license
#!/bin/bash
# Merge *.po AA files and produce final *.mo file
cd /var/www/s2a/active_agenda/lang/$1/LC_MESSAGES
shopt -s extglob
msgcat --use-first active_agenda.base.$1.po !(active_agenda|active_agenda.base.*).po | msgattrib --no-fuzzy -o active_agenda.po
msgfmt active_agenda.po -o active_agenda.mo
true
7d50260d71f85effbb73ce39566d461444551965
Shell
jnwatts/espnode
/ssl/clients/file_to_hex.sh
UTF-8
329
3.796875
4
[ "BSD-2-Clause" ]
permissive
#!/bin/bash

if [ $# -lt 1 ]; then
    echo "Usage: ${0} <filename> [<varname>]" >&2
    exit 1
fi

filename="${1}"
varname="${2:-$(basename ${filename} | tr '.' '_')}"

echo "${varname}"
while IFS='' read -r line || [[ -n "${line}" ]]; do
    printf "%s\r\n" "${line}" | od -A n -t x1 | tr -d " \t\n\r"
done < "${filename}"
echo
true
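As a usage sketch (the certificate filename and variable name below are made up), the script prints the chosen variable name on the first line and then the file's bytes as one continuous hex string, with each input line re-terminated by CR LF before dumping:

    ./file_to_hex.sh client.crt client_crt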
d11477b95fa93ca36b987f074dfb8f77f1d40034
Shell
caliburn1994/ubuntu-minikube
/deploy/sub/localstack/test/s3.sh
UTF-8
1,306
3.53125
4
[]
no_license
#!/usr/bin/env bash

PROJECT_ROOT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")"/../../../.. >/dev/null 2>&1 && pwd)" && . "${PROJECT_ROOT_PATH}/deploy/common.sh"
. "${PROJECT_ROOT_PATH}/deploy/sub/localstack/aws/bashrc_aws.sh" || echo_warn "Failed to import bashrc_aws.sh"

bucket_name=test-bucket

function create_bucket() {
  echo_debug "Creating a bucket..."
  aws s3 mb s3://${bucket_name}
}

function remove_bucket() {
  echo "Removing the bucket..."
  aws s3 rb s3://${bucket_name}
}

function ls_bucket() {
  echo "List all buckets..."
  aws s3 ls --profile=localstack
}

function ls_objects() {
  echo "List all objects..."
  aws s3 ls --profile=localstack s3://${bucket_name}
}

upload_file="s3_test.txt"

function upload_s3() {
  printf "Uploading..., the content of the file is: "
  echo "test" | tee ./${upload_file}
  aws s3 cp ${upload_file} s3://${bucket_name}
  rm ${upload_file}
}

function download_s3() {
  printf "Downloading..."
  aws s3 cp s3://${bucket_name}/${upload_file} ./
  printf "the content of the file is: " && cat $upload_file
  rm ${upload_file}
}

function remove_all_objects() {
  echo "Removing all objects..."
  aws s3 rm s3://${bucket_name} --recursive
}

echo_running

create_bucket
ls_bucket
upload_s3
ls_objects
download_s3
remove_all_objects
remove_bucket
ls_bucket
true
c6ae4ab6a387db28bbd2ca3a23104746b1fb7c2c
Shell
ODEX-TOS/packages
/cln/repos/extra-x86_64/PKGBUILD
UTF-8
568
2.78125
3
[ "GPL-1.0-or-later", "MIT" ]
permissive
# Maintainer: Eric Bélanger <[email protected]>

pkgname=cln
pkgver=1.3.6
pkgrel=2
pkgdesc="Class library for numbers"
arch=('x86_64')
url="https://www.ginac.de/CLN/"
license=('GPL')
depends=('gmp')
makedepends=('texlive-core')
source=(https://www.ginac.de/CLN/${pkgname}-${pkgver}.tar.bz2)
sha1sums=('144f15a57f4b25ada0f10ff28458de03578f6f16')

build() {
  cd ${pkgname}-${pkgver}
  ./configure --prefix=/usr
  make all html pdf
}

check() {
  cd ${pkgname}-${pkgver}
  make check
}

package() {
  cd ${pkgname}-${pkgver}
  make DESTDIR="${pkgdir}" install install-html install-pdf
}
true
7c9c0567babe4916c78ce9fecab0c147661f2cc0
Shell
byun-sungwoo/configs
/shell/.my_commands.sh
UTF-8
2,980
2.765625
3
[]
no_license
# (decorative ASCII-art banner omitted)

# Directories
BYUN_CFG=~/Documents/byun-sungwoo/configs
BYUN_BYT=~/Documents/byun-sungwoo/bytris
ECLIPSE_PATH=~/eclipse/java-2019-12/eclipse

# update functions in bashrc
function update-bashrc() {
    source ~/.bashrc
}

# ssh to burrow as dsbyun
function dsbyun@burrow() {
    ssh [email protected]
}

# ssh to silo as dsbyun
function dsbyun@silo() {
    ssh [email protected]
}

# Gogh
function gogh-color() {
    bash -c "$(wget -qO- https://git.io/vQgMr)"
}

# Update vim and tmux to configs
function update-config() {
    og_pwd=$(pwd)
    echo [copying home changes to config repository]
    cd $BYUN_CFG
    cp -v ~/.my_commands.sh ~/.h212.sh ~/.bashrc shell
    cp -v ~/.tmux.conf tmux
    cp -v ~/.vimrc ~/.gvimrc vim
    cp -v ~/.vim/coc-settings.json vim
    echo [push changes to github]
    git add -A
    git commit -m "update-config"
    git push origin master
    cd $og_pwd
}

# Star Wars
function starwars() {
    telnet towel.blinkenlights.nl
}

# Bytris
function bytris() {
    og_pwd=$(pwd)
    cd $BYUN_BYT
    ./compilerun.sh
    cd $og_pwd
}

# Launch Eclipse
function eclipse_launch() {
    og_pwd=$(pwd)
    cd $ECLIPSE_PATH
    ./eclipse
    cd $og_pwd
}
true
a7ed5d9ffa8d2e361385e10d287c27aeeb482a5e
Shell
joiceedattel/Scripts
/Scripts_Mumbai/nondc_script/script/test1_ext.sh
UTF-8
634
3.203125
3
[]
no_license
#!/bin/bash
# variable Declaration & Initialization...
# (no spaces are allowed around "=" in a shell assignment)
extension=`date +%Y%b%d`
backuppath="/home/Abhinav/backup/$extension/cobol/"
echo "$extension"
echo " "
echo " "
echo " moving gnt's to efeap bin "
echo " ================================================ "
echo " "
echo " "
mkdir -p $backuppath
sleep 2
cd /home/Abhinav/todays_delivery/efeap/bin
ls *.gnt 1> gnt.txt
if [ ! -s gnt.txt ]; then
    echo " There are no gnt files to move"
    cd /efeap/bin
else
    \cp gnt.txt /efeap/bin/
    \cp *.gnt $backuppath
    cd /home/Abhinav/todays_delivery/efeap/bin
    mv *.gnt /efeap/bin
fi
true
2906e71919f16e05a62c58faf0cf1725a8df9f53
Shell
DianaChj/dockerized-webpage-test
/d_script.sh
UTF-8
554
3.484375
3
[]
no_license
#!/bin/bash

API_KEY=$1
URL=$2
USERNAME=$3
PASSWORD=$4

node index.js $API_KEY $URL $USERNAME $PASSWORD

counter=0
DIR=/home/node/app/artifacts/
FILES_IN_DIR=("report-0.trace.json" "report-0.devtoolslog.json" "report.html" "screenshot.jpg" "waterfall.png" "video.mp4")

ls -la $DIR

if [ -d "$DIR" ]; then
    echo "$DIR exists"
    for i in "${FILES_IN_DIR[@]}"
    do
        [ -e ${DIR}$i ] && counter=$((counter+1))
    done
    if [ "$counter" -eq "${#FILES_IN_DIR[@]}" ]; then
        echo "exit 0"; exit 0;
    else
        echo "exit 1"; exit 1;
    fi
else
    exit 1;
fi
true
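A hypothetical invocation (all four values are illustrative placeholders; they are passed straight through to node index.js in the same order), after which the script exits 0 only if every expected artifact file is present under /home/node/app/artifacts/:

    ./d_script.sh "$WPT_API_KEY" "https://example.com" testuser "s3cret"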
adad254b59ce6eab30ad9980d406747979bc1b2d
Shell
kaki-xxx/compete_temp
/update_snippet.sh
UTF-8
341
2.828125
3
[]
no_license
#!/usr/bin/env bash
# update snippet for VSCode
# required: jq

snippet_path="/mnt/c/Users/sukam/AppData/Roaming/Code/User/snippets"
snippet_file="${snippet_path}/rust.json"
tmpfile=$(mktemp)

cp ${snippet_file} "${snippet_file}.old"
cargo snippet -t vscode >${tmpfile}
jq -s add ${tmpfile} template_with_placeholder.json >${snippet_file}
true
315af5ce51255eba4647741644b69ba6fffd96d2
Shell
jbeilstenedmands/dials
/.travis/clean-cctbx-dxtbx
UTF-8
275
3.0625
3
[ "BSD-3-Clause" ]
permissive
#!/bin/bash

for repository in cctbx_project dxtbx; do
  if [ -e $HOME/build_dials/modules/${repository}/.git ]; then
    echo Cleaning ${repository} repository
    cd $HOME/build_dials/modules/${repository} || exit 1
    git reset --hard HEAD
    git clean -dffxq
  fi
done
true
384549ff6b1a31d78afd8a592788c26f78925b40
Shell
wenh81/simulations-fiber
/single-polarization/simulationForChayan/interleave4OOK/cluster_powerOOK_0dBm_channelSpacing_100GHz/simulateScenario160.sh
UTF-8
1,197
2.546875
3
[]
no_license
#!/usr/bin/env bash
#SBATCH -p glenn
#SBATCH -A C3SE2018-1-15
#SBATCH -J simulateScenario160
#SBATCH -N 1
#SBATCH -t 0-10:00:00
#SBATCH -o simulateScenario160.stdout
#SBATCH -e simulateScenario160.stderr

module load matlab

cp -r $SLURM_SUBMIT_DIR/* $TMPDIR
cd $TMPDIR

array=(
    "-18;0;83000000000;100000000000"
    "-17;0;83000000000;100000000000"
    "-16;0;83000000000;100000000000"
    "-15;0;83000000000;100000000000"
    "-14;0;83000000000;100000000000"
    "-13;0;83000000000;100000000000"
    "-12;0;83000000000;100000000000"
    "-11;0;83000000000;100000000000"
    "-10;0;83000000000;100000000000"
    "-9;0;83000000000;100000000000"
    "-8;0;83000000000;100000000000"
    "-7;0;83000000000;100000000000"
    "-6;0;83000000000;100000000000"
    "-5;0;83000000000;100000000000"
    "-4;0;83000000000;100000000000"
    "-3;0;83000000000;100000000000"
)

for i in "${array[@]}"
do
    arr=(${i//;/ })
    echo ${arr[0]} ${arr[1]} ${arr[2]} ${arr[3]}
    RunMatlab.sh -o "-nodesktop -nosplash -singleCompThread -r \"simulateScenario(${arr[0]},${arr[1]},${arr[2]},${arr[3]});\"" &
    sleep 0.1
done
wait

mkdir $SLURM_SUBMIT_DIR/simulateScenario160
cp -rf $TMPDIR/results/* $SLURM_SUBMIT_DIR/simulateScenario160
rm -rf $TMPDIR/*
#End of script
true
a3102ff76b07be86e377b36a33b850d7d62bfc06
Shell
maghoff/plaintalk-documentation
/package.sh
UTF-8
125
2.53125
3
[]
no_license
#!/bin/bash
set -e
FILES="*.js *.html *.css *.ico font/*"
# use xz compression (-J) so the archive contents match the .tar.xz filename
tar -Jc --transform 's,^,plaintalk/,' -fplaintalk.tar.xz $FILES
true
bfe42635924c69cf0106f3680acc294e8f9eab22
Shell
xiatian0918/auto_scripts
/工作脚本Shell/add_data_ansible.sh
UTF-8
489
3.046875
3
[]
no_license
#!/bin/sh
md51=$(md5sum /mnt/monitor_data/vm_ip/vm_vm_ip.txt|awk '{print $1}')
sleep 1m
md52=$(md5sum /mnt/monitor_data/vm_ip/vm_vm_ip.txt|awk '{print $1}')
if [ "$md52" != "$md51" ];then
    sed -i '26,$d' /etc/ansible/hosts
    for i in `cat /mnt/monitor_data/vm_ip/vm_vm_ip.txt `
    do
        echo -e "$i ansible_user=\"administrator\" ansible_password=\"Toprs!@#123\" ansible_port=5985 ansible_connection=\"winrm\" ansible_winrm_server_cert_validation=ignore" >>/etc/ansible/hosts
    done
fi
true
b4138796d00c739d5c88e2c199d45ecbb8fbbdbe
Shell
feevars/fatec4SEM
/ISO200-Sistemas Operacionais II/Fernanda/1110481823022/prova/prova_exec01/exec01.sh
UTF-8
393
3.5625
4
[]
no_license
#!/bin/bash
LIGHTCYAN='\033[1;36m'
NOCOLOR='\033[0m'
echo -e "${LIGHTCYAN}Write a script that reads a username from the terminal and prints the groups that user belongs to."
echo "Available users: "
awk -F':' '{ print $1}' /etc/passwd
echo "Enter the user whose groups you want to check:"
read usuario
echo "Groups:"
groups $usuario
echo -e ${NOCOLOR}
true
a3d74f384b63bfb01ae2d53bd885617f70f7ef2f
Shell
aizuddin85/advdev-blue-green
/Infrastructure/bin/cleanup.sh
UTF-8
346
3.015625
3
[]
no_license
#!/bin/bash
# Delete all Homework Projects
if [ "$#" -ne 1 ]; then
    echo "Usage:"
    echo "  $0 GUID"
    exit 1
fi

GUID=$1

echo "Removing all Homework Projects for GUID=$GUID"
oc delete project $GUID-nexus
oc delete project $GUID-sonarqube
oc delete project $GUID-jenkins
oc delete project $GUID-parks-dev
oc delete project $GUID-parks-prod
true
1469693797545ea7b2c0c73098a3d25e352f492c
Shell
chocolat0w0/github-pull-requests
/review.sh
UTF-8
312
2.84375
3
[]
no_license
#!/bin/bash
git fetch --prune > /dev/null 2>&1
git checkout dev > /dev/null 2>&1
git branch -D reviewing > /dev/null 2>&1
script=$(dirname ${0})/prfetch.py
number=`${script} | percol | sed 's/,/_/g' | sed -E 's/[\t ]+/,/g' | cut -d, -f2`
git fetch origin pull/${number}/head:reviewing
git checkout reviewing
true
87542d622187ab030aadcf99f0253f0bd2fc3bde
Shell
Vad1mo/exoscale-cloud-controller-manager
/integtest/test-nlb-ingress.bash
UTF-8
5,716
3.734375
4
[ "Apache-2.0" ]
permissive
#!/usr/bin/env bash set -e source "$INTEGTEST_DIR/test-helpers.bash" echo ">>> TESTING CCM-MANAGED NLB INSTANCE" echo "- Deploying cluster ingress controller" sed -r \ -e "s/%%EXOSCALE_ZONE%%/$EXOSCALE_ZONE/" \ "${INTEGTEST_DIR}/manifests/ingress-nginx.yml.tpl" \ | kubectl $KUBECTL_OPTS apply -f - # It is not possible to `kubectl wait` on an Ingress resource, so we wait until # we see a public IP address associated to the Service Load Balancer... _until_success "test -n \"\$(kubectl --namespace ingress-nginx get svc/ingress-nginx-controller \ -o=jsonpath='{.status.loadBalancer.ingress[].ip}')\"" export INGRESS_NLB_IP=$(kubectl --namespace ingress-nginx get svc/ingress-nginx-controller \ -o=jsonpath='{.status.loadBalancer.ingress[].ip}') export INGRESS_NLB_ID=$(exo nlb list -z $EXOSCALE_ZONE -O text \ | awk "/${INGRESS_NLB_IP}/ { print \$1 }") echo "- Deploying test application" kubectl $KUBECTL_OPTS apply -f "${INTEGTEST_DIR}/manifests/hello-ingress.yml" kubectl $KUBECTL_OPTS wait --for condition=Available deployment.apps/hello ### Test the actual NLB + ingress-nginx controller + service + app chain echo "- End-to-end requests" curl_opts="--retry 10 --retry-delay 5 --retry-connrefused --silent" curl $curl_opts http://${INGRESS_NLB_IP} > /dev/null || (echo "FAIL" ; return 1) curl $curl_opts --insecure https://${INGRESS_NLB_IP} > /dev/null || (echo "FAIL" ; return 1) ### Test the generated NLB services' properties output_template='' output_template+='Name={{ println .Name }}' output_template+='InstancePoolID={{ println .InstancePoolID }}' output_template+='Protocol={{ println .Protocol }}' output_template+='Port={{ println .Port }}' output_template+='Strategy={{ println .Strategy }}' output_template+='HealthcheckMode={{ println .Healthcheck.Mode }}' output_template+='HealthcheckInterval={{ println .Healthcheck.Interval }}' output_template+='HealthcheckTimeout={{ println .Healthcheck.Timeout }}' output_template+='HealthcheckRetries={{ println .Healthcheck.Retries }}' exo nlb show \ --output-template '{{range .Services}}{{println .ID}}{{end}}' \ -z ${EXOSCALE_ZONE} $INGRESS_NLB_ID | while read svcid; do exo nlb service show \ -z $EXOSCALE_ZONE \ --output-template "$output_template" \ $INGRESS_NLB_ID $svcid > "${INTEGTEST_TMP_DIR}/nlb_service_${svcid}" svcport=$(awk -F= '$1 == "Port" {print $2}' < "${INTEGTEST_TMP_DIR}/nlb_service_${svcid}") case $svcport in 80) mv "${INTEGTEST_TMP_DIR}/nlb_service_${svcid}" "${INTEGTEST_TMP_DIR}/nlb_service_http" export INGRESS_NLB_SERVICE_HTTP_ID=$svcid ;; 443) mv "${INTEGTEST_TMP_DIR}/nlb_service_${svcid}" "${INTEGTEST_TMP_DIR}/nlb_service_https" export INGRESS_NLB_SERVICE_HTTPS_ID=$svcid ;; *) echo "error: unexpected service port $svcport, expected either 80 or 443" exit 1 ;; esac done ## HTTP service echo "- Checking ingress HTTP NLB service properties" while read l; do # Split "k=v" formatted line into variables $k and $v k=${l%=*} v=${l#*=} case "${k}" in Name) _assert_string_match "$v" "-80$" ;; InstancePoolID) _assert_string_equal "$v" "$NODEPOOL_ID" ;; Protocol) _assert_string_equal "$v" "tcp" ;; Port) _assert_string_equal "$v" "80" ;; Strategy) _assert_string_equal "$v" "round-robin" ;; HealthcheckMode) _assert_string_equal "$v" "tcp" ;; HealthcheckInterval) _assert_string_equal "$v" "10s" ;; HealthcheckTimeout) _assert_string_equal "$v" "5s" ;; HealthcheckRetries) _assert_string_equal "$v" "1" ;; *) echo "error: unexpected key \"$k\"" ; exit 1 ;; esac done < "${INTEGTEST_TMP_DIR}/nlb_service_http" ## HTTPS service echo "- Checking ingress HTTPS NLB 
service properties" while read l; do # Split "k=v" formatted line into variables $k and $v k=${l%=*} v=${l#*=} case "${k}" in Name) _assert_string_match "$v" "-443$" ;; InstancePoolID) _assert_string_equal "$v" "$NODEPOOL_ID" ;; Protocol) _assert_string_equal "$v" "tcp" ;; Port) _assert_string_equal "$v" "443" ;; Strategy) _assert_string_equal "$v" "round-robin" ;; HealthcheckMode) _assert_string_equal "$v" "tcp" ;; HealthcheckInterval) _assert_string_equal "$v" "10s" ;; HealthcheckTimeout) _assert_string_equal "$v" "5s" ;; HealthcheckRetries) _assert_string_equal "$v" "1" ;; *) echo "error: unexpected key \"$k\"" ; exit 1 ;; esac done < "${INTEGTEST_TMP_DIR}/nlb_service_https" ## Updating ingress controller Service to switch NLB service health checking to "http" mode echo "- Updating ingress NLB services" patch='{"metadata":{"annotations":{' patch+='"service.beta.kubernetes.io/exoscale-loadbalancer-service-healthcheck-mode":"http",' patch+='"service.beta.kubernetes.io/exoscale-loadbalancer-service-healthcheck-uri":"/"' patch+='}}}' kubectl -n ingress-nginx patch svc ingress-nginx-controller -p "$patch" _until_success "test \"\$(exo nlb show \ --output-template '{{range .Services}}{{println .ID}}{{end}}' \ -z \${EXOSCALE_ZONE} \$INGRESS_NLB_ID | while read svcid; do exo nlb service show -z \$EXOSCALE_ZONE --output-template '{{.Healthcheck.Mode}}' \ \$INGRESS_NLB_ID \$svcid ; done)\" == \"httphttp\"" ## Before handing out to the cleanup phase, delete the ingress controller Service in order ## to delete the managed NLB instance, otherwise it won't be possible to delete the ## cluster Nodepool's Instance Pool. echo "- Deleting ingress NLB" sed -r \ -e "s/%%EXOSCALE_ZONE%%/$EXOSCALE_ZONE/" \ "${INTEGTEST_DIR}/manifests/ingress-nginx.yml.tpl" \ | kubectl $KUBECTL_OPTS delete -f - _until_success "test ! \$(exo nlb show -z \${EXOSCALE_ZONE} \$INGRESS_NLB_ID 2>/dev/null)" echo "<<< PASS"
true
5f4e820b5f250259293b58e8391139e71f868039
Shell
clauw87/it_academy_git_repo
/Bash/bash_scripts/guess.sh
UTF-8
438
4.15625
4
[]
no_license
#!/bin/bash
rand=$RANDOM
secret=${rand:0:1}

function game {
    read -p "Guess the number I am thinking of [n] " guess
    while [[ $guess != $secret ]]; do
        read -p "Nope. try again! " guess
    done
    echo "Good job, $secret is it! You are great at guessing!"
}

function generate {
    echo "A random number is: $rand"
    echo -e "Hint: type \033[1m$0 game\033[0m for a fun diversion!"
}

if [[ $1 =~ game|Game|GAME ]]; then
    game
else
    generate
fi
true
e8c5c680cf8fcd6ad30f457cc90cbd8905eb9eae
Shell
sreynen/advent-2016
/7b.sh
UTF-8
447
2.671875
3
[]
no_license
cat 7-input.txt | sed 's/\(\[[a-z]*\]\)\(.\{1,\}\)/-\2-\1/g' | sed 's/\(\[[a-z]*\]\)\(.\{1,\}\)/-\2-\1/g' | sed 's/\(\[[a-z]*\]\)\(.\{1,\}\)/-\2-\1/g' | sed 's/\(\[[a-z]*\]\)\(.\{1,\}\)/-\2-\1/g' | sed 's/\([a-z]\)\1\1/\1--\1/g' | grep -E "^[^\[]*([a-z])([a-z])\1.*\[.*\2\1\2" | wc -l

# Move all [] to the end (repeat 4 times, just to be sure)
# Split all aaa patterns (don't count)
# Find all aba before [, followed by bab after [
# Count lines
true
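A quick sanity check of the final grep pattern (the input lines are made up): an ABA sequence outside the square brackets must be mirrored as BAB inside them for a line to count.

    echo 'aba[bab]' | grep -cE "^[^\[]*([a-z])([a-z])\1.*\[.*\2\1\2"   # prints 1
    echo 'abc[def]' | grep -cE "^[^\[]*([a-z])([a-z])\1.*\[.*\2\1\2"   # prints 0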
65385d2538cd1af1241ea0795f9c753afdeef553
Shell
FauxFaux/debian-control
/s/sbws/sbws_1.0.2-1_all/postrm
UTF-8
883
3.28125
3
[]
no_license
#!/bin/sh
set -e

case $1 in
    purge)
        if which deluser >/dev/null 2>&1 ; then
            deluser --quiet --system sbws > /dev/null || true
        else
            echo >&2 "Not removing sbws system account because deluser command was not found"
        fi
        ;;
esac

# Automatically added by dh_installsystemd/11.5.3
if [ -d /run/systemd/system ]; then
    systemctl --system daemon-reload >/dev/null || true
fi
# End automatically added section
# Automatically added by dh_installsystemd/11.5.3
if [ "$1" = "remove" ]; then
    if [ -x "/usr/bin/deb-systemd-helper" ]; then
        deb-systemd-helper mask 'sbws.service' >/dev/null || true
    fi
fi

if [ "$1" = "purge" ]; then
    if [ -x "/usr/bin/deb-systemd-helper" ]; then
        deb-systemd-helper purge 'sbws.service' >/dev/null || true
        deb-systemd-helper unmask 'sbws.service' >/dev/null || true
    fi
fi
# End automatically added section

exit 0
true
56f727b3aa31b440906fb8b460445f29b9c55b7a
Shell
KsanterX/scripts
/bash/touchf_install.sh
UTF-8
440
3.984375
4
[]
no_license
#!/bin/bash

EX_FOLDER="/usr/local/bin"
SCRIPT_NAME="touchf"

function script_install() {
    if [ ! -d $EX_FOLDER ]
    then
        mkdir $EX_FOLDER
        log "o1"
    fi
    cp $SCRIPT_NAME $EX_FOLDER/$SCRIPT_NAME
    chmod +x $EX_FOLDER/$SCRIPT_NAME
    log "o2"
}

function log() {
    case $1 in
        o1) msg="Directory $EX_FOLDER created." ;;
        o2) msg="Installation of $SCRIPT_NAME finished" ;;
    esac
    echo $msg
}

script_install
true
90298cb18b27fb773ed40b0374c6e8ede4d495b3
Shell
GoC-Spending/fuzzy-tribble
/script/build_container.sh
UTF-8
1,069
2.609375
3
[ "MIT" ]
permissive
#!/usr/bin/env bash
set -ex

apt-get update
apt-get install -y software-properties-common python-software-properties python3-pip
add-apt-repository -y ppa:deadsnakes/ppa
apt-get update && apt-get install -y python3.6 python3.6-dev

export DEBIAN_FRONTEND=noninteractive
apt-get install -y mysql-server-5.7 libmysqlclient-dev

mkdir -p /var/lib/mysql
mkdir -p /var/run/mysqld
mkdir -p /var/log/mysql

sed -i -e "$ a [client]\n\n[mysql]\n\n[mysqld]" /etc/mysql/my.cnf
sed -i -e "s/\(\[client\]\)/\1\ndefault-character-set = utf8/g" /etc/mysql/my.cnf
sed -i -e "s/\(\[mysql\]\)/\1\ndefault-character-set = utf8/g" /etc/mysql/my.cnf
sed -i -e "s/\(\[mysqld\]\)/\1\ninit_connect='SET NAMES utf8'\ncharacter-set-server = utf8\ncollation-server=utf8_unicode_ci\nbind-address = 0.0.0.0/g" /etc/mysql/my.cnf

apt-get clean

update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.5 1
update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.6 2
ln -sf /usr/bin/python3 /usr/bin/python && ln -s /usr/bin/pip3 /usr/bin/pip
pip install --upgrade pip
true
ff2d4f3ae62e430f3a269467ea2f96f69b0b70b7
Shell
benjamin-thomas/scrot-service
/manage/install
UTF-8
690
3.0625
3
[]
no_license
#!/bin/bash

set -e
set -x

function missing_dep {
  echo "Missing dependency: $1, exiting now!"
  exit 1
}

command -v feh || missing_dep "feh"
command -v scrot || missing_dep "scrot"
command -v mogrify || missing_dep "mogrify (from imagemagick)"

cp -s $(pwd)/bin/scrot-service-read ~/.local/bin/

mkdir -p ~/.local/systemd-bin/
mkdir -p ~/.config/systemd/user/

cp -s $(pwd)/bin/scrot-service-run ~/.local/systemd-bin/

# systemd does not like symlinks
cp -l $(pwd)/.config/systemd/user/scrot.service ~/.config/systemd/user/

systemctl --user daemon-reload
systemctl --user start scrot.service
systemctl --user enable scrot.service

journalctl --user-unit scrot.service --since today -f
true
c690a52087d763cde8a5d12bbccfba99062ced1d
Shell
Jensjee/Linux-eindopdracht
/installminion.sh
UTF-8
529
3.3125
3
[]
no_license
#! /usr/bin/bash
read -p "minion name: " MINION_NAAM  # name of the minion
read -p "master IP: " IP_MASTER      # IP of the master

# install salt
curl -L https://bootstrap.saltstack.com -o install_salt.sh
sudo sh install_salt.sh -A $IP_MASTER
sudo service salt-minion stop

# change the minion name
sudo rm -rf /etc/salt/minion_id
sudo touch /etc/salt/minion_id && sudo chmod 777 /etc/salt/minion_id
sudo printf "$MINION_NAAM" > /etc/salt/minion_id
sudo service salt-minion start

echo "Salt minion has been installed and started."
true
dfafd41e735e7f9e6bf8e2970f8c8e2f963ed486
Shell
qedadmin/docker-base-debian
/root/etc/cont-init.d/01_nis.sh
UTF-8
1,173
3.546875
4
[]
no_license
#!/usr/bin/with-contenv bash
set -e

DEFAULTDOMAIN=${DEFAULTDOMAIN:=""}
NISSERVERS=${NISSERVERS:=""}

if [ ! -z "$DEFAULTDOMAIN" ] && [ ! -z "$NISSERVERS" ]; then
    echo "Setting up NIS/YP"
    echo "defaultdomain: '${DEFAULTDOMAIN}'"
    echo "NIS server: '${NISSERVERS}'"

    echo ${DEFAULTDOMAIN} > /etc/defaultdomain
    echo "domain ${DEFAULTDOMAIN} server ${NISSERVERS}" > /etc/yp.conf

    cat <<EOT > /etc/nsswitch.conf
passwd: files nis
group: files nis
shadow: files nis
gshadow: files
hosts: files dns
networks: files
protocols: db files
services: db files
ethers: db files
rpc: db files
netgroup: nis
EOT

    mkdir -p /etc/services.d/rpcbind/
    cat <<EOT > /etc/services.d/rpcbind/run
#!/usr/bin/execlineb -P
/sbin/rpcbind -f
EOT
    chmod +x /etc/services.d/rpcbind/run

    mkdir -p /etc/services.d/nis/
    cat <<EOT > /etc/services.d/nis/run
#!/usr/bin/env bash
exec /usr/sbin/ypbind -n
EOT
    chmod +x /etc/services.d/nis/run
else
    echo "'DEFAULTDOMAIN' & 'NISSERVERS' are undefined. Skipped NIS configuration."
fi
true
e76ceb6f4731053562ba6ebc94b4880f6e4142a2
Shell
rtiangha/kobo-start-menu-09
/KSM09/adds/kbmenu/helpers/activate_dictionary_helper.sh
UTF-8
1,806
3.515625
4
[]
no_license
#!/bin/sh
ksmroot=${ksmroot:-"/adds/kbmenu"}
ksmuser=${ksmuser:-"/mnt/onboard/.adds/kbmenu_user"}
sqliteprog=$ksmroot/tools/sqlite3
database=/mnt/onboard/.kobo/KoboReader.sqlite
dictionary="$1"
languageCodeFile="${ksmuser}/txt/languageCodes.txt"

if [ ! -f "$languageCodeFile" ]; then
    echo "Error:_Cannot_find_language_code file"
    exit;
fi
if [ ! -f "$database" ]; then
    echo "Error:_Cannot_find_database"
    exit;
fi

dict_size=$(stat -c%s $dictionary)
dict_lastmodified=$(stat -c%y $dictionary)
dict_lastmodified=${dict_lastmodified%%.*}
dict_day=${dict_lastmodified%% *}
dict_time=${dict_lastmodified#*" "}
dict_lastmodified=$(echo $dict_day"T"$dict_time)
dictionary=$(basename $dictionary)

if [ "$dictionary" == "dicthtml.zip" ]; then
    dict_langCode1="en"
    dict_langCode2=""
else
    fname=${dictionary/".zip"/}
    dict_suffix=${fname/"dicthtml-"/}
    dict_langCode1=$(echo $dict_suffix | awk -F"-" '{print $1}')
    dict_langCode2=$(echo $dict_suffix | awk -F"-" '{print $2}')
    dict_langName1=$(awk -v key="$dict_langCode1" -F"=" '{if ($1 == key) {print $2}}' $languageCodeFile)
    if [ "$dict_langName1" == "" ]; then
        echo "undefined language code: $fname"
        exit
    fi
    if [ "$dict_langCode2" != "" ]; then
        dict_langName2=$(awk -v key="$dict_langCode2" -F"=" '{if ($1 == key) {print $2}}' $languageCodeFile)
        if [ "$dict_langName2" == "" ]; then
            echo "undefined language code: $fname"
            exit;
        fi
    fi
fi

dict_displayName=$dict_langName1
if [ "$dict_langName2" != "" ]; then
    dict_displayName="$dict_displayName - $dict_langName2"
fi
dict_suffix="-$dict_suffix"

$sqliteprog $database "replace into 'Dictionary' VALUES ('$dict_suffix', '$dict_displayName', 'true', '$dict_size', '$dict_lastmodified', 'true')"
echo "add $dict_displayName; sqlite exit code: $?"
true
685c4ea2df1ac56d805519033df5abc0c35ccf97
Shell
mjwilber/mwilber-dotfiles
/bin/ingester
UTF-8
2,458
4.25
4
[]
no_license
#!/usr/bin/env bash # # Renames specifiex file(s) by using exif data (via exiv2). # to $year/$month/$year-$month-$day/Wilber_$year$month$day_$imageNumber.$ext # # The only thing configurable at this point is the prefix using the -p option # # Requires exiv2 for meta data extraction and setting the file's timestamp # debug=0 files_processed=0 prefix_to_remove=IMG_ prefix_to_add=Wilber while getopts dp: o do case "$o" in d) debug=1;; p) prefix_to_add=$OPTARG;; [?]) echo "Usage: $0 [-d] [-p prefix_to_prepend] file(s) ..." >&2 ; exit 1;; esac done shift $(($OPTIND-1)) function debug { if [ $debug -gt 0 ]; then echo $*; fi } function makeImageDirectoryName { # ( year, month, day ) [ $# -lt 2 ] && echo "Invalid number of args to makeImageDirectoryname: $*" && exit -1; year=$1; month=$2; day=$3; imageDir=$year/$month/$year-$month-$day; } function makeNewImageName { # ( imageName, rev, extension, year, month, day ) [ $# -lt 2 ] && echo "Invalid number of args to makaNewImageName : $*" && exit -1; prefix=$prefix_to_add nm=$1; rev=$2; extension=$3; year=$4; month=$5; day=$6; newFileName=${prefix}_${year}${month}${day}_${nm}${rev}.${extension}; } imageRevs="abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"; echo ""; for picFile in $*; do if [ -d $picFile ]; then continue; fi debug "##### $picFile"; fileExt=${picFile#*.}; imageName=${picFile%.*} # Remove prefix_to_remove if one is defined [ -n $prefix_to_remove ] && imageName=${imageName#*${prefix_to_remove}}; exifOrigDate=`exiv2 pr $picFile | grep timestamp`; year=${exifOrigDate:18:4} month=${exifOrigDate:23:2} dayOfMonth=${exifOrigDate:26:2} makeImageDirectoryName $year $month $dayOfMonth; debug " ext=[$fileExt] imageName=[$imageName] ts=[$exifOrigDate] imageDir=[$imageDir]"; i=0; makeNewImageName "$imageName" "" "$fileExt" "$year" "$month" "$dayOfMonth"; debug " newImageName=[$newFileName]"; while [ -e $imageDir/$newFileName ] && [ $i -lt ${#imageRevs} ]; do imageRev=${imageRevs:i:1}; makeNewImageName "$imageName" "$imageRev" "$fileExt" "$year" "$month" "$dayOfMonth"; ((i+=1)); done [ ! -d $imageDir ] && mkdir -p $imageDir && echo "Created $imageDir"; cp $picFile $imageDir/$newFileName; exiv2 -T $imageDir/$newFileName; printf "%25s ---> %s\n" $picFile $newFileName; ((files_processed+=1)); debug "---- Done with $picFile"; done echo ""; echo "Finished -- $files_processed files processed!"; exit 0;
true
e14af180fc38a74afff881e30e83342c9eeee952
Shell
jokeyrhyme/dotfiles
/config/bashrc
UTF-8
522
3.21875
3
[]
no_license
#! /usr/bin/env bash if [ -f ~/.dotfiles/config/environment.sh ]; then # shellcheck source=./config/environment.sh . ~/.dotfiles/config/environment.sh fi # If not running interactively, don't do anything else [[ $- != *i* ]] && return if [ -f ~/.dotfiles/config/interactive.sh ]; then # shellcheck source=./config/interactive.sh . ~/.dotfiles/config/interactive.sh fi if [ -f ~/.dotfiles/config/interactive.bash ]; then # shellcheck source=./config/interactive.bash . ~/.dotfiles/config/interactive.bash fi
true
ce670395653c5d6b235d04183ae4589a62b83063
Shell
jjm3x3/git-shell-setup
/.git-hooks/prepare-commit-msg
UTF-8
628
2.703125
3
[ "MIT" ]
permissive
#!/bin/sh

# prepare-commit-msg hook does:
# - Add placeholder
# - Insert placeholder at default text in commit body
# - Append placeholder for conflict merge commits only
# - Massage commit message
# - Delete default text in commit body
# - Add line after branch
# - Add semver instructions to commit message
# - Remind to update README.md if not
# - Massage merge commit message
# - Remove "remote-tracking" from merge commit subject
# - Uncomment Conflicts section in merge commit body
# - Delete default text in merge commit body
# - Remove placeholder

export hookname=$(bn "$0")

~/.git-hooks/_hook-main-wrapper "$@"
true
2c805dc290b3f565ed5482563f3fb645a7546cfe
Shell
544759698/code_util
/bin/shell_20230116.sh
UTF-8
981
3.109375
3
[]
no_license
# 1 vim
vim file1
# 1.1 command mode
dd                   # delete a line
# 1.2 last-line (ex) mode
:set nu/nonu         # show / hide line numbers
:/world (n N) nohl   # search for "world" (next / previous match); cancel highlighting
:shift+g / gg        # jump to the end / beginning of the file

# 2 awk
# split on commas, take every value in the second column and de-duplicate it
cat all_part| awk -F, '{print $2}'| sort -u > all_part_uni

# 3 sort
sort a.txt b.txt | uniq -d        # sort a.txt and b.txt; -d prints lines occurring more than once, i.e. the intersection
sort a.txt b.txt | uniq           # sort a.txt and b.txt; uniq keeps each line once, i.e. the union
sort a.txt b.txt b.txt | uniq -u  # with b.txt listed twice, -u prints lines appearing only once, i.e. the difference a.txt - b.txt

# 4 df du
df -h
du -h -d 1 <directory>  # show the size of directories and files one level below the given directory
true
d6ed25f1b42df60e9df471aed458977a0ed7668b
Shell
xuyueshu/YZQREPO
/yhsjzgtx31/Checkpoint/mission_overdue_rate.sh
UTF-8
3,579
2.859375
3
[ "Unlicense" ]
permissive
#!/bin/sh cd `dirname $0` source ./config.sh exec_dir mission_overdue_rate TARGET_TABLE=ef_assess_point_data_value_info #DATA_NAME=任务逾期率 DATA_NO=RWYQL function import_table() { find_mysql_data " set names utf8; INSERT INTO ${TARGET_TABLE} (data_no,data_name,first_index_type,data_cycle,data_type,data_time,data_value,is_new,create_time) select b.data_no as data_no, b.data_name as data_name, b.first_index_type as first_index_type, b.data_cycle as data_cycle, b.data_type as data_type, a.start_date data_time, cast(a.num1/a.num2*100 as decimal(9,2)) as data_value, 'NO' as is_new, FROM_UNIXTIME(UNIX_TIMESTAMP()) as create_time from ( select a.start_date, sum(case when a.task_status!='YWC' then 1 else 0 end ) as num1, count(a.task_no) as num2 from tm_task_info a left join pm_college_plan_info b on a.plan_no=b.plan_no where b.plan_layer='G_TEACHER' and b.status='NORMAL' and a.task_delete='NORMAL' and a.start_date between '${BEGIN_TIME}' and '${END_TIME}' group by a.start_date ) a ,base_assess_point_data_info b where b.data_no='${DATA_NO}' " fn_log "创建表——任务逾期率:${TARGET_TABLE}" } #查找${TARGET_TABLE}表中时间最近的数据,${TARGET_TABLE}表的is_new的NO改成YES function export_table() { #删除库中最新数据 clear_mysql_data "delete from ${TARGET_TABLE} where DATA_NO='${DATA_NO}' and data_time between '${BEGIN_TIME}' and '${END_TIME}'" #导入最新数据 import_table #查找最新的数据 DATE_TIME=`find_mysql_data "select max(data_time) from ${TARGET_TABLE} where data_no='${DATA_NO}' ;" ` #以后的每一次执行都会修改这个is_new字段,所以全部改成NO clear_mysql_data "update ${TARGET_TABLE} set is_new = 'NO' where data_no='${DATA_NO}';" #is_new的NO改成YES clear_mysql_data "update ${TARGET_TABLE} set is_new = 'YES' where data_time='${DATE_TIME}' and data_no='${DATA_NO}';" } #base_assess_point_data_info 判断质控点'${DATA_NO}'是不是开启 function alter_table(){ is_open=`find_mysql_data "select data_status from base_assess_point_data_info where data_no ='${DATA_NO}';" ` if [ $is_open == "OPEN" ] then echo " 质控点开启 " export_table #script_status的NO改成YES clear_mysql_data "update base_assess_point_data_info set script_status= 'YES' where data_no='${DATA_NO}';" else echo "质控点没有开启 " fi } #抽取begin_time/end_time之间的数据,根据第一学期和第二学期进行数据抽取 function JX_getYearData() { find_mysql_data " select date_format(DATE_FORMAT(begin_time,'%Y-%m-%d %H:%i:%s'),'%Y-%m-%d') as begin_time, date_format(DATE_FORMAT(end_time,'%Y-%m-%d %H:%i:%s'),'%Y-%m-%d') as end_time from base_school_calendar_info where FROM_UNIXTIME(UNIX_TIMESTAMP()) BETWEEN begin_time and end_time;"| while read -a row do BEGIN_TIME=${row[0]} END_TIME=${row[1]} if [ ! -n "$BEGIN_TIME" ]; then echo "SEMESTER_YEAR IS NULL!" else echo "SEMESTER_YEAR IS NOT NULL" echo ${BEGIN_TIME}"=="${END_TIME} alter_table fi done } JX_getYearData finish
true
26c71664f72fb2024e614c975f37a2c065107cea
Shell
splunk/splunk-connect-for-ethereum
/examples/k8s/generate-from-helm
UTF-8
899
3.84375
4
[ "Apache-2.0" ]
permissive
#!/bin/bash
set -euo pipefail

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ETHLOGGER_CHART_DIR="$SCRIPT_DIR/../helm/ethlogger"

tmpdir=$(mktemp -d)
trap "rm -rf $tmpdir" EXIT

render() {
    local outdir=$1
    helm template \
        ethlogger \
        --namespace ethlogger \
        --set networkName=dev \
        --set chainName=dev \
        --set fullnameOverride=ethlogger \
        --output-dir $tmpdir \
        $ETHLOGGER_CHART_DIR

    templates="configmap.yaml deployment.yaml serviceaccount.yaml"
    for f in $templates; do
        fullpath=$tmpdir/ethlogger/templates/$f
        cat $fullpath | \
            sed -e 's/[[:space:]]*$//' | \
            grep -v 'chart: ethlogger' | \
            grep -v 'namespace: ethlogger' | \
            grep -v 'heritage: Helm' | \
            grep -iv 'release: ethlogger' \
            > $outdir/$f
        echo "Wrote resource $f"
    done
    rm -rf $tmpdir/*
}

render $SCRIPT_DIR
true
2bf979d8f116182c6780f82fe73353840b1c3344
Shell
iot-chalmers/iot-testbed
/raspi/scripts/cc2538dk/serialdump.sh
UTF-8
257
2.640625
3
[]
no_license
#!/bin/bash
log_path=$1
tty_path=`ls /dev/serial/by-id/usb-FTDI_FT231X_USB_UART_*`
nohup ~/scripts/cc2538dk/contiki-serialdump -b115200 $tty_path | ~/scripts/cc2538dk/contiki-timestamp > $log_path & > /dev/null 2> /dev/null
sleep 1
ps | grep "$! "
exit $?
true
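A hypothetical invocation (the log path is illustrative): the serial dump runs in the background and the script's exit status reflects whether that background job is still alive after one second.

    ./serialdump.sh /tmp/cc2538dk-node1.log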
766afa06ff9980dc9df46ad22c8c4ae7712d9b0f
Shell
davisking/dlib
/docs/makedocs
UTF-8
9,275
3.890625
4
[ "BSL-1.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
#!/bin/bash . bash_helper_functions report_failure () { echo " **** failed to complete **** " exit 1 } htmlify_python_file () { pygmentize -f html -O full,style=vs $1 > $1.html } add_links_between_example_programs() { EXT=$3 # Get the list of example program filenames pushd $1 > /dev/null FILES=`ls *.$EXT` popd > /dev/null # Now run sed on all the htmlified example programs to add the links between them. for f in $FILES do #escape the . in the filename escaped_name=`echo $f | sed -e 's/\./\\\./g'` pushd $1 > /dev/null # get a list of all the html example files that contain the name matching_html_files=`grep -e "\b$escaped_name\b" -l *.$EXT | sed -e "s/\.$EXT\b/.$EXT.html/g"` popd > /dev/null # now actually run sed to add the links pushd $2 > /dev/null if [ -n "$matching_html_files" ] then sed -i -e "s/\b$escaped_name\b/<a href=\"$escaped_name.html\">$escaped_name<\/a>/g" $matching_html_files fi popd > /dev/null done } htmlify_cmake () { echo "<html><head><title>" > $1.html; echo $1 >> $1.html; echo "</title></head><body bgcolor='white'><pre>" >> $1.html; # line 1: make comments green # line 2: add links into the add_subdirectory directives # line 3: make literal quotes red # line 4: make the directives show up blue # line 5: make variable names show up purple sed -e "s/^\([ ]*#.*\)/<font color='#009900'>\1<\/font>/" \ -e "s/add_subdirectory\([ ]*\)(\([ ]*\)\([^ ]*\)\([ ]*\)\([^ )]*\)/add_subdirectory\1(\2\3\4<a href='\3\/CMakeLists.txt.html'>\5<\/a>/" \ -e "s/\"\([^\"]*\)\"/\"<font color='#CC0000'>\1<\/font>\"/g" \ -e "s/^\([ ]*[^( ]*[ ]*\)(/<font color='blue'>\1<\/font>(/" \ -e "s/{\([^}]*\)}/\{<font color='#BB00BB'>\1<\/font>}/g" \ $1 >> $1.html; echo "</pre></body></html>" >> $1.html; } htmlify_python() { FILES=`\ls $1/*.py` for i in $FILES do htmlify_python_file ${i} rm ${i} done } makedocs () { REVNUM_FILE=.logger_revnum LOGGER_REVNUM=`cat $REVNUM_FILE` XSLT_OPTIONS="--nodtdattr --nonet --novalid" DATE_TODAY=`date --date= "+%b %d, %Y"`; if [ "$1" = "makerel" ] then RELEASE=${MAJOR_NUM}.${MINOR_NUM} else RELEASE=${MAJOR_NUM}.${MINOR_NUM}.${PATCH_NUM} fi; # get XML versions of the change logs echo Getting the git change logs for $LOGGER_REVNUM..HEAD git_logs_as_xml $LOGGER_REVNUM..HEAD docs/git-logs.xml || report_failure # grab a clean copy of the repository rm -rf docs/cache rm -rf docs/web rm -rf docs/chm/docs cd .. mkdir -p docs/docs/cache git archive HEAD | tar -xC docs/docs/cache cd docs rm -rf docs/cache/docs CHANGESET_ID=`git log -1 --pretty=format:%H` echo "#ifndef DLIB_REVISION_H" > docs/cache/dlib/revision.h echo "// Version: " $RELEASE >> docs/cache/dlib/revision.h echo "// Date: " `date` >> docs/cache/dlib/revision.h echo "// Git Changeset ID: " $CHANGESET_ID >> docs/cache/dlib/revision.h echo "#define DLIB_MAJOR_VERSION " $MAJOR_NUM >> docs/cache/dlib/revision.h echo "#define DLIB_MINOR_VERSION " $MINOR_NUM >> docs/cache/dlib/revision.h echo "#define DLIB_PATCH_VERSION " $PATCH_NUM >> docs/cache/dlib/revision.h echo "#endif" >> docs/cache/dlib/revision.h rm -rf docs/web rm -rf docs/chm/docs mkdir docs/web mkdir docs/chm/docs echo Creating HTML version of the source htmlify --title "dlib C++ Library - " -i docs/cache -o htmltemp.$$ add_links_between_example_programs docs/cache/examples htmltemp.$$/examples cpp echo Copying files around... 
cp -r htmltemp.$$/dlib docs/web cp -r htmltemp.$$/dlib docs/chm/docs cp -r htmltemp.$$/examples/* docs/web cp -r htmltemp.$$/examples/* docs/chm/docs rm -rf htmltemp.$$ # create python docs unless you say ./makedocs fast if [ "$1" != "fast" ] then cd .. python setup.py build || report_failure python setup.py build_sphinx -c docs/docs/python --build-dir docs/sphinx.$$ || report_failure # sphinx will read in the _dlib_pybind11 module and use that to name everything. But that's # not what we want, so we rename that to dlib everywhere. You would think sphinx would be # able to deal with the dlib/__init__.py file and this wouldn't be necessary, but that # doesn't seem to be the case. find docs/sphinx.$$ -type f | xargs sed -i -e "s/_dlib_pybind11/dlib/g" cd docs cp -r sphinx.$$/html docs/web/python mv sphinx.$$/html docs/chm/docs/python rm -rf sphinx.$$ fi; cp docs/cache/dlib/test/makefile docs/web/dlib/test cp docs/cache/dlib/test/makefile docs/chm/docs/dlib/test cp docs/cache/dlib/test/CMakeLists.txt docs/web/dlib/test cp docs/cache/dlib/test/CMakeLists.txt docs/chm/docs/dlib/test cp docs/cache/dlib/CMakeLists.txt docs/web/dlib cp docs/cache/dlib/CMakeLists.txt docs/chm/docs/dlib mkdir docs/web/examples || report_failure cp docs/cache/examples/CMakeLists.txt docs/web/examples mkdir docs/chm/docs/examples || report_failure cp docs/cache/examples/CMakeLists.txt docs/chm/docs/examples cp docs/cache/python_examples/*.py docs/chm/docs/ cp docs/cache/python_examples/*.py docs/web/ htmlify_python docs/chm/docs/ htmlify_python docs/web/ add_links_between_example_programs docs/cache/python_examples docs/chm/docs py add_links_between_example_programs docs/cache/python_examples docs/web py cp docs/*.gif docs/web cp docs/*.gif docs/chm/docs cp docs/ml_guide.svg docs/web cp docs/ml_guide.svg docs/chm/docs cp -r docs/guipics docs/web cp -r docs/guipics docs/chm/docs cp -r docs/images docs/web cp -r docs/images docs/chm/docs cp docs/*.html docs/web cp docs/*.html docs/chm/docs cp docs/*.css docs/web cp docs/*.css docs/chm/docs cp docs/*.js docs/web cp docs/*.js docs/chm/docs cp docs/*.png docs/web cp docs/*.pdf docs/web cp docs/*.jpg docs/web cp docs/*.webm docs/web cp docs/*.ico docs/web cp docs/*.png docs/chm/docs cp docs/*.pdf docs/chm/docs cp docs/*.jpg docs/chm/docs cp docs/*.webm docs/chm/docs cp docs/*.ico docs/chm/docs cd docs/chm/docs || report_failure htmlify_cmake dlib/CMakeLists.txt; htmlify_cmake examples/CMakeLists.txt; htmlify_cmake dlib/test/CMakeLists.txt; cd ../../.. || report_failure cd docs/web || report_failure htmlify_cmake dlib/CMakeLists.txt; htmlify_cmake examples/CMakeLists.txt; htmlify_cmake dlib/test/CMakeLists.txt; cd ../.. || report_failure find docs/web docs/chm -name "CMakeLists.txt" | xargs rm # generate the HTML docs echo Generate HTML docs from XML and XSLT style sheet FILES=`\ls docs/*.xml | grep -v main_menu.xml` for i in $FILES do # The last modified date for these files should always be the release date (regardless of when the actual xml files were modified). 
if [ "${i}" = "docs/release_notes.xml" -o ${i} = "docs/old_release_notes.xml" \ -o ${i} = "docs/change_log.xml" -o ${i} = "docs/index.xml" ] then DATE=$DATE_TODAY else get_last_modified_date ${i} DATE=$RESULT fi; #make web version cat docs/stylesheet.xsl | sed -e 's/"is_chm">[^<]*/"is_chm">false/' -e "s/_CURRENT_RELEASE_/$RELEASE/" -e "s/_LAST_MODIFIED_DATE_/$DATE/" \ > docs/stylesheet.$$.xsl OUT_FILE=$(echo ${i} | sed -e "s/\.xml/\.html/" | sed -e "s/docs\//docs\/web\//") xsltproc $XSLT_OPTIONS -o $OUT_FILE docs/stylesheet.$$.xsl ${i} #make chm version cat docs/stylesheet.xsl | sed -e 's/"is_chm">[^<]*/"is_chm">true/' -e "s/_CURRENT_RELEASE_/$RELEASE/" -e "s/_LAST_MODIFIED_DATE_/$DATE/" \ > docs/stylesheet.$$.xsl OUT_FILE=$(echo ${i} | sed -e "s/\.xml/\.html/" | sed -e "s/docs\//docs\/chm\/docs\//") xsltproc $XSLT_OPTIONS -o $OUT_FILE docs/stylesheet.$$.xsl ${i} rm docs/stylesheet.$$.xsl done # Delete doc type header stuff # FILES=`find docs/chm docs/web -iname "*.html" -type f` # for i in $FILES # do # sed -e '/<!DOCTYPE/d' ${i} > temp.$$; # mv temp.$$ ${i}; # done echo Generating sitemap cd docs/web || report_failure find . -name "*.html" | awk '{ print "http://dlib.net" substr($1,2)}' > sitemap.txt # make the main index have a 301 redirect. Use php to do this echo '<?php if ($_SERVER["SERVER_NAME"] != "dlib.net") { header("Location: http://dlib.net/", true, 301); exit; } ?>' > index.php cat index.html >> index.php rm index.html cd ../.. } ./testenv || report_failure # build all the html documentation makedocs $1 # now make the table of contents for the chm file echo Generating the table of contents for the chm file xsltproc -o docs/chm/Table\ of\ Contents.hhc docs/chm/htmlhelp_stylesheet.xsl docs/chm/toc.xml
true
b9bc90bfc6fde74f06779690ef0ef5cac7054699
Shell
stjordanis/rapids-compose
/scripts/06-setup-python-intellisense.sh
UTF-8
2,669
3.328125
3
[]
no_license
#!/usr/bin/env bash

set -Eeo pipefail

# Resolve the compose repo root (this script lives in compose/scripts/)
# and the RAPIDS workspace root one level above it.
COMPOSE_HOME=$(dirname $(realpath "$0"))
COMPOSE_HOME=$(realpath "$COMPOSE_HOME/../")
RAPIDS_HOME=$(realpath "$COMPOSE_HOME/../")

cd "$RAPIDS_HOME"

PYTHON_DIRS="${PYTHON_DIRS:-rmm/python raft/python \
    cuml/python cugraph/python cudf/python/cudf cudf/python/dask_cudf cuspatial/python/cuspatial}"

# Write the shared Pylance settings that every RAPIDS python project will symlink to
cat << EOF > "$COMPOSE_HOME/etc/rapids/.vscode/python-settings.json"
{
    "python.analysis.memory.keepLibraryAst": true,
    "python.analysis.memory.keepLibraryLocalVariables": true,
    "python.autoComplete.extraPaths": [
        "$RAPIDS_HOME/rmm/python",
        "$RAPIDS_HOME/raft/python",
        "$RAPIDS_HOME/cudf/python/cudf",
        "$RAPIDS_HOME/cudf/python/dask_cudf",
        "$RAPIDS_HOME/cuml/python",
        "$RAPIDS_HOME/cugraph/python",
        "$RAPIDS_HOME/cuspatial/python/cuspatial",
    ],
    "python.languageServer": "Pylance",
    "python.condaPath": "$COMPOSE_HOME/etc/conda/bin/conda",
    "python.pythonPath": "$COMPOSE_HOME/etc/conda/envs/rapids/bin/python"
}
EOF

for PYDIR in $PYTHON_DIRS; do
    mkdir -p "$RAPIDS_HOME/$PYDIR/.vscode"
    # Symlink the python-settings.json file from compose/etc/rapids/
    ln -f -s "$COMPOSE_HOME/etc/rapids/.vscode/python-settings.json" "$RAPIDS_HOME/$PYDIR/.vscode/settings.json"
    # Write a per-project launch.json that attaches the debugger to port 5678
    cat << EOF > "$RAPIDS_HOME/$PYDIR/.vscode/launch.json"
{
    "version": "0.2.0",
    "configurations": [
        {
            "name": "$PYDIR",
            "type": "python",
            "request": "attach",
            "port": 5678,
            "host": "localhost",
            "pathMappings": [{
                "localRoot": "\${workspaceFolder}",
                "remoteRoot": "\${workspaceFolder}"
            }]
        }
    ]
}
EOF
done

ask_before_install() {
    while true; do
        read -p "$1 " CHOICE </dev/tty
        case $CHOICE in
            [Nn]* ) break;;
            [Yy]* ) eval $2; break;;
            * ) echo "Please answer 'y' or 'n'";;
        esac
    done
}

install_vscode_extensions() {
    CODE="$1"
    for EXT in ${@:2}; do
        if [ -z "$($CODE --list-extensions | grep $EXT)" ]; then
            ask_before_install \
                "Missing $CODE extension $EXT. Install $EXT now? (y/n)" \
                "$CODE --install-extension $EXT"
        fi
    done
}

# Offer to install the Python and Cython extensions for whichever VS Code builds are on PATH
for CODE in code code-insiders; do
    if [ "$(which $CODE)" != "" ]; then
        install_vscode_extensions "$CODE" \
            "ms-python.python" \
            "guyskk.language-cython";
    fi
done
true
55e997d8b92dd026a63f6e3376840f9047e4164b
Shell
petronny/aur3-mirror
/facebookplugin/PKGBUILD
UTF-8
718
2.53125
3
[]
no_license
# Maintainer: renart <lerenart at gmail dot com>

pkgname=facebookplugin
pkgver=1.0.1
pkgrel=1
pkgdesc='Facebook photo upload plugin.'
url='http://www.facebook.com'
arch=('i686')
depends=('libjpeg6' 'libpng12' 'libtiff4')
license=('custom')
source=(http://www.facebook.com/fbplugin/linux-x86/install/FacebookPlugIn-${pkgver}.tar.gz)
md5sums=('e0134daf9dffef6c85ceb3209d812823')

build() {
  cd ${srcdir}/FacebookPlugIn-${pkgver}

  install -d -m755 ${pkgdir}/usr/lib/mozilla/plugins/ || return 1
  install -m755 libnpfbook_1_0_1.so ${pkgdir}/usr/lib/mozilla/plugins/ || return 1

  install -d -m755 ${pkgdir}/usr/share/licenses/${pkgname}/ || return 1
  install -m644 LICENSE ${pkgdir}/usr/share/licenses/${pkgname}/LICENSE || return 1
}
true
97b55a6dc4e9e64e795223c34678073409310ad0
Shell
spacelephant/www.spacelephant.org
/publish_ghpages.sh
UTF-8
969
3.703125
4
[]
no_license
#!/bin/bash

PUBLISH_BRANCH="gh-pages"

if [[ $(git status -s) ]]
then
    echo "The working directory is dirty. Please commit any pending changes."
    exit 1;
fi

echo "Deleting old publication"
rm -rf public
mkdir public
git worktree prune
rm -rf .git/worktrees/public/

echo "Checking out ${PUBLISH_BRANCH} branch into public"
git worktree add -B $PUBLISH_BRANCH public origin/$PUBLISH_BRANCH

echo "Removing existing files"
rm -rf public/*

echo "Generating site"
yarn build

echo "Updating ${PUBLISH_BRANCH} branch"
cd public
echo www.spacelephant.org > CNAME
git add --all && git commit -m "[CI]: Publishing master (publish.sh) | $(date -u '+%Y-%m-%d %H:%M:%S %Z')"

echo "Push to ${PUBLISH_BRANCH} branch"
while true; do
    read -p "Do you really want to deploy that version on ${PUBLISH_BRANCH}?" yn
    case $yn in
        [Yy]* ) git push origin ${PUBLISH_BRANCH}; break;;
        [Nn]* ) exit;;
        * ) echo "Please answer yes or no.";;
    esac
done
true
7395a514ebb4a1b90df86feadec8cea306d3f2cd
Shell
wido/cloudstack-package-docker-deb
/dists/ubuntu1404/build.sh
UTF-8
342
3.09375
3
[]
no_license
#!/bin/bash

set -e

if [ ! -d "cloudstack" ]; then
    echo "Could not find directory 'cloudstack'"
    exit 1
fi

cd cloudstack

VERSION=$(grep '^ <version>' pom.xml| cut -d'>' -f2 |cut -d'<' -f1)
DISTCODE=$(lsb_release -sc)

dch -b -v ${VERSION}~${DISTCODE} -u low -m "Apache CloudStack Release $VERSION"

dpkg-buildpackage -j2 -b -uc -us
true