| Column | Type | Range / Values |
|---|---|---|
| content | string | length 1 – 103k (nullable) |
| path | string | length 8 – 216 |
| filename | string | length 2 – 179 |
| language | string | 15 classes |
| size_bytes | int64 | 2 – 189k |
| quality_score | float64 | 0.5 – 0.95 |
| complexity | float64 | 0 – 1 |
| documentation_ratio | float64 | 0 – 1 |
| repository | string | 5 classes |
| stars | int64 | 0 – 1k |
| created_date | date string | 2023-07-10 19:21:08 – 2025-07-09 19:11:45 |
| license | string | 4 classes |
| is_test | bool | 2 classes |
| file_hash | string | length 32 |
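Each sample row below shows the `content` cell first (with newlines escaped as `\n`), followed by the remaining columns in the order of the schema table, separated by `|`. As a minimal sketch of how rows with this schema could be loaded and filtered, assuming the sample is serialized as JSON Lines under a hypothetical file name `dataset_sample.jsonl` (neither the serialization nor the file name is given by the sample itself; only the column names come from the schema):

```python
import json
from pathlib import Path

# Hypothetical file name and format (JSONL, one row per line) -- an assumption,
# not something stated by the dataset sample.
SAMPLE_FILE = Path("dataset_sample.jsonl")

def iter_yaml_rows(path: Path, min_quality: float = 0.8):
    """Yield YAML rows whose quality_score meets the threshold.

    Skips rows with a null `content` cell, which the schema allows.
    """
    with path.open(encoding="utf-8") as fh:
        for line in fh:
            row = json.loads(line)
            if row.get("content") is None:
                continue  # `content` is nullable in the schema
            if row["language"] == "YAML" and row["quality_score"] >= min_quality:
                yield row

if __name__ == "__main__":
    for row in iter_yaml_rows(SAMPLE_FILE):
        # In JSON the escaped "\n" sequences decode back to real newlines,
        # so row["content"] is the original YAML file text.
        print(row["path"], row["size_bytes"], row["license"])
```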
steps:\n - bash: |\n rm global.json\n cp build/ci/net9/global.json global.json\n displayName: "Using .NET 9 global.json"\n\n - pwsh: |\n echo "##vso[task.setvariable variable=DOTNET_INSTALL_DIR;]$(Build.SourcesDirectory)/.dotnet"\n displayName: "Set DOTNET_INSTALL_DIR for macOS/Linux"\n condition: or(eq(variables['Agent.OS'], 'Linux'), eq(variables['Agent.OS'], 'Darwin'))\n\n - pwsh: |\n echo "##vso[task.setvariable variable=DOTNET_INSTALL_DIR;]$(Build.SourcesDirectory)\.dotnet"\n displayName: "Set DOTNET_INSTALL_DIR for Windows"\n condition: eq(variables['Agent.OS'], 'Windows_NT')\n\n - task: UseDotNet@2\n displayName: 'Use .NET SDK'\n retryCountOnTaskFailure: 3\n inputs:\n packageType: sdk\n useGlobalJson: true\n includePreviewVersions: true\n installationPath: $(DOTNET_INSTALL_DIR)\n
| dataset_sample\yaml\unoplatform_uno\build\ci\templates\dotnet-install.yml | dotnet-install.yml | YAML | 841 | 0.8 | 0.083333 | 0 | vue-tools | 83 | 2024-08-31T16:28:15.076970 | MIT | false | 47242d07d98da5ee7584032fb639d9e1 |
parameters:\n UnoCheckParameters: ''\n\nsteps:\n\n - template: dotnet-install.yml\n\n - bash: |\n ubuntu_release=`lsb_release -rs`\n wget https://packages.microsoft.com/config/ubuntu/${ubuntu_release}/packages-microsoft-prod.deb -O packages-microsoft-prod.deb\n sudo dpkg -i packages-microsoft-prod.deb\n sudo apt-get install apt-transport-https\n sudo apt-get update\n sudo apt-get install -y msopenjdk-11\n sudo update-java-alternatives --set msopenjdk-11-amd64\n displayName: Install OpenJDK 11\n retryCountOnTaskFailure: 3\n\n - task: Cache@2\n condition: eq(variables['enable_dotnet_cache'], 'true')\n inputs:\n key: dotnet | "$(Agent.OS)" | "$(Agent.JobName)" | "$(GlobalUnoCheckVersion)" | "${{ parameters.UnoCheckParameters }}"\n path: $(DOTNET_INSTALL_DIR)\n displayName: Set Cache for dotnet install\n\n - bash: |\n dotnet tool update --global uno.check --version $(GlobalUnoCheckVersion) --add-source https://api.nuget.org/v3/index.json\n uno-check --verbose --ci --non-interactive --fix --skip gtk3 --skip dotnetnewunotemplates --skip androidemulator --skip maui --skip unosdk --pre-major ${{ parameters.UnoCheckParameters }}\n displayName: Install .NET Workloads\n retryCountOnTaskFailure: 3\n
| dataset_sample\yaml\unoplatform_uno\build\ci\templates\dotnet-mobile-install-linux.yml | dotnet-mobile-install-linux.yml | YAML | 1,257 | 0.8 | 0.033333 | 0 | react-lib | 61 | 2024-03-12T04:56:41.088982 | BSD-3-Clause | false | 7b287c0e116e8dcf0eea72909393fff9 |
parameters:\n UnoCheckParameters: ''\n\nsteps:\n\n - template: dotnet-install.yml\n\n - template: jdk-setup.yml\n\n - task: Cache@2\n condition: eq(variables['enable_dotnet_cache'], 'true')\n inputs:\n key: dotnet | "$(Agent.OS)" | "$(Agent.JobName)" | "$(GlobalUnoCheckVersion)" | "${{ parameters.UnoCheckParameters }}"\n path: $(DOTNET_INSTALL_DIR)\n displayName: Set Cache for dotnet install\n\n - powershell: |\n & dotnet tool update --global uno.check --version $(GlobalUnoCheckVersion) --add-source https://api.nuget.org/v3/index.json\n & uno-check -v --ci --non-interactive --fix --skip androidemulator --skip xcode --skip gtk3 --skip vswin --skip maui --skip vsmac --skip unosdk --skip dotnetnewunotemplates --pre-major ${{ parameters.UnoCheckParameters }}\n displayName: Install .NET Workloads\n errorActionPreference: continue\n ignoreLASTEXITCODE: true\n retryCountOnTaskFailure: 3\n
| dataset_sample\yaml\unoplatform_uno\build\ci\templates\dotnet-mobile-install-mac.yml | dotnet-mobile-install-mac.yml | YAML | 917 | 0.8 | 0.043478 | 0 | node-utils | 42 | 2025-05-15T13:55:17.265173 | Apache-2.0 | false | 021f6f5761ba2a9cb208555344d653e4 |
parameters:\n UnoCheckParameters: ''\n\nsteps:\n\n - template: dotnet-install.yml\n\n - template: jdk-setup.yml\n\n - task: Cache@2\n condition: eq(variables['enable_dotnet_cache'], 'true')\n inputs:\n key: dotnet | "$(Agent.OS)" | "$(Agent.JobName)" | "$(GlobalUnoCheckVersion)" | "${{ parameters.UnoCheckParameters }}"\n path: $(DOTNET_INSTALL_DIR)\n displayName: Set Cache for dotnet install\n\n - powershell: |\n & dotnet tool update --global uno.check --version $(GlobalUnoCheckVersion) --add-source https://api.nuget.org/v3/index.json\n & uno-check -v --ci --non-interactive --fix --skip androidemulator --skip xcode --skip gtk3 --skip maui --skip vswin --skip vsmac --skip unosdk --skip dotnetnewunotemplates --pre-major ${{ parameters.UnoCheckParameters }}\n displayName: Install .NET Workloads\n errorActionPreference: continue\n ignoreLASTEXITCODE: true\n retryCountOnTaskFailure: 3\n
| dataset_sample\yaml\unoplatform_uno\build\ci\templates\dotnet-mobile-install-windows.yml | dotnet-mobile-install-windows.yml | YAML | 917 | 0.8 | 0.043478 | 0 | awesome-app | 253 | 2024-04-02T22:31:01.202499 | Apache-2.0 | false | 7cd7d9f734ddef015c167e9baebd4ebd |
steps:\n\n - bash: |\n npm install -g [email protected]\n conventional-changelog -p angular -u -r 2 -o "build/CHANGELOG.md"\n # sed -r -i 's/\[(.*?)\]\((.*?)\)/\2/g' build/CHANGELOG.md\n displayName: 'Generate CHANGELOG.MD'\n \n - task: CopyFiles@2\n displayName: Copy changelog\n condition: always()\n inputs:\n SourceFolder: $(build.sourcesdirectory)/build\n Contents: 'CHANGELOG.md'\n TargetFolder: $(build.artifactstagingdirectory)\n\n - task: PublishBuildArtifacts@1\n condition: always()\n retryCountOnTaskFailure: 3\n inputs:\n PathtoPublish: $(build.artifactstagingdirectory)\n ArtifactName: NugetPackages-Artifacts\n ArtifactType: Container\n
| dataset_sample\yaml\unoplatform_uno\build\ci\templates\generate-changelog.yml | generate-changelog.yml | YAML | 715 | 0.8 | 0 | 0.05 | awesome-app | 57 | 2023-10-17T13:02:44.638547 | BSD-3-Clause | false | 2eb78de0ff9cb6433ac244677cdfb91c |
parameters:\n skipCacheDownload: ''\n\nsteps:\n\n - pwsh: |\n dotnet tool uninstall nbgv -g\n dotnet tool install nbgv -g --version 3.6.139\n nbgv cloud -a\n displayName: Version with NBGV\n name: NBGV\n\n - pwsh: |\n $InformationalVersion="$(NBGV_SemVer2)+$(NBGV_GitCommitId)-$(NBGV_BuildingRef)".Replace("refs/heads/","").Replace("/","-")\n echo "##vso[task.setvariable variable=NBGV_AssemblyInformationalVersion;]$InformationalVersion"\n echo "##vso[task.setvariable variable=NBGV_AssemblyInformationalVersion;isOutput=true;]$InformationalVersion"\n echo "Informational Version: $InformationalVersion"\n\n displayName: Generate Informational Version\n name: NBGV_AssemblyInformationalVersion\n\n - powershell: | \n $outputFile="$(System.artifactsdirectory)/git_variables"\n echo "##vso[task.setvariable variable=NBGV_CloudBuildNumber]$(NBGV_CloudBuildNumber)" > $outputFile\n echo "##vso[task.setvariable variable=NBGV_VersionFileFound]$(NBGV_VersionFileFound)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_AssemblyVersion]$(NBGV_AssemblyVersion)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_AssemblyFileVersion]$(NBGV_AssemblyFileVersion)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_AssemblyInformationalVersion]$(NBGV_AssemblyInformationalVersion)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_PublicRelease]$(NBGV_PublicRelease)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_PrereleaseVersion]$(NBGV_PrereleaseVersion)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_PrereleaseVersionNoLeadingHyphen]$(NBGV_PrereleaseVersionNoLeadingHyphen)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_SimpleVersion]$(NBGV_SimpleVersion)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_BuildNumber]$(NBGV_BuildNumber)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_VersionRevision]$(NBGV_VersionRevision)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_MajorMinorVersion]$(NBGV_MajorMinorVersion)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_VersionMajor]$(NBGV_VersionMajor)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_VersionMinor]$(NBGV_VersionMinor)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_GitCommitId]$(NBGV_GitCommitId)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_VersionSourceCommitId]$(NBGV_GitCommitId)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_GitCommitIdShort]$(NBGV_GitCommitIdShort)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_GitCommitDate]$(NBGV_GitCommitDate)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_VersionHeight]$(NBGV_VersionHeight)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_VersionHeightOffset]$(NBGV_VersionHeightOffset)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_BuildingRef]$(NBGV_BuildingRef)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_Version]$(NBGV_Version)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_BuildMetadataFragment]$(NBGV_BuildMetadataFragment)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_NuGetPackageVersion]$(NBGV_NuGetPackageVersion)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_ChocolateyPackageVersion]$(NBGV_ChocolateyPackageVersion)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_NpmPackageVersion]$(NBGV_NpmPackageVersion)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_SemVer1]$(NBGV_SemVer1)" >> $outputFile\n 
echo "##vso[task.setvariable variable=NBGV_SemVer2]$(NBGV_SemVer2)" >> $outputFile\n echo "##vso[task.setvariable variable=NBGV_SemVer1NumericIdentifierPadding]$(NBGV_SemVer1NumericIdentifierPadding)" >> $outputFile\n displayName: Generate NBGV Variables\n\n - task: PublishBuildArtifacts@1\n retryCountOnTaskFailure: 3\n displayName: Publish variables\n inputs:\n PathtoPublish: $(System.artifactsdirectory)\n ArtifactName: nbgv_cache\n ArtifactType: Container\n
| dataset_sample\yaml\unoplatform_uno\build\ci\templates\gitversion-run.yml | gitversion-run.yml | YAML | 4,184 | 0.8 | 0 | 0 | python-kit | 932 | 2024-07-14T15:20:14.767091 | Apache-2.0 | false | e5a74004b71b3db1f33721889ed432b1 |
steps:\n\n - task: DownloadBuildArtifacts@0\n displayName: 'Download NBGV Cache'\n retryCountOnTaskFailure: 3\n inputs:\n artifactName: nbgv_cache\n downloadPath: '$(build.sourcesdirectory)/build'\n\n # Sets the variables generated by the NBGV task\n # We cannot reliably use pipeline output variables as they can't\n # be used anywhere by variable sections declarations.\n - powershell: |\n $NBGVVariables = Get-Content $(build.sourcesdirectory)/build/nbgv_cache/git_variables\n foreach ($line in $NBGVVariables)\n {\n Write-Host $line\n }\n\n displayName: 'Set NBGV variables'\n\n\n\n\n
| dataset_sample\yaml\unoplatform_uno\build\ci\templates\gitversion.yml | gitversion.yml | YAML | 622 | 0.8 | 0 | 0.176471 | vue-tools | 12 | 2023-11-06T02:44:58.387190 | BSD-3-Clause | false | baf2e9e259dd15c5a7ddf86e43dfad5c |
parameters:\n GtkRuntimeUrl: 'https://github.com/tschoonj/GTK-for-Windows-Runtime-Environment-Installer/releases/download/2020-07-15/gtk3-runtime-3.24.20-2020-07-15-ts-win64.exe'\n\nsteps:\n - powershell: |\n brew install gtk+3\n\n displayName: Install GTK3 runtime\n retryCountOnTaskFailure: 3\n
| dataset_sample\yaml\unoplatform_uno\build\ci\templates\gtk-install-macos.yml | gtk-install-macos.yml | YAML | 302 | 0.8 | 0.111111 | 0 | react-lib | 493 | 2024-06-09T19:51:04.084139 | GPL-3.0 | false | 8de9f7e24f441b778389978334840081 |
parameters:\n GtkRuntimeUrl: 'https://github.com/tschoonj/GTK-for-Windows-Runtime-Environment-Installer/releases/download/2020-07-15/gtk3-runtime-3.24.20-2020-07-15-ts-win64.exe'\n\nsteps:\n - powershell: |\n $source = "${{ parameters.GtkRuntimeUrl }}"\n $destination = "gtk3-runtime.exe"\n Invoke-WebRequest $source -OutFile $destination\n Start-Process -FilePath "gtk3-runtime.exe" -Wait -PassThru -ArgumentList /S\n Write-Host "##vso[task.setvariable variable=PATH;]${env:PATH};C:\Program Files\GTK3-Runtime Win64\bin";\n\n displayName: Install GTK3 runtime\n
| dataset_sample\yaml\unoplatform_uno\build\ci\templates\gtk-install-windows.yml | gtk-install-windows.yml | YAML | 590 | 0.8 | 0.083333 | 0 | awesome-app | 482 | 2023-09-15T22:02:19.128511 | GPL-3.0 | false | 87b0aaff3d3e84ed0e3772f6ce87c6ad |
parameters:\n xCodeRoot: ''\n\nsteps:\n - bash: |\n echo 'xCode Root to ${{parameters.xCodeRoot}}'\n echo '##vso[task.setvariable variable=MD_APPLE_SDK_ROOT;]'${{parameters.xCodeRoot}}\n sudo xcode-select --switch ${{parameters.xCodeRoot}}/Contents/Developer\n\n displayName: Select Xcode\n
| dataset_sample\yaml\unoplatform_uno\build\ci\templates\ios-build-select-version.yml | ios-build-select-version.yml | YAML | 300 | 0.8 | 0.1 | 0 | python-kit | 900 | 2025-06-16T11:43:56.475289 | MIT | false | 7b1d124287717252de73b1176ae76c9a |
steps:\n\n - pwsh: |\n echo "##vso[task.setvariable variable=JAVA_HOME]$(JAVA_HOME_11_X64)"\n echo "##vso[task.setvariable variable=JavaSdkDirectory]$(JAVA_HOME_11_X64)"\n displayName: Select JDK 11\n
| dataset_sample\yaml\unoplatform_uno\build\ci\templates\jdk-setup.yml | jdk-setup.yml | YAML | 208 | 0.8 | 0 | 0 | awesome-app | 258 | 2024-01-31T09:25:06.267944 | BSD-3-Clause | false | 5eafea639eed27221edf3ff95140489a |
steps:\n\n - bash: |\n sudo apt-get update\n sudo apt-get install -y xvfb fluxbox vlc libvlc-dev libgtk-3-dev libwebkit2gtk-4.0-dev libwebkit2gtk-4.1-dev\n displayName: Install Linux dependencies\n
| dataset_sample\yaml\unoplatform_uno\build\ci\templates\linux-install-deps.yml | linux-install-deps.yml | YAML | 205 | 0.7 | 0 | 0 | node-utils | 812 | 2025-02-04T21:07:47.404799 | Apache-2.0 | false | 3114adee9cdf12fd59f154363c01ac90 |
steps:\n\n - bash: |\n sudo apt-get update\n sudo apt-get install -y zip\n displayName: Install zip\n
| dataset_sample\yaml\unoplatform_uno\build\ci\templates\linux-install-zip.yml | linux-install-zip.yml | YAML | 109 | 0.7 | 0 | 0 | node-utils | 647 | 2025-04-23T00:04:22.469442 | Apache-2.0 | false | 07fe731ee74942a28e34d9b31d3e2d4a |
parameters:\n nugetPackages: '$(Pipeline.Workspace)/.nuget/packages'\n\nsteps:\n\n - powershell: |\n New-Item -ItemType Directory -Force "${{ parameters.nugetPackages }}"\n displayName: Create nuget packages cache folder\n\n - script: |\n compact /c "${{ parameters.nugetPackages }}"\n\n condition: and(succeeded(), eq( variables['Agent.OS'], 'Windows_NT' ))\n displayName: Compress nuget package cache folder\n\n - task: Cache@2\n condition: eq(variables['enable_package_cache'], 'true')\n inputs:\n key: nuget | $(Agent.OS) | $(Agent.JobName) | $(build.sourcesdirectory)/**/*.csproj | $(build.sourcesdirectory)/**/Directory.Build.targets | $(build.sourcesdirectory)/**/Directory.Build.props\n path: ${{ parameters.nugetPackages }}\n displayName: Restore NuGet packages cache\n
| dataset_sample\yaml\unoplatform_uno\build\ci\templates\nuget-cache.yml | nuget-cache.yml | YAML | 799 | 0.8 | 0 | 0 | vue-tools | 471 | 2025-03-15T14:16:30.321496 | BSD-3-Clause | false | 761f06e97fe03d0da839571f15237d06 |
steps:\n - task: NuGetCommand@2\n displayName: 'Publish to Uno Dev Feed'\n condition: and(succeeded(), eq(variables['Build.SourceBranch'], 'refs/heads/master'), not(eq(variables['build.reason'], 'PullRequest')))\n inputs:\n command: 'push'\n packagesToPush: '$(Pipeline.Workspace)/Nuget_Packages/**/*.nupkg'\n nuGetFeedType: 'internal'\n publishVstsFeed: '1dd81cbd-cb35-41de-a570-b0df3571a196/e7ce08df-613a-41a3-8449-d42784dd45ce'\n allowPackageConflicts: true\n verbosityPush: 'Normal'\n\n - task: NuGetCommand@2\n displayName: 'Publish to Uno Feature Feed'\n condition: and(succeeded(), startsWith(variables['Build.SourceBranch'], 'refs/heads/feature'), not(eq(variables['build.reason'], 'PullRequest')))\n inputs:\n command: 'push'\n packagesToPush: '$(Pipeline.Workspace)/Nuget_Packages/**/*.nupkg'\n nuGetFeedType: 'internal'\n publishVstsFeed: '1dd81cbd-cb35-41de-a570-b0df3571a196/d26abad4-c545-4e56-9ac7-fe42c6311c28'\n allowPackageConflicts: true\n verbosityPush: 'Normal'\n
| dataset_sample\yaml\unoplatform_uno\build\ci\templates\nuget-publish-dev.yml | nuget-publish-dev.yml | YAML | 1,041 | 0.8 | 0 | 0 | vue-tools | 500 | 2024-08-21T18:26:46.715608 | Apache-2.0 | false | c7ce8707620b5a5c0c7fdb88f035f085 |
steps:\n - task: NuGetCommand@2\n condition: and(succeeded(), not(startsWith(variables['Build.SourceBranch'], 'refs/heads/feature')), not(eq(variables['build.reason'], 'PullRequest')))\n displayName: 'Publish to nuget.org'\n inputs:\n command: 'push'\n packagesToPush: '$(Pipeline.Workspace)/Nuget_Packages/**/*.nupkg'\n nuGetFeedType: 'external'\n publishFeedCredentials: 'nuget.org uno packages'\n verbosityPush: 'Normal'\n\n
| dataset_sample\yaml\unoplatform_uno\build\ci\templates\nuget-publish-public.yml | nuget-publish-public.yml | YAML | 451 | 0.8 | 0 | 0 | node-utils | 145 | 2024-09-11T15:27:06.029789 | BSD-3-Clause | false | 3ca84b9d90f72cbdf5d944918f61cc1b |
parameters:\n nugetPackages: '$(Pipeline.Workspace)/.nuget/packages'\n\nsteps:\n\n - pwsh: dotnet nuget add source https://pkgs.dev.azure.com/uno-platform/1dd81cbd-cb35-41de-a570-b0df3571a196/_packaging/unoplatformdev/nuget/v3/index.json -n "uno-dev"\n displayName: Add dev feed source\n
| dataset_sample\yaml\unoplatform_uno\build\ci\templates\uno-dev-feed.yml | uno-dev-feed.yml | YAML | 285 | 0.8 | 0 | 0 | awesome-app | 126 | 2023-10-04T07:38:31.450974 | Apache-2.0 | false | 3ebfa70992312724ccffdd6e48369057 |
- name: Get Started\n topicHref: xref:Uno.GetStarted\n items:\n - name: Setup\n topicHref: xref:Uno.GetStarted.vs2022\n items:\n - name: Visual Studio 2022 for Windows\n href: xref:Uno.GetStarted.vs2022\n - name: VS Code\n href: xref:Uno.GetStarted.vscode\n - name: Rider\n href: xref:Uno.GetStarted.Rider\n \n - name: Create an App\n topicHref: xref:Uno.GettingStarted.CreateAnApp.VS2022\n items:\n - name: Visual Studio 2022 for Windows\n href: xref:Uno.GettingStarted.CreateAnApp.VS2022\n - name: VS Code\n href: xref:Uno.GettingStarted.CreateAnApp.VSCode\n - name: Rider\n href: xref:Uno.GettingStarted.CreateAnApp.Rider\n\n - name: Write your app\n topicHref: xref:Uno.GetStarted.Explore\n items:\n - name: App Structure\n href: xref:Uno.Development.AppStructure\n - name: Hot Reload\n href: xref:Uno.Features.HotReload\n\n - name: Troubleshoot\n topicHref: xref:Uno.UI.CommonIssues\n items:\n - name: Overview\n href: xref:Uno.UI.CommonIssues\n - name: Sign in with Uno Platform\n href: xref:Uno.GetStarted.Licensing\n - name: Hot Reload\n topicHref: xref:Uno.Features.HotReload#troubleshooting\n - name: Development Environments\n topicHref: xref:Uno.UI.CommonIssues.AllIDEs\n items:\n - name: All Development Environments\n href: xref:Uno.UI.CommonIssues.AllIDEs\n - name: Visual Studio 2022 for Windows\n href: xref:Uno.UI.CommonIssues.vs2022\n - name: VS Code\n href: xref:Uno.UI.CommonIssues.vscode\n - name: Rider\n href: xref:Uno.UI.CommonIssues.rider\n - name: Platforms\n topicHref: xref:Uno.UI.CommonIssues.Wasm\n items:\n - name: WebAssembly\n href: xref:Uno.UI.CommonIssues.Wasm\n - name: Skia (Gtk/Wpf/Framebuffer)\n href: xref:Uno.UI.CommonIssues.Skia\n - name: iOS/mac Catalyst\n href: xref:Uno.UI.CommonIssues.IosCatalyst\n - name: Android\n href: xref:Uno.UI.CommonIssues.Android\n - name: Build Errors\n topicHref: xref:Uno.Development.Troubleshooting\n items:\n - name: Troubleshooting build errors\n href: xref:Uno.Development.Troubleshooting\n - name: Build error codes\n href: xref:Build.Solution.error-codes\n\n- name: Samples & Tutorials\n topicHref: xref:Uno.SamplesTutorials.Overview\n items:\n - name: Overview\n href: xref:Uno.SamplesTutorials.Overview\n - name: Counter\n href: xref:Uno.Workshop.Counter\n items:\n - name: Counter App\n href: xref:Uno.Workshop.Counter\n - name: XAML + MVUX\n href: xref:Uno.Workshop.Counter.XAML.MVUX\n - name: XAML + MVVM\n href: xref:Uno.Workshop.Counter.XAML.MVVM\n - name: C# Markup + MVUX\n href: xref:Uno.Workshop.Counter.CSharp.MVUX\n - name: C# Markup + MVVM\n href: xref:Uno.Workshop.Counter.CSharp.MVVM\n - name: Hot Design\n href: xref:Uno.HotDesign.GetStarted.CounterTutorial\n - name: Simple Calc\n topicHref: xref:Workshop.SimpleCalc.Overview\n href: external/workshops/simple-calc/toc.yml\n - name: Tube Player\n topicHref: xref:Workshop.TubePlayer.Overview\n href: external/workshops/tube-player/toc.yml\n - name: Tutorials\n topicHref: xref:Uno.Tutorials.Intro\n items:\n - name: Overview\n href: xref:Uno.Tutorials.Intro\n - name: How to deploy a WebAssembly app on Azure Static Web Apps\n href: xref:Uno.Tutorials.AzureStaticWepApps\n - name: How to use Windows Community Toolkit\n topicHref: xref:Uno.Development.CommunityToolkit\n items:\n - name: Windows Community Toolkit - Version 8.x\n href: xref:Uno.Development.CommunityToolkit.v8\n - name: Windows Community Toolkit - Version 7.x\n href: xref:Uno.Development.CommunityToolkit.v7\n - name: How to manually add a splash screen\n href: xref:Uno.Development.SplashScreen\n - name: How to use native Frame 
navigation\n href: xref:Uno.Tutorials.UseNativeFrameNav\n - name: How to consume a web API\n href: xref:Uno.Development.ConsumeWebApi\n - name: How to localize text resources\n href: xref:Uno.Tutorials.Localization\n - name: How to change app language at runtime\n href: xref:Uno.Tutorials.ChangeAppLanguage\n - name: How to integrate SignalR\n href: xref:Uno.Development.SignalR\n - name: How to update StatusBar color based on dark/light theme\n href: xref:Uno.Tutorials.StatusBarThemeColor\n - name: How to use MSAL for Azure Authentication\n href: xref:Uno.Interop.MSAL\n - name: How to authenticate with OpenID Connect\n href: xref:Uno.Tutorials.OpenIDConnect\n - name: How to implement a sign-in with Apple Button\n href: xref:Uno.Tutorials.SignInWithApple\n - name: Embed a JavaScript Component\n topicHref: xref:Uno.Interop.WasmJavaScript1\n items:\n - name: Part 1\n href: xref:Uno.Interop.WasmJavaScript1\n - name: Part 2\n href: xref:Uno.Interop.WasmJavaScript2\n - name: Part 3\n href: xref:Uno.Interop.WasmJavaScript3\n - name: Deploy to Raspberry Pi\n href: xref:Uno.RaspberryPi.Intro\n - name: (Wasm) Handling custom HTML events\n href: xref:Uno.Development.WasmCustomEvents\n - name: Community Tutorials\n topicHref: xref:Uno.Tutorials.CommunityTutorials\n - name: Samples\n href: xref:Uno.Samples\n - name: Additional Resources\n href: xref:Uno.GetStarted.NextSteps\n\n- name: Overview\n items:\n - name: Intro\n href: xref:Uno.Documentation.Intro\n - name: Architecture\n items:\n - name: Supported Platforms\n href: getting-started/requirements.md\n items:\n - name: .NET Versions\n href: net-version-support.md\n - name: Working with Skia Desktop\n href: xref:Uno.Skia.Desktop\n - name: Working with the Linux Framebuffer\n href: features/using-linux-framebuffer.md\n - name: Philosophy\n href: concepts/overview/philosophy-of-uno.md\n - name: How Uno Works\n href: how-uno-works.md\n - name: Best practices\n href: best-practices-uno.md\n\n - name: Authoring\n items:\n - name: Using the Uno.SDK\n href: xref:Uno.Features.Uno.Sdk\n - name: Platform-specific C# code\n href: platform-specific-csharp.md\n - name: Platform-specific XAML markup\n href: platform-specific-xaml.md\n - name: Creating Custom Controls\n href: guides/creating-custom-controls.md\n - name: Control Libraries\n href: guides/how-to-create-control-libraries.md\n - name: Adding New Platforms\n href: guides/how-to-add-platforms-existing-project.md\n - name: Getting Started With Tests\n href: xref:Uno.Authoring.Tests\n - name: Logging\n href: logging.md\n - name: AppManifest (WebAssembly)\n href: wasm-appmanifest.md\n - name: Hosting a WebAssembly app\n href: xref:Uno.Development.HostWebAssemblyApp\n - name: Using Skia Desktop (macOS)\n href: xref:Uno.Skia.macOS\n\n - name: Debugging\n items:\n - name: How to create a repro sample\n href: uno-howto-create-a-repro.md\n - name: 'Debugging C# on WASM'\n href: debugging-wasm.md\n - name: VS Code Advanced Mobile Debugging\n href: xref:uno.vscode.mobile.advanced.debugging\n - name: Generic Unhandled Exceptions handler\n href: xref:Uno.Development.UnhandledExceptions\n - name: Debugging Troubleshooting\n href: xref:Uno.Debugging.Troubleshooting\n\n - name: Embedding\n items:\n - name: Uno Platform in WPF\n href: guides/uno-islands.md\n - name: Native Views with Mobile\n href: native-views.md\n - name: Native Views with Skia \n href: xref:Uno.Skia.Embedding.Native\n\n - name: Upgrading\n items:\n - name: Upgrading NuGet Packages\n href: xref:Uno.Development.UpgradeUnoNuget\n - name: Upgrading from 
previous releases\n href: migrating-from-previous-releases.md\n - name: Upgrading to Single Project\n href: xref:Uno.Development.MigratingToSingleProject\n - name: Upgrading to Uno 5.0\n href: xref:Uno.Development.MigratingToUno5\n - name: Upgrading from .NET 8 to .NET 9\n href: xref:Uno.Development.MigratingFromNet8ToNet9\n - name: Upgrading from .NET 7 to .NET 8\n href: xref:Uno.Development.MigratingFromNet7ToNet8\n - name: Upgrading from Xamarin to .NET 6\n href: migrating-from-xamarin-to-net6.md\n - name: Upgrading to WinUI 3.0\n href: updating-to-winui3.md\n\n - name: Migrating\n items:\n - name: Xamarin Forms\n topicHref: xref:Uno.XamarinFormsMigration.Overview\n href: guides/xf-migration/toc.yml\n - name: WPF\n items:\n - name: Migrating WPF Apps to Web\n href: wpf-migration.md\n - name: UWP-only code\n items:\n - name: Overview\n href: howto-migrate-existing-code.md\n - name: Checklist\n href: migrating-before-you-start.md\n - name: Applications\n href: migrating-apps.md\n - name: Class libraries\n href: migrating-libraries.md\n - name: General guidance\n href: migrating-guidance.md\n - name: Silverlight\n topicHref: guides/silverlight-migration/silverlight-migration-landing.md\n href: guides/silverlight-migration/toc.yml\n\n - name: Publishing\n items:\n - name: Overview\n href: xref:uno.publishing.overview\n - name: Publishing for Desktop\n topicHref: xref:uno.publishing.desktop\n items:\n - name: Publishing Your App for Desktop\n href: xref:uno.publishing.desktop\n - name: Publishing Your App for macOS\n href: xref:uno.publishing.desktop.macos\n - name: Publishing Your App for macOS - Advanced Topics\n href: xref:uno.publishing.desktop.macos.advanced\n - name: Publishing Your App for Linux\n href: xref:uno.publishing.desktop.linux\n - name: Publishing for WebAssembly\n href: xref:uno.publishing.webassembly\n - name: Publishing for Windows App SDK\n topicHref: xref:uno.publishing.windows\n items:\n - name: Build Packaged Unsigned Apps\n href: xref:uno.publishing.windows.sideload.packaged.unsigned\n - name: Build Packaged Signed Apps\n href: xref:uno.publishing.windows.sideload.packaged.signed\n - name: Build Unpackaged Apps\n href: xref:uno.publishing.windows.sideload.unpackaged.unsigned\n - name: Publishing for iOS\n href: xref:uno.publishing.ios\n - name: Publishing for Android\n href: xref:uno.publishing.android\n - name: Publishing for Mac Catalyst\n href: xref:uno.publishing.maccatalyst\n\n - name: Performance\n items:\n - name: Improving Build Times\n href: xref:Build.Solution.TargetFramework-override\n - name: Performance tips\n href: Uno-UI-Performance.md\n - name: Memory Profiling\n href: xref:Uno.Contributing.MemoryIssues\n - name: How to profile applications\n href: guides/profiling-applications.md\n - name: IL Linker for WebAssembly\n href: xref:uno.articles.features.illinker\n - name: Working with String Resource Trimming\n href: features/upri-trimming.md\n - name: Working with XAML Trimming\n href: features/resources-trimming.md\n - name: Error Monitoring & Crash Reporting with Raygun\n href: xref:Monitoring.Raygun\n\n - name: Contributing\n items:\n - name: Overview\n href: uno-development/contributing-intro.md\n - name: Code of Conduct\n href: https://github.com/unoplatform/uno/blob/master/CODE_OF_CONDUCT.md\n - name: Ways to contribute\n href: uno-development/ways-to-contribute.md\n - name: How Uno works\n items:\n - name: Overview\n href: uno-development/uno-internals-overview.md\n - name: Android\n href: uno-development/uno-internals-android.md\n - name: 
Layouting in Android\n href: uno-development/Uno-UI-Layouting-Android.md\n - name: iOS\n href: uno-development/uno-internals-ios.md\n - name: Layouting in iOS\n href: uno-development/Uno-UI-Layouting-iOS.md\n - name: WASM\n href: uno-development/uno-internals-wasm.md\n - name: macOS\n href: uno-development/uno-internals-macos.md\n - name: DependencyProperty backing generator\n href: uno-development/Internal-DependencyProperty-Generator.md\n - name: x:Bind in Uno Platform\n href: uno-development/Uno-UI-xBind-architecture.md\n - name: ListViewBase\n href: uno-development/listviewbase-internals.md\n - name: ThemeResource\n href: uno-development/themeresource-internals.md\n - name: InteractionTracker internals\n href: uno-development/interaction-tracker-internals.md\n - name: Building Uno.UI\n href: uno-development/building-uno-ui.md\n - name: Debugging Uno.UI\n href: uno-development/debugging-uno-ui.md\n - name: Using Codespaces\n href: features/working-with-codespaces.md\n - name: Using Gitpod\n href: features/working-with-gitpod.md\n - name: Inspecting the visual tree\n href: uno-development/debugging-inspect-visual-tree.md\n - name: Guidelines for code style\n href: contributing/guidelines/code-style.md\n - name: Using the SamplesApp\n href: uno-development/working-with-the-samples-apps.md\n - name: Guidelines for creating tests\n items:\n - name: Overview\n href: contributing/guidelines/creating-tests.md\n - name: Mocked unit tests (Uno.UI.Tests)\n href: uno-development/creating-mocked-tests.md\n - name: Platform-runtime unit tests (Uno.UI.RuntimeTests)\n href: uno-development/creating-runtime-tests.md\n - name: UI tests with Uno.UITest\n href: uno-development/creating-ui-tests.md\n - name: Debugging Solution Templates\n href: uno-development/debugging-templates.md\n - name: Conventional Commits format\n href: uno-development/git-conventional-commits.md\n - name: Guidelines for pull-requests\n href: contributing/guidelines/pull-requests.md\n - name: Guidelines for breaking changes\n href: contributing/guidelines/breaking-changes.md\n - name: Guidelines for updating dependencies\n href: contributing/guidelines/updating-dependencies.md\n - name: Guidelines for issue triage\n href: contributing/guidelines/issue-triage.md\n - name: Guidelines for implementing a new WinUI/WinRT feature\n href: contributing/guidelines/implementing-a-new-winui-winrt-feature.md\n - name: Adding documentation\n href: uno-development/doc-on-docs.md\n - name: DocFX\n href: uno-development/docfx.md\n - name: Uno.UI release procedure\n href: uno-development/release-procedure.md\n - name: Build Artifacts\n href: contributing/build-artifacts.md\n - name: Advanced topics\n items:\n - name: Adding functionality with API extensions\n href: uno-development/api-extensions.md\n - name: The WeakEventHelper class\n href: uno-development/Internal-WeakEventHelper.md\n - name: Debugging Uno.UI Java code with Android studio\n href: uno-development/Uno-UI-Debugging-Android-Studio.md\n - name: Troubleshooting Memory Issues\n href: uno-development/troubleshooting-memory-issues.md\n - name: Troubleshooting Source Generation\n href: uno-development/troubleshooting-source-generation.md\n - name: The XAML Trimming phase\n href: uno-development/Uno-UI-XAML-ResourceTrimming.md\n - name: Hot Reload internals\n href: xref:Uno.Contributing.Internals.HotReload\n - name: Why use Uno Platform?\n href: xref:Uno.Overview.WhyUno\n - name: FAQ\n href: xref:Uno.Development.FAQ\n\n- name: Studio\n items:\n - name: Overview\n href: 
xref:Uno.Platform.Studio.Overview\n - name: Hot Reload\n href: xref:Uno.Platform.Studio.HotReload.Overview\n - name: Hot Design\n href: xref:Uno.HotDesign.Overview\n items:\n - name: Overview\n href: xref:Uno.HotDesign.Overview\n - name: Getting Started with Hot Design\n href: xref:Uno.HotDesign.GetStarted.Guide\n - name: Counter Tutorial\n href: xref:Uno.HotDesign.GetStarted.CounterTutorial\n - name: Design-to-Code\n href: external/figma-docs/toc.yml\n - name: Feedback\n href: xref:Uno.Platform.Studio.Feedback\n\n- name: Reference\n items:\n - name: Overview\n items:\n - name: Developing with Uno Platform\n href: xref:Uno.Development.Overview\n - name: Features List\n href: supported-features.md\n - name: Feature Flags\n href: feature-flags.md\n - name: WinUI Compatibility\n items:\n - name: Differences From WinUI\n href: api-differences.md\n - name: About NotImplemented members\n href: uno-notimplemented-types.md\n - name: Uno and WinUI 3\n href: uwp-vs-winui3.md\n - name: WinAppSDK Specifics\n href: features/winapp-sdk-specifics.md\n - name: WinUI links\n items:\n - name: Overview\n href: winui-doc-links.md\n - name: Tutorials\n href: winui-doc-links-tutorials.md\n - name: Development\n href: winui-doc-links-development.md\n - name: Design\n href: winui-doc-links-design.md\n\n - name: UI\n items:\n - name: Controls\n items:\n - name: Implemented Views\n href: implemented/toc.yml\n topicHref: implemented-views.md\n - name: Uno Specifics\n items:\n - name: AdaptiveTrigger\n href: xref:Uno.Features.AdaptiveTrigger\n - name: Activities in Android\n href: android-activities.md\n - name: ComboBox\n href: controls/ComboBox.md\n - name: CommandBar\n href: controls/CommandBar.md\n - name: DatePicker\n href: controls/DatePicker.md\n - name: ElevatedView\n href: features/ElevatedView.md\n - name: Fluent icon font\n href: uno-fluent-assets.md\n - name: Flyout\n href: controls/Flyout.md\n - name: Frame\n href: controls/Frame.md\n - name: GLCanvasElement\n href: xref:Uno.Controls.GLCanvasElement\n - name: Image\n href: xref:Uno.Features.Image\n - name: ListView and GridView\n href: controls/ListViewBase.md\n - name: Lottie animations\n href: features/Lottie.md\n - name: MapControl\n href: controls/map-control-support.md\n - name: MediaPlayerElement\n href: controls/MediaPlayerElement.md\n - name: MenuFlyout\n href: controls/MenuFlyout.md\n - name: NavigationView\n href: controls/NavigationView.md\n - name: Native control styles\n href: native-styles.md\n - name: Other features\n href: xref:Uno.Development.AdditionalFeatures\n - name: Pivot\n href: controls/Pivot.md\n - name: Popup\n href: controls/Popup.md\n - name: ProgressRing\n href: features/progressring.md\n - name: RefreshContainer (Pull to Refresh)\n href: controls/RefreshContainer.md\n - name: ScrollViewer\n href: controls/ScrollViewer.md\n - name: SKCanvasElement\n href: xref:Uno.Controls.SKCanvasElement\n - name: TextBox\n href: controls/TextBox.md\n - name: TimePicker\n href: controls/TimePicker.md\n - name: ToggleSwitch\n href: controls/ToggleSwitch.md\n - name: URI Protocol activation\n href: features/protocol-activation.md\n - name: Using pointer cursors\n href: features/cursors.md\n - name: Using SVG images\n href: features/svg.md\n - name: VisibleBoundsPadding\n href: xref:Uno.Features.VisibleBoundsPadding\n - name: WebView (WebView2)\n href: controls/WebView.md\n\n - name: Features\n items:\n - name: Accessibility\n href: features/working-with-accessibility.md\n - name: Animations\n href: features/working-with-animations.md\n - name: 
Assets and image display\n href: features/working-with-assets.md\n - name: Build telemetry\n href: uno-toolchain-telemetry.md\n - name: Composition API\n href: composition.md\n - name: Dialogs\n href: features/dialogs.md\n - name: Fluent styles\n href: features/fluent-styles.md\n - name: Focus management\n href: features/focus-management.md\n - name: Fonts\n href: features/custom-fonts.md\n - name: Markup Extensions\n href: features/windows-ui-markup-extensions.md\n - name: Native frame navigation\n href: features/native-frame-nav.md\n - name: Orientation\n href: features/orientation.md\n - name: Routed Events\n href: features/routed-events.md\n - name: Shapes & Brushes\n href: features/shapes-and-brushes.md\n - name: String resources and localization\n href: features/working-with-strings.md\n - name: Succinct syntax\n href: features/windows-ui-succinct-syntax.md\n - name: Themes\n href: features/working-with-themes.md\n - name: User inputs - Keyboard, Pointers, Gestures, Manipulations, Drag and drop\n href: features/pointers-keyboard-and-other-user-inputs.md\n - name: Using Fluent styles in legacy apps\n href: features/using-winui2.md\n - name: Windowing\n href: features/windows-ui-xaml-window.md\n - name: Preventing Window Closing\n href: features/app-close-handler.md\n - name: x:Bind\n href: features/windows-ui-xbind.md\n\n - name: 3rd-party libraries\n href: supported-libraries.md\n\n - name: Non-UI\n items:\n - name: Accelerometer\n href: features/accelerometer.md\n - name: Android TV\n href: features/android-tv.md\n - name: App Actions\n href: features/windows-ui-startscreen.md\n - name: App Suspension\n href: features/windows-ui-xaml-application.md\n - name: Application Data and Settings\n href: features/applicationdata.md\n - name: Badge Notifications\n href: features/windows-ui-notifications.md\n - name: Barometer\n href: features/barometer.md\n - name: Battery Information\n href: features/windows-system-power.md\n - name: Bluetooth\n href: features/bluetoothdevice.md\n - name: Capture (Camera)\n href: features/windows-media-capture.md\n - name: Clipboard\n href: features/clipboard.md\n - name: Compass\n href: features/compass.md\n - name: Contacts\n href: features/windows-applicationmodel-contacts.md\n - name: Credential Storage\n href: features/PasswordVault.md\n - name: Device Enumeration\n href: features/windows-devices-enumeration.md\n - name: Device Information\n href: features/windows-system-profile.md\n - name: E-mail\n href: features/windows-applicationmodel-email.md\n - name: File and Folder Pickers\n href: features/windows-storage-pickers.md\n - name: File Management\n href: features/file-management.md\n - name: Flashlight\n href: features/flashlight.md\n - name: Gamepad\n href: features/gamepad.md\n - name: Geolocation (GPS)\n href: features/windows-devices-geolocation.md\n - name: Gyrometer\n href: features/gyrometer.md\n - name: Haptics\n href: features/windows-devices-haptics.md\n - name: Hardware back button\n href: features/hardware-back-button.md\n - name: Keeping Screen On\n href: features/windows-system-display.md\n - name: Light Sensor\n href: features/lightsensor.md\n - name: Magnetometer\n href: features/magnetometer.md\n - name: MIDI\n href: features/windows-devices-midi.md\n - name: Network Information\n href: features/windows-networking.md\n - name: Orientation Sensor\n href: features/orientation-sensor.md\n - name: Package Information\n href: features/windows-applicationmodel.md\n - name: Phone Calls\n href: features/windows-applicationmodel-calls.md\n - 
name: Proximity Sensor\n href: features/proximity-sensor.md\n - name: Settings\n href: features/settings.md\n - name: Sharing\n href: features/windows-applicationmodel-datatransfer.md\n - name: SMS\n href: features/windows-applicationmodel-chat.md\n - name: Speech Recognition\n href: features/SpeechRecognition.md\n - name: Step Counter\n href: features/step-counter.md\n - name: Store Context\n href: features/windows-ui-storecontext.md\n - name: Title Bar Customization\n href: features/windows-ui-viewmanagement.md\n - name: URI Launcher\n href: features/windows-system.md\n - name: Vibration\n href: features/windows-phone-devices-notification-vibrationdevice.md\n - name: Wallpaper and Lock Screen\n href: features/windows-system-userprofile.md\n - name: Web Authentication Broker\n href: features/web-authentication-broker.md\n - name: Working with cookies\n href: features/working-with-cookies.md\n\n- name: Extensions\n # topicHref: external/uno.extensions/doc/ExtensionsOverview.md\n href: external/uno.extensions/doc/toc.yml\n\n- name: Themes\n # topicHref: external/uno.themes/doc/themes-overview.html\n href: external/uno.themes/doc/toc.yml\n\n- name: Toolkit\n #topicHref: external/uno.toolkit.ui/doc/getting-started.md\n href: external/uno.toolkit.ui/doc/toc.yml\n\n- name: Tooling\n items:\n - name: Uno WebAssembly Bootstrapper\n href: external/uno.wasm.bootstrap/doc/toc.yml\n - name: Uno Check\n href: external/uno.check/doc/toc.yml\n - name: Xaml Merging\n href: external/uno.xamlmerge.task/doc/toc.yml\n - name: Uno Resizetizer\n href: external/uno.resizetizer/doc/toc.yml\n - name: Uno.UITest\n href: external/uno.uitest/doc/toc.yml\n - name: VS Code Extension\n items:\n - name: Visual Studio Code Extension\n href: xref:Uno.vscode.additional\n - name: C# Dev Kit Support\n href: xref:Uno.GetStarted.vscode.DevKit\n - name: Omnisharp Support\n href: xref:Uno.GetStarted.vscode.OmniSharp\n - name: Rider\n items:\n - name: License Agreement\n href: xref:Uno.Rider.EULA\n - name: Templates\n items:\n - name: Visual Studio Wizard\n href: getting-started/wizard/using-wizard.md\n - name: dotnet new Templates\n href: get-started-dotnet-new.md\n - name: Get started wizard\n href: xref:Uno.GetStarted.Wizard\n
| dataset_sample\yaml\unoplatform_uno\doc\articles\toc.yml | toc.yml | YAML | 27,855 | 0.95 | 0.031421 | 0.004231 | react-lib | 457 | 2024-02-29T15:08:27.679944 | GPL-3.0 | false | 604581572cfce55b8d5f15539071d4ef |
- name: Overview\n href: silverlight-migration-landing.md\n- name: Silverlight to Uno Migration\n href: 00-overview.md\n- name: Create the Uno solution for UWP and WASM\n href: 01-create-uno-solution.md\n- name: Considering navigation\n href: 02-considering-navigation.md\n- name: Reviewing the app startup\n href: 03-review-app-startup.md\n- name: Migrating the home page XAML and styles\n href: 04-migrate-home-page-xaml-and-styles.md\n- name: Switching to string resources\n href: 05-string-resources.md\n- name: Dialogs and errors\n href: 07-dialogs.md\n- name: Data access services\n href: 08-data-access-overview.md\n- name: Client Authentication\n href: 09-client-auth-service.md\n- name: Implementing a singleton token service\n href: 10-implementing-singleton-token-service.md\n- name: Implementing an identity service client\n href: 11-implementing-identity-service-client.md\n- name: Migrating the authentication UI\n href: 12-migrate-auth-ui.md\n- name: Integrating authentication and navigation\n href: 13-integrating-auth-and-navigation.md\n- name: Implement the time entry service\n href: 14-implement-timeentry-services.md\n- name: Migrating the time entry UI\n href: 15-migrate-timeentry-ui.md\n- name: Wrap-up\n href: 20-wrap-up.md\n- name: The TimeEntry Sample apps\n href: 98-timeentry-samples.md\n- name: Useful resources\n href: 99-useful-resources.md\n
| dataset_sample\yaml\unoplatform_uno\doc\articles\guides\silverlight-migration\toc.yml | toc.yml | YAML | 1,356 | 0.7 | 0.026316 | 0 | react-lib | 202 | 2024-02-12T06:42:52.053623 | Apache-2.0 | false | 5abfc0023c8c0b69f1eb1812cdfaba7a |
- name: Overview\n href: xref:Uno.XamarinFormsMigration.Overview\n- name: Uno Platform for Xamarin.Forms developers\n href: xref:Uno.XamarinFormsMigration.Intro\n- name: Migrating Animations\n href: https://platform.uno/blog/migrating-animations-from-xamarin-forms-to-uno-platform/\n- name: Migrating Custom Controls\n href: https://platform.uno/blog/porting-custom-controls-from-xamarin-forms-to-uno-platform/\n- name: Migrating Custom-Drawn Controls\n href: https://platform.uno/blog/porting-a-custom-drawn-xamarin-forms-control-to-uno-platform/\n- name: Migrating Data Binding\n href: https://platform.uno/blog/xamarin-forms-migration-to-uno-platform-data-binding-techniques/\n- name: Migrating Effects\n href: https://platform.uno/blog/xamarin-forms-migration-to-uno-platform-effects-and-alternative-approaches/\n- name: Migrating Navigation\n href: https://platform.uno/blog/migrating-page-navigation-apps-from-xamarin-forms/\n- name: Migrating Renderers\n href: https://platform.uno/blog/hosting-native-controls-moving-from-xamarin-forms-renderers-to-uno-platform-controls/
| dataset_sample\yaml\unoplatform_uno\doc\articles\guides\xf-migration\toc.yml | toc.yml | YAML | 1,071 | 0.8 | 0.058824 | 0 | python-kit | 173 | 2025-02-03T20:57:31.092988 | GPL-3.0 | false | 09ea9f427f4af646f6817f4a681630d7 |
trigger:\n batch: true\n branches:\n include:\n - master\n - dev*\n - refs/tags/*\n\npool:\n vmImage: "ubuntu-latest"\n\nvariables:\n - group: GithubToken\n\nsteps:\n - checkout: self\n - task: GoTool@0\n inputs:\n version: "1.15.2"\n - script: |\n go version\n go mod download\n workingDirectory: $(system.defaultWorkingDirectory)\n displayName: "Fetch sources"\n - script: |\n bazel build --action_env=PATH=$PATH --action_env=GOPATH=$(go env GOPATH) --action_env=GOCACHE=$(go env GOCACHE) --action_env=SPWD=$(pwd) --spawn_strategy local //release:all\n workingDirectory: $(system.defaultWorkingDirectory)\n displayName: "Build Binaries"\n - script: |\n echo $RELEASE_TAG\n ./release/bleedingrelease.sh\n workingDirectory: $(system.defaultWorkingDirectory)\n displayName: "Generate Bleeding Edge Release"\n env:\n WORKDIR: $(system.defaultWorkingDirectory)\n GITHUB_TOKEN: $(GITHUB_TOKEN)\n PRERELEASE: true\n RELEASE_TAG: unstable-$(Build.SourceVersion)\n RELEASE_SHA: $(Build.SourceVersion)\n TRIGGER_REASON: $(Build.SourceBranch)\n GITHUB_REPO_OWNER: v2fly\n GITHUB_REPO_NAME: v2ray-core\n - script: |\n echo $RELEASE_TAG\n ./release/tagrelease.sh\n workingDirectory: $(system.defaultWorkingDirectory)\n displayName: "Generate Tag Release"\n env:\n WORKDIR: $(system.defaultWorkingDirectory)\n GITHUB_TOKEN: $(GITHUB_TOKEN)\n PRERELEASE: true\n RELEASE_TAG: unstable-$(Build.SourceVersion)\n RELEASE_SHA: $(Build.SourceVersion)\n TRIGGER_REASON: $(Build.SourceBranch)\n
| dataset_sample\yaml\v2ray_v2ray-core\azure-pipelines.yml | azure-pipelines.yml | YAML | 1,590 | 0.8 | 0 | 0 | awesome-app | 706 | 2023-08-08T08:19:32.214322 | MIT | false | 88572555e8a212cb5269319986b5b8d8 |
stages:\n - check\n - build-nightly\n - build\n - publish\n\nvariables:\n # Note: this is deprecated!\n # https://docs.gitlab.com/ee/ci/yaml/#git-strategy\n # However in gitlab web ui it's set to fetch so it should be fine ¯\_(ツ)_/¯\n GIT_STRATEGY: fetch\n # Note: this is deprecated!\n # https://docs.gitlab.com/ee/ci/yaml/#shallow-cloning\n GIT_DEPTH: 3\n GIT_CLEAN_FLAGS: -f\n CACHE_IMAGE_TAG: 2cca2eaa\n TAG_REGEX: '/^v[0-9]+\.[0-9]+\.[0-9]+$/'\n\ndefault:\n # https://docs.gitlab.com/ee/ci/pipelines/settings.html#auto-cancel-pending-pipelines\n interruptible: true\n # Retry automatically in case the runner times out or there's a runner failure\n retry:\n max: 2\n when:\n - runner_system_failure\n - stuck_or_timeout_failure\n tags:\n - veloren/veloren\n\n# NOTE: overriden for the `.tmacos` template so relevant changes need to be mirrored there\nbefore_script:\n - source $HOME/.cargo/env\n - df -h /\n - free -h\n - cargo --version\n - source ./.gitlab/scripts/env.sh\n - rm -rf target || echo "it seems that sometimes OLD data is left over"\n\n# 8866215 is the user that is used to sync data to the collaboration repos\nworkflow:\n rules:\n - if: $CI_MERGE_REQUEST_IID\n - if: $CI_COMMIT_TAG\n - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $GITLAB_USER_ID != "8866215"\n - if: $CI_COMMIT_REF_NAME =~ /^r[0-9]+\.[0-9]+/ && $GITLAB_USER_ID != "8866215"\n\ninclude:\n - local: .gitlab/CI/recompile.yml\n - local: .gitlab/CI/release.yml\n - local: .gitlab/CI/check.gitlab-ci.yml\n - local: .gitlab/CI/build.gitlab-ci.yml\n - local: .gitlab/CI/publish.gitlab-ci.yml\n
| dataset_sample\yaml\veloren_veloren\.gitlab-ci.yml | .gitlab-ci.yml | YAML | 1,590 | 0.8 | 0.09434 | 0.1875 | node-utils | 912 | 2023-12-31T02:14:14.780273 | BSD-3-Clause | false | c2ad06b3a7687169f7ee6b9a38cb6811 |
# SPDX-FileCopyrightText: 2025 Javier Pérez\n#\n# SPDX-License-Identifier: CC0-1.0\n\nname: Mirror GitLab repository\n\non:\n schedule:\n - cron: "0 * * * *"\n\njobs:\n mirror-gitlab-repo:\n uses: veloren/.github/.github/workflows/mirror.yml@main\n with:\n repository-url: https://gitlab.com/veloren/veloren.git\n rewrite-lfs: true\n secrets:\n token: ${{ secrets.MIRROR_TOKEN_GITHUB }}\n
| dataset_sample\yaml\veloren_veloren\.github\workflows\mirror.yml | mirror.yml | YAML | 400 | 0.8 | 0 | 0.2 | python-kit | 746 | 2024-10-07T07:17:25.989302 | Apache-2.0 | false | cb29cc05fc32dd4e7e1cc38e64a57125 |
# SPDX-FileCopyrightText: 2025 Javier Pérez\n#\n# SPDX-License-Identifier: CC0-1.0\n\nname: Decommission pull requests on GitHub\n\non:\n pull_request_target:\n types:\n - opened\n\njobs:\n close-pull-request:\n uses: veloren/.github/.github/workflows/no-pr.yml@main\n permissions:\n pull-requests: write\n
| dataset_sample\yaml\veloren_veloren\.github\workflows\no-pr.yml | no-pr.yml | YAML | 312 | 0.8 | 0 | 0.230769 | awesome-app | 959 | 2025-01-07T08:55:45.317802 | Apache-2.0 | false | 3132e85428b5f9da7aa7577787668705 |
unittests:\n extends: .recompile-branch\n stage: build\n image: registry.gitlab.com/veloren/veloren-docker-ci/cache/quality:${CACHE_IMAGE_TAG}\n tags: ["veloren/veloren", "check"]\n variables:\n GIT_DEPTH: 9999999999999\n script:\n - ln -s /dockercache/target target\n - cat ./.gitlab/scripts/unittest.sh\n - source ./.gitlab/scripts/unittest.sh\n retry:\n max: 2\n\ntranslation:\n extends: .release\n stage: build\n image: registry.gitlab.com/veloren/veloren-docker-ci/cache/quality:${CACHE_IMAGE_TAG}\n tags: ["veloren/veloren", "check"]\n script:\n - ln -s /dockercache/target target\n - cat ./.gitlab/scripts/translation.sh\n - source ./.gitlab/scripts/translation.sh\n - TAGUUID="Z$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 16 | head -n 1)" || echo "ignore this returncode, dont ask me why, it works"\n - echo $TAGUUID # Use TAGUUID to mitigate https://xkcd.com/327/ in the branch name\n - echo 'SET veloren.timestamp = "'"$(git show --no-patch --no-notes --pretty='%cd' HEAD)"'";' > upload.sql\n - echo "SET veloren.branch = \$${TAGUUID}\$${CI_COMMIT_REF_NAME}\$${TAGUUID}\$;" >> upload.sql\n - echo "SET veloren.sha = \$${TAGUUID}\$${CI_COMMIT_SHA}\$${TAGUUID}\$;" >> upload.sql\n - echo '\copy translations ("country_code", "file_name", "translation_key", "status", "git_commit") from '"'translation_analysis.csv' csv header" >> upload.sql\n - cat upload.sql\n - PGPASSWORD="${CIDBPASSWORD}" PGSSLROOTCERT="./.gitlab/ci-db.crt" psql "sslmode=verify-ca host=cidb.veloren.net port=30432 dbname=translations" -U hgseehzjtsrghtjdcqw -f upload.sql;\n retry:\n max: 2\n\nbenchmarks:\n extends: .release\n stage: build\n image: registry.gitlab.com/veloren/veloren-docker-ci/cache/bench:${CACHE_IMAGE_TAG}\n tags: ["veloren/veloren", "check", "benchmark"]\n script:\n - unset DISABLE_GIT_LFS_CHECK\n - ln -s /dockercache/target target\n - cat ./.gitlab/scripts/benchmark.sh\n - source ./.gitlab/scripts/benchmark.sh\n - TAGUUID="Z$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 16 | head -n 1)" || echo "ignore this returncode, dont ask me why, it works"\n - echo $TAGUUID # Use TAGUUID to mitigate https://xkcd.com/327/ in the branch name\n - echo 'SET veloren.timestamp = "'"$(git show --no-patch --no-notes --pretty='%cd' HEAD)"'";' > upload.sql\n - echo "SET veloren.branch = \$${TAGUUID}\$${CI_COMMIT_REF_NAME}\$${TAGUUID}\$;" >> upload.sql\n - echo "SET veloren.sha = \$${TAGUUID}\$${CI_COMMIT_SHA}\$${TAGUUID}\$;" >> upload.sql\n - find target/criterion -wholename "*new/*.csv" -exec echo '\copy benchmarks ("group", "function", "value", throughput_num, throughput_type, sample_measured_value, unit, iteration_count) from '"'{}' csv header" >> upload.sql \;\n - cat upload.sql\n - PGPASSWORD="${CIDBPASSWORD}" PGSSLROOTCERT="./.gitlab/ci-db.crt" psql "sslmode=verify-ca host=cidb.veloren.net port=30432 dbname=benchmarks" -U hgseehzjtsrghtjdcqw -f upload.sql;\n retry:\n max: 2\n\n# Coverage is needed on master for the README.md badge to work\n# tmp remove simd as it doesnt work with tarpaulin: https://github.com/rust-lang/rust/issues/77529\n\ncoverage:\n extends: .release\n stage: build\n image: registry.gitlab.com/veloren/veloren-docker-ci/cache/tarpaulin:${CACHE_IMAGE_TAG}\n coverage: '/^\d+.\d+% coverage/'\n tags: ["veloren/veloren", "check"]\n script:\n - ln -s /dockercache/target target\n - cat ./.gitlab/scripts/coverage.sh\n - source ./.gitlab/scripts/coverage.sh\n retry:\n max: 2\n\n#linux, windows, macos builds here as template\n.tlinux-x86_64:\n image: 
registry.gitlab.com/veloren/veloren-docker-ci/cache/release-linux-x86_64:${CACHE_IMAGE_TAG}\n script:\n - ln -s /dockercache/target target\n - cat ./.gitlab/scripts/linux-x86_64.sh\n - source ./.gitlab/scripts/linux-x86_64.sh\n - cp -r target/release/veloren-server-cli $CI_PROJECT_DIR\n - cp -r target/release/veloren-voxygen $CI_PROJECT_DIR\n artifacts:\n paths:\n - veloren-server-cli\n - veloren-voxygen\n - assets/\n - LICENSE\n expire_in: 1 week\n\n.tlinux-aarch64:\n image: registry.gitlab.com/veloren/veloren-docker-ci/cache/release-linux-aarch64:${CACHE_IMAGE_TAG}\n script:\n - ln -s /dockercache/target target\n - cat ./.gitlab/scripts/linux-aarch64.sh\n - source ./.gitlab/scripts/linux-aarch64.sh\n - cp -r target/aarch64-unknown-linux-gnu/release/veloren-server-cli $CI_PROJECT_DIR\n - cp -r target/aarch64-unknown-linux-gnu/release/veloren-voxygen $CI_PROJECT_DIR\n artifacts:\n paths:\n - veloren-server-cli\n - veloren-voxygen\n - assets/\n - LICENSE\n expire_in: 1 week\n\n.twindows-x86_64:\n image: registry.gitlab.com/veloren/veloren-docker-ci/cache/release-windows-x86_64:${CACHE_IMAGE_TAG}\n script:\n - ln -s /dockercache/target target\n - cat ./.gitlab/scripts/windows-x86_64.sh\n - source ./.gitlab/scripts/windows-x86_64.sh\n - cp -r target/x86_64-pc-windows-gnu/release/veloren-server-cli.exe $CI_PROJECT_DIR\n - cp -r target/x86_64-pc-windows-gnu/release/veloren-voxygen.exe $CI_PROJECT_DIR\n - cp /usr/lib/gcc/x86_64-w64-mingw32/13-posix/libgcc_s_seh-1.dll $CI_PROJECT_DIR\n - cp /usr/lib/gcc/x86_64-w64-mingw32/13-posix/libstdc++-6.dll $CI_PROJECT_DIR\n - cp /usr/x86_64-w64-mingw32/lib/libwinpthread-1.dll $CI_PROJECT_DIR\n artifacts:\n paths:\n - veloren-server-cli.exe\n - veloren-voxygen.exe\n - assets/\n - LICENSE\n - libgcc_s_seh-1.dll\n - libstdc++-6.dll\n - libwinpthread-1.dll\n expire_in: 1 week\n\n# Use a yaml anchor to allow the same before_script to be used in both .tmacos-x86_64 and .tmacos-aarch64\n# https://docs.gitlab.com/ee/ci/yaml/yaml_optimization.html#yaml-anchors-for-scripts\n.tmacos-before-script: &tmacos-before-script\n - source $HOME/.cargo/env\n - cargo --version\n - rustup toolchain list\n - export DISABLE_GIT_LFS_CHECK=true\n - export VELOREN_ASSETS="$(pwd)/assets"\n - echo "VELOREN_ASSETS=$VELOREN_ASSETS"\n - export RUSTFLAGS="-D warnings"\n - export CARGO_INCREMENTAL=0\n\n.tmacos:\n tags: ["veloren-macos"]\n script:\n - export MACOSX_DEPLOYMENT_TARGET="10.13"\n - export VELOREN_USERDATA_STRATEGY=executable\n - cargo build --profile ${PROFILE} --no-default-features --features default-publish --target $RUST_TARGET\n - cp -r target/$RUST_TARGET/${PROFILE}/veloren-server-cli $CI_PROJECT_DIR\n - cp -r target/$RUST_TARGET/${PROFILE}/veloren-voxygen $CI_PROJECT_DIR\n artifacts:\n paths:\n - veloren-server-cli\n - veloren-voxygen\n - assets/\n - LICENSE\n expire_in: 1 week\n\n.tmacos-x86_64:\n extends:\n - .tmacos\n before_script:\n # Using fat LTO on macos_x86_64 caused timeouts in the build pipeline, overriding it to thin here fixes this\n - export PROFILE="release-thinlto"\n - *tmacos-before-script\n - export RUST_TARGET="x86_64-apple-darwin"\n\n.tmacos-aarch64:\n extends:\n - .tmacos\n before_script:\n - export PROFILE="release"\n - *tmacos-before-script\n - rustup target add aarch64-apple-darwin\n - export RUST_TARGET="aarch64-apple-darwin"\n\n# build on release or master\nlinux-x86_64:\n extends:\n - .tlinux-x86_64\n - .release\n tags: ["veloren/veloren", "build", "publish", "trusted"]\n\nlinux-aarch64:\n extends:\n - .tlinux-aarch64\n - .release\n tags: 
["veloren/veloren", "build", "publish", "trusted"]\n\nwindows-x86_64:\n extends:\n - .twindows-x86_64\n - .release\n tags: ["veloren/veloren", "build", "publish", "trusted"]\n\nmacos-x86_64:\n extends:\n - .tmacos-x86_64\n - .release\n tags: ["veloren/veloren:macos", "build", "publish", "trusted"]\n\nmacos-aarch64:\n extends:\n - .tmacos-aarch64\n - .release\n tags: ["veloren/veloren:macos", "build", "publish", "trusted"]\n\n# if NOT release or master, allow optional builds\nopt-linux-x86_64:\n extends:\n - .tlinux-x86_64\n - .optional-release\n tags: ["veloren/veloren", "build"]\n\nopt-linux-aarch64:\n extends:\n - .tlinux-aarch64\n - .optional-release\n tags: ["veloren/veloren", "build"]\n\nopt-windows-x86_64:\n extends:\n - .twindows-x86_64\n - .optional-release\n tags: ["veloren/veloren", "build"]\n\nopt-macos-x86_64:\n extends:\n - .tmacos-x86_64\n - .optional-release\n tags: ["veloren/veloren:macos", "build"]\n\nopt-macos-aarch64:\n extends:\n - .tmacos-aarch64\n - .optional-release\n tags: ["veloren/veloren:macos", "build"]\n
| dataset_sample\yaml\veloren_veloren\.gitlab\CI\build.gitlab-ci.yml | build.gitlab-ci.yml | YAML | 8,302 | 0.95 | 0.017167 | 0.037736 | awesome-app | 975 | 2024-12-21T03:12:54.344637 | MIT | false | c0bfa06c2ebb67371dd6821828cb4734 |
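The record above leans entirely on GitLab CI's hidden-job templates (names starting with a dot) combined with `extends`. As a distilled, hypothetical sketch of the same composition (the job name, script path, artifact name, and runner tag below are illustrative, not taken from the record; `.release` refers to the rules template defined in the release.yml record further down):

# Hidden jobs (leading dot) never run directly; they only serve as templates.
.build-template:
  script:
    - ln -s /dockercache/target target
    - source ./.gitlab/scripts/build.sh   # hypothetical build script
  artifacts:
    paths:
      - my-binary                         # hypothetical artifact
    expire_in: 1 week

# A concrete job composes the build template with a rules template such as .release.
my-build-job:
  extends:
    - .build-template
    - .release
  tags: ["my-runner"]                     # hypothetical runner tag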
code-quality:\n extends: .recompile-branch\n stage: check\n image: registry.gitlab.com/veloren/veloren-docker-ci/cache/quality:${CACHE_IMAGE_TAG}\n tags: ["veloren/veloren", "check"]\n variables:\n # Disables shallow cloning - full history of the source branch is needed for the 'git lfs fsck' command below.\n GIT_DEPTH: 0\n script:\n - echo $CI_COMMIT_TAG\n - echo $TAG_REGEX\n - ln -s /dockercache/target target\n - cat ./.gitlab/scripts/code-quality.sh\n - source ./.gitlab/scripts/code-quality.sh\n # Brings the target branch into the job's scope - used to define the full range of commits between the target and merged result.\n - git fetch origin ${CI_MERGE_REQUEST_TARGET_BRANCH_NAME}:refs/remotes/origin/${CI_MERGE_REQUEST_TARGET_BRANCH_NAME}\n - git lfs fsck --pointers origin/${CI_MERGE_REQUEST_TARGET_BRANCH_NAME}..HEAD\n\nsecurity:\n extends: .recompile-branch\n stage: check\n image: registry.gitlab.com/veloren/veloren-docker-ci/cache/quality:${CACHE_IMAGE_TAG}\n tags: ["veloren/veloren", "check"]\n allow_failure: true\n script:\n - ln -s /dockercache/target target\n - cat ./.gitlab/scripts/security.sh\n - source ./.gitlab/scripts/security.sh\n
| dataset_sample\yaml\veloren_veloren\.gitlab\CI\check.gitlab-ci.yml | check.gitlab-ci.yml | YAML | 1,186 | 0.8 | 0.035714 | 0.074074 | node-utils | 157 | 2024-01-08T20:06:49.363446 | MIT | false | a45b83c3b916a2bc2a55c21a7cb5f43f |
# Publishes veloren-server-cli to the gitlab container registry\n# https://gitlab.com/veloren/veloren/container_registry\n.publish:\n stage: publish\n rules:\n - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH\n when: on_success\n - when: never\n\ndocker:\n extends: .publish\n image:\n name: gcr.io/kaniko-project/executor:debug\n entrypoint: [""]\n tags: ["veloren/veloren", "publish", "trusted"]\n dependencies:\n - linux-x86_64\n before_script:\n - ls "$CI_PROJECT_DIR/server-cli/"\n script:\n # Help kaniko identify that it is running in a container.\n # avoids this issue: https://github.com/GoogleContainerTools/kaniko/issues/1542\n - export container=docker\n - echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > /kaniko/.docker/config.json\n - |\n source "$CI_PROJECT_DIR/.gitlab/scripts/util.sh";\n publishdockertag;\n if [ "${PUBLISH_DOCKER_TAG}" != "" ]; then\n /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/server-cli/Dockerfile --destination "${CI_REGISTRY_IMAGE}/server-cli:${PUBLISH_DOCKER_TAG}"\n fi\n\ngittag:\n extends: .publish\n rules:\n - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE == "schedule"\n image: bitnami/git:latest\n dependencies: []\n tags: ["veloren/veloren", "publish", "trusted"]\n before_script:\n - git --version\n - git lfs --version\n script:\n - git config --global user.email "[email protected]"\n - git config --global user.name "veloren-bot"\n - git config --global http.postBuffer 52428800\n - git remote set-url origin https://veloren-bot:${GITLAB_TOKEN_WRITE}@${CI_PROJECT_URL:8}\n - |\n if [[ ! "${SCHEDULE_CADENCE}" =~ ${TAG_REGEX} ]]; then\n git reflog expire --expire=now --all;\n git gc --prune=now;\n git fetch;\n git tag -a "${SCHEDULE_CADENCE}" -m '' -f;\n git push origin "${SCHEDULE_CADENCE}" -f || echo "failed pushed, trying again" && sleep 5 && git status && git fsck && git push origin "${SCHEDULE_CADENCE}" -f;\n fi\n\npages:\n extends: .publish\n rules:\n - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE != "schedule"\n image: registry.gitlab.com/veloren/veloren-docker-ci/cache/quality:${CACHE_IMAGE_TAG}\n dependencies: []\n tags: ["veloren/veloren", "publish"]\n artifacts:\n paths:\n - public\n script:\n - rm -rf public\n - mkdir -p public\n - RUSTDOCFLAGS="--enable-index-page -Zunstable-options" cargo doc --no-deps --document-private-items\n - mv target/doc/* public\n
| dataset_sample\yaml\veloren_veloren\.gitlab\CI\publish.gitlab-ci.yml | publish.gitlab-ci.yml | YAML | 2,593 | 0.8 | 0.071429 | 0.059701 | python-kit | 419 | 2024-08-23T02:05:51.144418 | Apache-2.0 | false | c5b20ae17469dfa1ed80945651a650f2 |
# Template to only run if actual changes has been made to the code and not just documentation\n.recompile-branch:\n rules:\n - if: $CI_PIPELINE_SOURCE == "merge_request_event"\n # No '-' here is *very* important: https://docs.gitlab.com/ee/ci/yaml/#complex-rule-clauses\n changes:\n - "**/*.{glsl,png,rs,ron,ftl,toml,vox,yml,wav,sh}"\n - "rust-toolchain"\n - "Cargo.lock"\n - ".gitlab-ci.yml"\n\n# TODO: appears unused\n# like .recompile-branch but will run on master too\n.recompile:\n rules:\n - changes:\n - "**/*.{glsl,png,rs,ron,ftl,toml,vox,yml,wav,sh}"\n - "rust-toolchain"\n - "Cargo.lock"\n - ".gitlab-ci.yml"\n
| dataset_sample\yaml\veloren_veloren\.gitlab\CI\recompile.yml | recompile.yml | YAML | 674 | 0.95 | 0.1 | 0.210526 | awesome-app | 420 | 2025-05-15T23:04:03.559454 | BSD-3-Clause | false | 2421381bfd80ab9c361bf5de4d3cc853 |
# allow_failure: true makes these pipelines manual and "non-blocking" which changed with except -> rule syntax\n.optional-release:\n stage: check\n rules:\n - if: $CI_PIPELINE_SOURCE != "merge_request_event" || $CI_PIPELINE_SOURCE == "schedule" || ( $CI_COMMIT_TAG != null && $CI_COMMIT_TAG =~ $TAG_REGEX )\n when: never\n - when: manual\n allow_failure: true\n\n# Template to only run if pushes to master, schedule or a version tag\n.release:\n stage: build\n rules:\n - if: $CI_PIPELINE_SOURCE != "merge_request_event" && ( $CI_PIPELINE_SOURCE == "schedule" || $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || ( $CI_COMMIT_TAG != null && $CI_COMMIT_TAG =~ $TAG_REGEX ) )\n when: on_success\n - when: never\n retry:\n max: 1
| dataset_sample\yaml\veloren_veloren\.gitlab\CI\release.yml | release.yml | YAML | 737 | 0.8 | 0.176471 | 0.117647 | node-utils | 655 | 2023-07-28T20:25:49.571539 | MIT | false | 6c9b1d162fa82545de25f32fe7ed81a4 |
version: "3.5"\n\nservices:\n game-server:\n image: registry.gitlab.com/veloren/veloren/server-cli:weekly\n stdin_open: true\n tty: true\n container_name: veloren-game-server-master\n ports:\n - "14004:14004"\n - "14005:14005"\n - "14006:14006/udp"\n restart: on-failure:0\n volumes:\n - "./userdata:/opt/userdata"\n environment:\n - RUST_LOG=debug,common::net=info\n watchtower:\n image: containrrr/watchtower\n volumes:\n - /var/run/docker.sock:/var/run/docker.sock\n command: --interval 30 --stop-timeout 130s --cleanup veloren-game-server-master\n restart: on-failure:0\n
| dataset_sample\yaml\veloren_veloren\server-cli\docker-compose.yml | docker-compose.yml | YAML | 624 | 0.7 | 0 | 0 | python-kit | 114 | 2024-08-13T00:27:52.842998 | Apache-2.0 | false | dcd333257b40f2827134dd7fa6f2e3cc |
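Compose files like the record above are commonly adjusted per host through Docker Compose's standard override mechanism (a docker-compose.override.yml placed next to the base file is merged automatically). A minimal, hypothetical override that only changes the log level of the service defined above might look like this; the chosen RUST_LOG value is an assumption, the service name comes from the record:

services:
  game-server:
    environment:
      - RUST_LOG=info   # hypothetical: overrides the base file's debug-level logging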
# This file will create many PRs, one for each example.\n# From what I can tell, we are forced into approaching it this way\n# because of the many workspace roots.\n\nversion: 2\nupdates:\n - package-ecosystem: "npm"\n directory: "/examples/basic"\n schedule:\n interval: "weekly"\n labels:\n - "area: examples"\n open-pull-requests-limit: 1\n pull-request-branch-name:\n separator: "-"\n versioning-strategy: "increase"\n allow:\n - dependency-type: "direct"\n assignees:\n - "anthonyshew"\n groups:\n basic:\n patterns:\n - "*"\n update-types:\n - "minor"\n\n - package-ecosystem: "npm"\n directory: "/examples/kitchen-sink"\n schedule:\n interval: "weekly"\n labels:\n - "area: examples"\n open-pull-requests-limit: 1\n pull-request-branch-name:\n separator: "-"\n versioning-strategy: "increase"\n allow:\n - dependency-type: "direct"\n assignees:\n - "anthonyshew"\n groups:\n kitchen-sink:\n patterns:\n - "*"\n update-types:\n - "minor"\n ignore:\n # Types are just...broken?\n - dependency-name: "express"\n - dependency-name: "@types/express"\n # Remix is behind on updating these peer deps and npm doesn't like it\n - dependency-name: "react"\n - dependency-name: "react-dom"\n - dependency-name: "@types/react"\n - dependency-name: "@types/react-dom"\n # TODO: Investigate why upgrading breaks admin:build\n - dependency-name: "bunchee"\n # We can't upgrade versions of this due to npm\n - dependency-name: "vite"\n\n - package-ecosystem: "npm"\n directory: "/examples/non-monorepo"\n schedule:\n interval: "weekly"\n labels:\n - "area: examples"\n open-pull-requests-limit: 1\n pull-request-branch-name:\n separator: "-"\n versioning-strategy: "increase"\n allow:\n - dependency-type: "direct"\n assignees:\n - "anthonyshew"\n groups:\n non-monorepo:\n patterns:\n - "*"\n update-types:\n - "minor"\n\n - package-ecosystem: "npm"\n directory: "/examples/with-shell-commands"\n schedule:\n interval: "weekly"\n labels:\n - "area: examples"\n open-pull-requests-limit: 1\n pull-request-branch-name:\n separator: "-"\n versioning-strategy: "increase"\n allow:\n - dependency-type: "direct"\n assignees:\n - "anthonyshew"\n groups:\n with-shell-commands:\n patterns:\n - "*"\n update-types:\n - "minor"\n\n - package-ecosystem: "npm"\n directory: "/examples/with-svelte"\n schedule:\n interval: "weekly"\n labels:\n - "area: examples"\n open-pull-requests-limit: 1\n pull-request-branch-name:\n separator: "-"\n versioning-strategy: "increase"\n allow:\n - dependency-type: "direct"\n assignees:\n - "anthonyshew"\n groups:\n with-svelte:\n patterns:\n - "*"\n update-types:\n - "minor"\n\n - package-ecosystem: "npm"\n directory: "/examples/with-tailwind"\n schedule:\n interval: "weekly"\n labels:\n - "area: examples"\n open-pull-requests-limit: 1\n pull-request-branch-name:\n separator: "-"\n versioning-strategy: "increase"\n allow:\n - dependency-type: "direct"\n assignees:\n - "anthonyshew"\n groups:\n with-tailwind:\n patterns:\n - "*"\n update-types:\n - "minor"\n
| dataset_sample\yaml\vercel_turborepo\.github\dependabot.yml | dependabot.yml | YAML | 3,443 | 0.8 | 0.006944 | 0.050725 | node-utils | 338 | 2023-10-20T21:44:47.536766 | MIT | false | 9c74b3178d1a7d5eb9abcb549c45f9a1 |
# Used to auto generate turborepo release notes\n\nchangelog:\n exclude:\n labels:\n - "area: ci"\n - "release: turborepo"\n - "release-notes-ignore"\n categories:\n - title: Docs\n labels:\n - "area: docs"\n - "area: site"\n - title: create-turbo\n labels:\n - "pkg: create-turbo"\n - title: turbo-ignore\n labels:\n - "pkg: turbo-ignore"\n - title: "@turbo/codemod"\n labels:\n - "pkg: turbo-codemod"\n - title: "eslint"\n labels:\n - "pkg: turbo-eslint"\n - title: "@turbo/repository"\n labels:\n - "pkg: turbo-repository"\n - title: "@turbo/telemetry"\n labels:\n - "pkg: turbo-telemetry"\n - title: Examples\n labels:\n - "area: examples"\n - title: Changelog\n labels:\n - "*"\n
| dataset_sample\yaml\vercel_turborepo\.github\release.yml | release.yml | YAML | 809 | 0.8 | 0 | 0.027778 | awesome-app | 921 | 2023-07-13T07:11:13.193480 | GPL-3.0 | false | 1250b089c5d75d91fec4baf534fe8fbc |
# Docs\n\n# Label Config\n\n# labeler:\n# - settings:\n# - codeOwnersPath: {PATH TO CODEOWNERS FILE (defaults to .github/CODEOWNERS)}\n# - labels:\n# - label: {YOUR LABEL NAME}\n# condition: {AND (default) | OR}\n# when:\n# {TEST_FUNCTION}: {REGEX}\n# ...\n# ...\n\n#| Function Name | Description |\n#| --------------------------- | -------------------------------------------------------------------------- |\n#| `isAnyFilePathMatch` | Returns true if any filename in the PR diff matches the given regex |\n#| `isPRBodyMatch` | Returns true if the PR description matches the given regex |\n#| `isPRTitleMatch` | Returns true if the PR title matches the given regex |\n#| `isPRAuthorMatch` | Returns true if the PR author matches the given regex |\n#| `isPRAuthorCompanyMatch` | Returns true if the PR author's company matches the given regex |\n#| `isAnyFileOwnedByMatch` | Returns true if any owner of a file in the PR diff matches the given regex |\n#| `isNotAnyFilePathMatch` | The negation of `isAnyFilePathMatch` |\n#| `isNotPRBodyMatch` | The negation of `isPRBodyMatch` |\n#| `isNotPRTitleMatch` | The negation of `isPRTitleMatch` |\n#| `isNotPRAuthorMatch` | The negation of `isPRAuthorMatch` |\n#| `isNotPRAuthorCompanyMatch` | The negation of `isPRAuthorCompanyMatch` |\n#| `isNotAnyFileOwnerByMatch` | The negation of `isAnyFileOwnedByMatch` |\n\nlabeler:\n labels:\n # needs: triage when not any of the turborepo team\n - label: "needs: triage"\n when:\n isNotPRAuthorMatch: "^(padmaia|anthonyshew|dimitropoulos|tknickman|chris-olszewski|NicholasLYang)$"\n # Removes the PR from release notes when its chore or ci\n - label: "release-notes-ignore"\n when:\n isPRTitleMatch: "(\bchore\b|\bci\b).*?:"\n\n # areas\n - label: "area: ci"\n when:\n isAnyFilePathMatch: '^\.github\/(workflows|actions).*$'\n - label: "area: examples"\n when:\n isAnyFilePathMatch: '^examples\/.*$'\n - label: "area: docs"\n when:\n isAnyFilePathMatch: '^docs\/.*\.mdx$'\n - label: "area: site"\n when:\n isAnyFilePathMatch: '^docs\/.*\.(?!mdx).*$'\n\n # packages\n - label: "pkg: turbo-eslint"\n when:\n isAnyFilePathMatch: '^packages\/eslint-(plugin|config)-turbo\/.*$'\n - label: "pkg: turbo-ignore"\n when:\n isAnyFilePathMatch: '^packages\/turbo-ignore\/.*$'\n - label: "pkg: turbo-codemod"\n when:\n isAnyFilePathMatch: '^packages\/turbo-codemod\/.*$'\n - label: "pkg: create-turbo"\n when:\n isAnyFilePathMatch: '^packages\/create-turbo\/.*$'\n - label: "pkg: turbo-gen"\n when:\n isAnyFilePathMatch: '^packages\/turbo-gen\/.*$'\n - label: "pkg: turbo-workspaces"\n when:\n isAnyFilePathMatch: '^packages\/turbo-workspaces\/.*$'\n - label: "pkg: turbo-repository"\n when:\n isAnyFilePathMatch: '^packages\/turbo-repository\/.*$'\n - label: "pkg: turbo-telemetry"\n when:\n isAnyFilePathMatch: '^packages\/turbo-telemetry\/.*$'\n\n # release\n - label: "release: turborepo"\n when:\n isAnyFilePathMatch: '^version\.txt$'\n isPRTitleMatch: '^release\(turborepo\):.*$'\nevents:\n onPublish:\n turbo:\n - runWorkflow: bench-turborepo.yml\n when: any\n - runWorkflow: update-examples-on-release.yml\n when: latest\n
| dataset_sample\yaml\vercel_turborepo\.github\turbo-orchestrator.yml | turbo-orchestrator.yml | YAML | 3,795 | 0.8 | 0.064516 | 0.360465 | react-lib | 752 | 2025-05-18T12:00:20.033556 | GPL-3.0 | false | ba83d88fecbcab3994cee12eb9db2403 |
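The comment block at the top of the record above documents the labeler schema (label, optional condition, and a when block of test functions mapped to regexes). A small, hypothetical rule that combines two of the documented test functions with OR, following that shape, would be (the label name and both regexes are assumptions for illustration only):

labeler:
  labels:
    - label: "example: needs-review"      # hypothetical label
      condition: OR                       # documented alternative to the default AND
      when:
        isPRTitleMatch: "^fix.*$"
        isAnyFilePathMatch: '^crates\/.*$'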
name: "cargo-sweep"\ndescription: "Runs cargo-sweep to clean old build artifacts"\nruns:\n using: "node20"\n main: "dist/main/index.js"\n post: "dist/post/index.js"\n
| dataset_sample\yaml\vercel_turborepo\.github\actions\cargo-sweep\action.yml | action.yml | YAML | 163 | 0.7 | 0 | 0 | react-lib | 804 | 2023-07-16T12:18:24.539116 | BSD-3-Clause | false | 33dfeb72617ff9eb4c6fe9e513139905 |
name: "Run Turbo Tests"\ndescription: "Setup the environment and run Turbo tests"\ninputs:\n github-token:\n description: "GitHub token for GitHub"\n required: true\n turbo-token:\n description: "Turbo token for authentication"\n required: true\n turbo-team:\n description: "Turbo team for authentication"\n required: true\n test-filter:\n description: "Filter for the turbo run command"\n required: true\nruns:\n using: "composite"\n steps:\n - name: Disable corepack\n shell: bash\n run: corepack disable\n\n - name: Setup Turborepo Environment\n uses: ./.github/actions/setup-turborepo-environment\n with:\n github-token: "${{ inputs.github-token }}"\n node-version: "22"\n\n - name: Install Global Turbo\n uses: ./.github/actions/install-global-turbo\n\n - name: Run Turbo Tests\n shell: bash\n run: |\n turbo run test \\n --filter="${{ inputs.test-filter }}" \\n --continue \\n --token=${{ inputs.turbo-token }} \\n --team=${{ inputs.turbo-team }} \\n --env-mode=strict \\n --concurrency=1\n
| dataset_sample\yaml\vercel_turborepo\.github\actions\examples-tests\action.yml | action.yml | YAML | 1,107 | 0.85 | 0.097561 | 0 | node-utils | 647 | 2024-08-25T06:03:41.758811 | Apache-2.0 | true | eabff326c3604a9c4b8852f9dd350e83 |
name: "Install Global Turbo"\ndescription: "Installs turbo globally. Expects Node and npm to already be installed"\n\ninputs:\n turbo-version:\n description: "Specify a Turbo version or tag (e.g., 2.0.5, latest, beta, canary)"\n required: false\n default: ""\n\nruns:\n using: "composite"\n steps:\n - name: Determine Turbo Version\n id: determine-version\n shell: bash\n run: |\n if [[ -n "${{ inputs.turbo-version }}" ]]; then\n VERSION="${{ inputs.turbo-version }}"\n else\n VERSION=$(npm view turbo --json | jq -r '.versions | map(select(test("^2\\."))) | last')\n echo "No version provided, using latest 2.x version: $VERSION"\n fi\n echo "TURBO_VERSION=$VERSION" >> $GITHUB_ENV\n\n - name: Install Turbo globally\n shell: bash\n run: |\n echo "Installing turbo@$TURBO_VERSION..."\n npm i -g turbo@$TURBO_VERSION\n
| dataset_sample\yaml\vercel_turborepo\.github\actions\install-global-turbo\action.yml | action.yml | YAML | 902 | 0.85 | 0.034483 | 0 | awesome-app | 561 | 2024-01-09T18:44:30.890583 | Apache-2.0 | false | 1b37245b8a59dc9f1b8461e5de4af8a2 |
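Composite actions like the one above are consumed from a workflow job through a relative `uses:` path after the repository has been checked out. A minimal, hypothetical caller that pins the version input looks like this; the workflow name, job name, and pinned version are illustrative, while the action path and its `turbo-version` input come from the record:

name: Example caller                        # hypothetical workflow
on: workflow_dispatch
jobs:
  use-turbo:
    runs-on: ubuntu-latest                  # Node and npm are preinstalled, which the action expects
    steps:
      - uses: actions/checkout@v4           # local actions require a checkout first
      - name: Install Global Turbo
        uses: ./.github/actions/install-global-turbo
        with:
          turbo-version: "2.0.5"            # optional; omitting it falls back to the latest 2.x as implemented above
      - run: turbo --version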
name: "Setup Capnproto"\ndescription: "Sets up capnproto for the current platform"\n\nruns:\n using: "composite"\n steps:\n - name: "Setup capnproto for Linux"\n if: runner.os == 'Linux'\n shell: bash\n run: sudo apt-get -y update && sudo apt-get install -y capnproto\n\n - name: "Setup capnproto for macos"\n if: runner.os == 'macOS'\n shell: bash\n run: brew install capnp\n\n - name: "Setup capnproto for Windows"\n if: runner.os == 'Windows'\n shell: bash\n run: choco install capnproto\n
| dataset_sample\yaml\vercel_turborepo\.github\actions\setup-capnproto\action.yml | action.yml | YAML | 528 | 0.7 | 0.35 | 0 | node-utils | 277 | 2024-11-16T18:42:24.094594 | MIT | false | 59a49ac5dbf334b244d584730e30a1b8 |
name: "Turborepo Node.js Setup"\ndescription: "Sets Node.js up for CI"\ninputs:\n enable-corepack:\n description: "Control turning on corepack."\n required: false\n default: "true"\n extra-flags:\n description: "Extra flags to pass to the pnpm install."\n required: false\n default: ""\n package-install:\n description: "Don't run the install step."\n required: false\n default: "true"\n node-version:\n description: "Node version to install"\n required: false\n default: "18"\n\nruns:\n using: "composite"\n steps:\n - name: Setup pnpm\n uses: pnpm/action-setup@v4\n\n - name: Setup Node.js\n uses: actions/setup-node@v4\n with:\n node-version: ${{ inputs.node-version }}\n cache: pnpm\n\n - name: Upgrade corepack\n if: ${{ inputs.enable-corepack == 'true' }}\n shell: bash\n # Forcibly upgrade our available version of corepack.\n # The bundled version in node 16 has known issues.\n # Prepends the npm bin dir so that it is always first.\n run: |\n npm install --force --global corepack@latest\n npm config get prefix >> $GITHUB_PATH\n\n - name: Configure corepack\n if: ${{ inputs.enable-corepack == 'true' }}\n shell: bash\n run: corepack enable\n\n - name: pnpm install\n id: install\n if: ${{ inputs.package-install == 'true' }}\n continue-on-error: true\n shell: bash\n run: pnpm install ${{ inputs.extra-flags }}\n\n - name: pnpm install (second try)\n if: ${{ steps.install.outcome == 'failure' }}\n shell: bash\n run: pnpm install ${{ inputs.extra-flags }}\n
| dataset_sample\yaml\vercel_turborepo\.github\actions\setup-node\action.yml | action.yml | YAML | 1,600 | 0.95 | 0.103448 | 0.057692 | node-utils | 757 | 2024-03-09T21:53:25.887239 | GPL-3.0 | false | 600920518029845fb15a18b33f556121 |
name: "Turbo Rust Setup"\ndescription: "Sets up the Rust toolchain for CI"\ninputs:\n targets:\n description: "Comma-separated list of target triples to install for this toolchain"\n required: false\n github-token:\n description: "GitHub token. You can pass secrets.GITHUB_TOKEN"\n required: true\n shared-cache-key:\n description: "A cache key that is used instead of the automatic `job`-based key, and is stable over multiple jobs."\n required: false\n cache-key:\n description: "An additional cache key that is added alongside the automatic `job`-based cache key and can be used to further differentiate jobs."\n required: false\n save-cache:\n description: "Determiners whether the cache should be saved. If `false`, the cache is only restored."\n required: false\n default: "false"\n\nruns:\n using: "composite"\n steps:\n - name: "Setup Rust toolchain"\n uses: actions-rust-lang/setup-rust-toolchain@v1\n with:\n target: ${{ inputs.targets }}\n # needed to not make it override the defaults\n rustflags: ""\n # we want more specific settings\n cache: false\n\n - name: "Install LLD (LLVM Linker) for Linux"\n if: runner.os == 'Linux'\n shell: bash\n run: sudo apt-get -y update && sudo apt-get install -y lld\n\n - name: Set Up Protoc\n id: set-up-protoc\n continue-on-error: true\n uses: arduino/setup-protoc@v3\n with:\n version: "26.x"\n repo-token: ${{ inputs.github-token }}\n\n - name: Set Up Protoc (second try)\n if: steps.set-up-protoc.outcome == 'failure'\n uses: arduino/setup-protoc@v3\n with:\n version: "26.x"\n repo-token: ${{ inputs.github-token }}\n\n - name: "Add cargo problem matchers"\n shell: bash\n run: echo "::add-matcher::${{ github.action_path }}/matchers.json"\n\n - uses: rui314/setup-mold@v1\n\n - name: "Setup Rust Cache"\n uses: Swatinem/rust-cache@v2\n with:\n shared-key: ${{ inputs.shared-cache-key }}\n key: ${{ inputs.cache-key }}\n # the cache is huge and we only get 10gb max, so we only save on master\n save-if: ${{ github.ref == 'refs/heads/main' && inputs.save-cache || 'false' }}\n\n - name: "Install cargo-sweep"\n uses: taiki-e/install-action@v2\n env:\n GITHUB_TOKEN: ${{ inputs.github-token }}\n with:\n tool: [email protected],[email protected]\n\n - name: "Run cargo-sweep"\n uses: ./.github/actions/cargo-sweep\n
| dataset_sample\yaml\vercel_turborepo\.github\actions\setup-rust\action.yml | action.yml | YAML | 2,467 | 0.95 | 0.093333 | 0.045455 | python-kit | 457 | 2025-01-05T15:31:18.495245 | MIT | false | ed6ddec0908f4ab8ec86a4902b582d6d |
name: "Setup Turborepo Environment"\ndescription: "Sets up development environment for turborepo"\ninputs:\n github-token:\n description: "GitHub token. You can pass secrets.GITHUB_TOKEN"\n required: true\n node-version:\n description: Node version\n required: false\n default: "18"\n\nruns:\n using: "composite"\n steps:\n - name: "Setup Node"\n uses: ./.github/actions/setup-node\n with:\n extra-flags: --no-optional\n node-version: ${{ inputs.node-version }}\n env:\n PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1\n\n - name: "Setup Rust"\n uses: ./.github/actions/setup-rust\n with:\n shared-cache-key: turborepo-debug-build\n cache-key: ${{ inputs.target }}\n save-cache: true\n github-token: ${{ inputs.github-token }}\n\n - name: "Setup capnproto"\n uses: ./.github/actions/setup-capnproto\n
| dataset_sample\yaml\vercel_turborepo\.github\actions\setup-turborepo-environment\action.yml | action.yml | YAML | 863 | 0.85 | 0.03125 | 0 | vue-tools | 854 | 2023-07-15T19:50:08.283625 | Apache-2.0 | false | 855bbca77508bcc013f1a239e30ceebb |
body:\n - type: textarea\n attributes:\n label: Summary\n description: What do you need help with?\n validations:\n required: true\n - type: textarea\n attributes:\n label: Additional information\n description: Any code snippets, error messages, or dependency details that may be related?\n render: js\n validations:\n required: false\n - type: input\n attributes:\n label: Example\n description: A link to a minimal reproduction is helpful for collaborative debugging!\n validations:\n required: false\n
| dataset_sample\yaml\vercel_turborepo\.github\DISCUSSION_TEMPLATE\help.yml | help.yml | YAML | 555 | 0.85 | 0.05 | 0 | node-utils | 730 | 2024-02-08T03:07:58.421031 | MIT | false | 63073580b879664f692ae3e9ad747958 |
body:\n - type: textarea\n attributes:\n label: Goals\n description: Short list of the problems that the feature request aims to address.\n value: |\n 1.\n 2.\n 3.\n validations:\n required: true\n - type: textarea\n attributes:\n label: Non-goals\n description: Short list of what the feature request _does not_ aim to address.\n value: |\n 1.\n 2.\n 3.\n validations:\n required: false\n - type: textarea\n attributes:\n label: Background\n description: Discuss prior art and provide context around your idea. Why do you think this feature is needed? Are there current alternatives?\n validations:\n required: true\n - type: textarea\n attributes:\n label: Proposal\n description: How should this feature be implemented? Are you interested in contributing?\n validations:\n required: true\n
| dataset_sample\yaml\vercel_turborepo\.github\DISCUSSION_TEMPLATE\ideas.yml | ideas.yml | YAML | 899 | 0.85 | 0 | 0 | node-utils | 642 | 2024-12-05T22:30:25.301897 | BSD-3-Clause | false | 95501abd8170364372e2b6be15b6ea9a |
name: Turborepo bug report\ndescription: Create a bug report\nlabels: ["kind: bug", "needs: triage"]\n\nbody:\n - type: markdown\n attributes:\n value: |\n This template is to report bugs. Before opening a new issue, please do a [search](https://github.com/vercel/turborepo/issues) of existing issues and :+1: upvote the existing issue instead. This will result in a quicker resolution.\n\n If you need help with your own project, you can:\n - Start a discussion in the ["Help" section](https://github.com/vercel/turborepo/discussions/categories/help).\n - Ask a question in [the Turbo Discord server](https://turbo.build/discord).\n\n - type: checkboxes\n attributes:\n label: Verify canary release\n description: "Please install the canary version of `turbo` (e.g. `npm install turbo@canary`) to try the canary version of Turborepo. It includes all features and fixes that have not been released to the stable version yet. Some issues may already be fixed in the canary version, so please verify that your issue reproduces before opening a new issue."\n options:\n - label: I verified that the issue exists in the latest Turborepo canary release.\n required: true\n\n - type: input\n attributes:\n label: Link to code that reproduces this issue\n description: |\n A link to a **public** GitHub repository with a minimal reproduction. Ideally, minimal reproductions should be created using [`npx create-turbo@canary -e with-shell-commands`](https://github.com/vercel/turborepo/tree/main/examples/with-shell-commands) and should include only changes that contribute to the issue. You may also use [`npx create-turbo@canary -e <example-name>`](https://github.com/vercel/turborepo/tree/main/examples) to create a reproduction that includes frameworks if you believe your bug requires a specific framework to reproduce.\n validations:\n required: true\n\n - type: input\n attributes:\n label: Which canary version will you have in your reproduction?\n description: |\n To find the exact version installed in your reproduction, you can use `npm list turbo`, `yarn why turbo`, or `pnpm why turbo`.\n validations:\n required: true\n\n - type: textarea\n attributes:\n label: Environment information\n render: block\n description: |\n Run the command `turbo info` and paste its output here. Please review it in case there is sensitive information you don't want to share.\n\n - type: textarea\n attributes:\n label: Expected behavior\n description: |\n A clear and concise description of what you expected to happen.\n validations:\n required: true\n\n - type: textarea\n attributes:\n label: Actual behavior\n description: |\n A clear and concise description of the bug.\n validations:\n required: true\n\n - type: textarea\n attributes:\n label: To Reproduce\n description: |\n Steps to reproduce the unexpected behavior. Please provide clear code snippets that always reproduces the issue or a GitHub repository. Screenshots can be provided in "Additional context" below.\n validations:\n required: true\n\n - type: markdown\n attributes:\n value: |\n Another way you can help the maintainers is to pinpoint the `canary` version of `turbo` that introduced the issue. Check out our [releases](https://github.com/vercel/turborepo/releases), and try to find the first `canary` release that introduced the issue. While not required, this will help us narrow down the scope of the issue, and possibly point to the PR/code change that introduced it. 
You can install a specific version of `turbo` by running `npm install turbo@<version>`.\n - type: textarea\n attributes:\n label: Additional context\n description: |\n Any extra information that might help us investigate. For example, where are you deploying your application (Vercel, Docker, other platform)? Is it only reproducible on that platform, or locally too? Is the issue only happening in a specific browser? etc.\n placeholder: |\n I tested my reproduction against different canary releases, and the first one that introduced the bug was "2.2.4-canary.2", since reverting to "2.3.4-canary.1" works.\n or\n I am using GitHub Actions but running my tasks locally does not have the same issue.\n
| dataset_sample\yaml\vercel_turborepo\.github\ISSUE_TEMPLATE\0-turborepo-bug-report.yml | 0-turborepo-bug-report.yml | YAML | 4,367 | 0.95 | 0.036585 | 0 | python-kit | 651 | 2023-12-22T00:03:54.623969 | BSD-3-Clause | false | 2f9bf695859536926137e2dd86f22979 |
name: "Documentation update or improvement"\ndescription: A request to update or improve documentation\ntitle: "📚 Docs: "\nlabels:\n - "area: docs"\nbody:\n - type: markdown\n attributes:\n value: Before opening this issue to request a docs improvement, is this something you can help us with? Your contributions are welcomed and appreciated.\n - type: markdown\n attributes:\n value: Thank you for helping us improve the docs!\n - type: textarea\n attributes:\n label: What is the improvement or update you wish to see?\n description: "Example: I would like to see more examples of how to use `turbo run`. Or, the `turbo run` docs are missing information."\n validations:\n required: true\n - type: textarea\n attributes:\n label: Is there any context that might help us understand?\n description: A clear description of any added context that might help us understand.\n validations:\n required: true\n - type: input\n attributes:\n label: Does the docs page already exist? Please link to it.\n description: "Example: https://turbo.build/repo/docs/reference/command-line-reference/run"\n validations:\n required: false\n
| dataset_sample\yaml\vercel_turborepo\.github\ISSUE_TEMPLATE\1-docs.yml | 1-docs.yml | YAML | 1,180 | 0.95 | 0.033333 | 0 | node-utils | 812 | 2023-10-31T03:38:35.507190 | GPL-3.0 | false | f377a308c46a9675c264f727074b9bc5 |
blank_issues_enabled: false\ncontact_links:\n - name: Turbopack in Next.js Bug Report\n url: https://github.com/vercel/next.js/issues/new?assignees=&labels=template%3A+bug&projects=&template=1.bug_report.yml\n about: Create a Next.js bug report for the Turbopack team\n - name: Ask for help\n url: https://github.com/vercel/turborepo/discussions/new?category=help\n about: Need to ask a question? Get help from the community.\n - name: Submit an idea\n url: https://github.com/vercel/turborepo/discussions/new?category=ideas\n about: Make feature requests and suggest improvements.\n
| dataset_sample\yaml\vercel_turborepo\.github\ISSUE_TEMPLATE\config.yml | config.yml | YAML | 593 | 0.8 | 0.181818 | 0 | awesome-app | 144 | 2023-08-22T00:07:17.349169 | Apache-2.0 | false | d0900b6fe54568a24ae50b813fac698f |
name: Benchmark Turborepo\n\non:\n workflow_dispatch:\n workflow_run:\n # Make sure this matches the name of the workflow in ./github/workflows/turborepo-release.yml.\n workflows: [Turborepo Release]\n types:\n - completed\n push:\n branches:\n - main\n paths:\n - "cli/**"\n - crates/turborepo*/**\n # - "benchmark/**" (we don't need to run benchmarks when the benchmark changes, next push will capture it)\n\njobs:\n benchmark:\n name: Benchmark turbo run\n timeout-minutes: 60\n runs-on: ubuntu-latest\n\n steps:\n - uses: actions/checkout@v4\n - uses: ./.github/actions/setup-node\n\n - name: Setup Turborepo Environment\n uses: ./.github/actions/setup-turborepo-environment\n with:\n github-token: "${{ secrets.GITHUB_TOKEN }}"\n\n - name: Build\n run: pnpm -F cli build:release\n\n - name: Run benchmarks\n run: pnpm -F @turbo/benchmark benchmark\n\n - name: Save to Tinybird\n run: |\n curl \\n -i \\n -F "ndjson=@./packages/turbo-benchmark/tinybird.ndjson" \\n -X POST \\n -H 'Authorization: Bearer ${{ secrets.TINYBIRD_TOKEN }}' \\n 'https://api.us-east.tinybird.co/v0/datasources?format=ndjson&name=turbo_benchmarks&mode=append'\n\n time-to-first-task:\n name: Benchmark TTFT\n timeout-minutes: 60\n runs-on: ${{ matrix.os.runner }}\n strategy:\n fail-fast: false\n matrix:\n os:\n - name: ubuntu\n runner: ubuntu-latest\n - name: macos\n runner: macos-latest\n - name: windows\n runner: windows-latest\n\n steps:\n - uses: actions/checkout@v4\n - name: Set filename for profile\n id: filename\n shell: bash\n run: |\n echo 'file_basename=${{ matrix.os.name }}' >> "$GITHUB_OUTPUT"\n echo 'filename=${{ matrix.os.name }}.json' >> "$GITHUB_OUTPUT"\n\n - name: Display filename\n shell: bash\n run: echo "${{ steps.filename.outputs.filename }}"\n\n - name: Setup Turborepo Environment\n uses: ./.github/actions/setup-turborepo-environment\n with:\n github-token: "${{ secrets.GITHUB_TOKEN }}"\n\n - name: Build Turborepo from source\n run: pnpm -F cli build:release\n\n - name: Run benchmarks\n shell: bash\n # ttft script will normalize filepath and place the profile in the benchmark directory.\n run: pnpm -F @turbo/benchmark ttft "${{ steps.filename.outputs.filename }}"\n\n - name: Upload Artifacts\n uses: actions/upload-artifact@v4\n with:\n name: profiles-${{ matrix.os.name }} # This name will be the folder each file gets downloaded to\n if-no-files-found: error\n # cwd is root of the repository, so we need the benchmark/ prefixed path\n path: |\n packages/turbo-benchmark/profiles/${{ steps.filename.outputs.filename }}\n packages/turbo-benchmark/profiles/${{ steps.filename.outputs.file_basename }}-ttft.json\n\n # Send each of the profiles generated from the previous job to TinyBird\n # We'll wait for all profiles to complete before sending.\n send-to-tinybird:\n name: Send to Tinybird\n needs: [time-to-first-task]\n runs-on: ubuntu-latest\n env:\n TINYBIRD_TOKEN: ${{secrets.TINYBIRD_TOKEN}}\n\n steps:\n - uses: actions/checkout@v4\n\n - name: Setup Node\n uses: ./.github/actions/setup-node\n\n - name: Download profiles\n uses: actions/download-artifact@v4\n with:\n path: packages/turbo-benchmark/profiles/\n pattern: profiles-*\n merge-multiple: true\n\n - name: Display TTFT Data\n shell: bash\n run: |\n ls -al packages/turbo-benchmark/profiles\n cat packages/turbo-benchmark/profiles/ubuntu-ttft.json\n cat packages/turbo-benchmark/profiles/macos-ttft.json\n cat packages/turbo-benchmark/profiles/windows-ttft.json\n\n - name: Send data to TinyBird\n shell: bash\n run: |\n cd packages/turbo-benchmark\n node -r 
esbuild-register ./src/ttft/tinybird.ts profiles/ubuntu-ttft.json ${{github.run_id}}\n node -r esbuild-register ./src/ttft/tinybird.ts profiles/macos-ttft.json ${{github.run_id}}\n node -r esbuild-register ./src/ttft/tinybird.ts profiles/windows-ttft.json ${{github.run_id}}\n\n send-to-slack:\n name: Send to Slack\n # Wait for send-to-tinybird so we can get aggregate data points\n # before sending to slack.\n needs: [send-to-tinybird]\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n\n - name: Setup Node\n uses: ./.github/actions/setup-node\n\n - name: Download profiles\n uses: actions/download-artifact@v4\n with:\n path: packages/turbo-benchmark/profiles/\n pattern: profiles-*\n merge-multiple: true\n\n - name: Display TTFT Data\n shell: bash\n run: |\n ls -al packages/turbo-benchmark/profiles\n cat packages/turbo-benchmark/profiles/ubuntu-ttft.json\n cat packages/turbo-benchmark/profiles/macos-ttft.json\n cat packages/turbo-benchmark/profiles/windows-ttft.json\n\n # TODO: compare results to previous data and only post regressions\n - name: Create Slack payload\n shell: bash\n env:\n BLOB_READ_WRITE_TOKEN: ${{ secrets.BLOB_READ_WRITE_TOKEN }}\n TINYBIRD_TOKEN: ${{ secrets.TINYBIRD_TOKEN }}\n run: |\n cd packages/turbo-benchmark\n node -r esbuild-register ./src/ttft/slack.ts ${{github.run_id}}\n\n - name: Debug Slack payload\n shell: bash\n run: cat packages/turbo-benchmark/slack-payload.json | jq\n\n - name: Send payload to slack\n uses: slackapi/[email protected]\n with:\n payload-file-path: "packages/turbo-benchmark/slack-payload.json"\n env:\n SLACK_WEBHOOK_URL: "${{ secrets.TURBOREPO_PERF_BENCHMARK_SLACK_WEBHOOK_URL }}"\n
| dataset_sample\yaml\vercel_turborepo\.github\workflows\bench-turborepo.yml | bench-turborepo.yml | YAML | 6,006 | 0.8 | 0.021858 | 0.057692 | node-utils | 552 | 2024-07-11T23:47:13.087784 | Apache-2.0 | false | 1aa5da1304652423dff98f821a5e1224 |
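The benchmark workflow above hinges on passing profile files between jobs via artifacts. Stripped of everything benchmark-specific, the upload/download pairing it uses reduces to the following sketch; the workflow, job, artifact, and file names are hypothetical, while the action versions and the pattern/merge-multiple inputs mirror the record:

name: Artifact hand-off sketch              # hypothetical workflow
on: workflow_dispatch
jobs:
  produce:
    runs-on: ubuntu-latest
    steps:
      - run: echo '{}' > profile.json       # stand-in for a generated profile
      - uses: actions/upload-artifact@v4
        with:
          name: profiles-linux
          path: profile.json
  consume:
    needs: [produce]                        # wait for the producer before downloading
    runs-on: ubuntu-latest
    steps:
      - uses: actions/download-artifact@v4
        with:
          pattern: profiles-*
          merge-multiple: true
          path: profiles/
      - run: ls profiles/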
name: Docs checks\n\non:\n pull_request:\n paths:\n - "docs/**"\n - ".github/actions/validate-docs-links/**"\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}\n cancel-in-progress: ${{ github.event_name == 'pull_request' }}\n\njobs:\n validate-docs-links:\n runs-on: ubuntu-latest\n steps:\n - name: Checkout\n uses: actions/checkout@v4\n\n - uses: ./.github/actions/setup-node\n with:\n node-version: 20\n\n - name: Run link checker\n run: cd docs/link-checker && pnpm run check-links\n env:\n GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n
| dataset_sample\yaml\vercel_turborepo\.github\workflows\docs.yml | docs.yml | YAML | 633 | 0.8 | 0 | 0 | vue-tools | 586 | 2024-12-12T17:49:33.235348 | BSD-3-Clause | false | b3902b923f4d5c685cf773234d743224 |
name: Lint pull request title\n\non:\n pull_request_target:\n types:\n - opened\n - edited\n - synchronize\n - reopened\n\npermissions:\n pull-requests: read\n\njobs:\n main:\n name: Validate PR title\n runs-on: ubuntu-latest\n steps:\n - uses: amannn/action-semantic-pull-request@v5\n env:\n GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n with:\n # Configure that a scope must always be provided.\n requireScope: false\n # Configure additional validation for the subject based on a regex.\n # Ensures that the subject doesn't start with an uppercase character.\n subjectPattern: ^[^A-Z].*$\n # If `subjectPattern` is configured, you can use this property to override\n # the default error message that is shown when the pattern doesn't match.\n # The variables `subject` and `title` can be used within the message.\n subjectPatternError: |\n The subject "{subject}" found in the pull request title "{title}" doesn't match the configured pattern.\n Please ensure that the subject doesn't start with an uppercase character.\n
| dataset_sample\yaml\vercel_turborepo\.github\workflows\lint-pr-title.yml | lint-pr-title.yml | YAML | 1,154 | 0.95 | 0.030303 | 0.2 | react-lib | 501 | 2024-11-10T22:13:52.217801 | BSD-3-Clause | false | fbb58f798e96e75a7c5fb6fc359e9fe0 |
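Because `subjectPattern` in the record above is an ordinary regex, the same rule can be tightened without introducing any new inputs. A hypothetical variant that also caps the subject at 72 characters would swap in the following with-block (the length limit is an assumption, not part of the record):

        with:
          requireScope: false
          subjectPattern: ^[^A-Z].{0,71}$   # hypothetical: lowercase start and at most 72 characters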
name: Lint\non:\n push:\n branches: [main]\n pull_request:\n paths:\n - ".github/actions/**"\n - .github/workflows/lint.yml\n - "**/*.{yml,yaml,md,mdx,js,jsx,ts,tsx,json,toml,css}"\n - pnpm-lock.yaml\n - package.json\n - "Cargo.**"\n - "crates/**"\n - ".cargo/**"\n - rust-toolchain\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}\n cancel-in-progress: ${{ github.event_name == 'pull_request' }}\n\npermissions:\n actions: write\n contents: read\n pull-requests: read\n\njobs:\n rust_lint:\n name: Rust lints\n runs-on: ubuntu-latest\n steps:\n - name: Checkout\n uses: actions/checkout@v3\n\n - name: Setup Rust\n uses: ./.github/actions/setup-rust\n with:\n github-token: "${{ secrets.GITHUB_TOKEN }}"\n\n - name: Run cargo fmt check\n run: |\n cargo fmt --check\n\n - name: Check Cargo.toml formatting (taplo)\n run: npx @taplo/[email protected] format --check\n\n - name: Check licenses\n uses: EmbarkStudios/cargo-deny-action@v2\n with:\n command: check licenses\n\n format_lint:\n name: Formatting\n runs-on: ubuntu-latest\n env:\n TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}\n TURBO_TEAM: ${{ vars.TURBO_TEAM }}\n TURBO_REMOTE_ONLY: true\n steps:\n - name: Checkout\n uses: actions/checkout@v3\n\n - name: "Setup Node"\n uses: ./.github/actions/setup-node\n with:\n extra-flags: --no-optional\n node-version: "20"\n\n - name: Install Global Turbo\n uses: ./.github/actions/install-global-turbo\n\n - name: Lint\n # Manually set TURBO_API to an empty string to override Hetzner env\n run: |\n TURBO_API= turbo run lint --env-mode=strict\n\n cleanup:\n name: Cleanup\n needs:\n - rust_lint\n - format_lint\n if: always()\n uses: ./.github/workflows/pr-clean-caches.yml\n secrets: inherit\n
| dataset_sample\yaml\vercel_turborepo\.github\workflows\lint.yml | lint.yml | YAML | 1,951 | 0.8 | 0.012048 | 0.014085 | python-kit | 645 | 2024-01-01T08:07:16.004439 | GPL-3.0 | false | 4ee993e3eb4dc572ff551d209f1ec56f |
# Turborepo LSP Pipeline\n#\n# Currently this just dumps the LSP binaries into the artifacts, but in the future\n# we will want to do the entire packaging process here.\n\nname: Turborepo LSP\n\non:\n workflow_dispatch:\n\njobs:\n build-rust:\n name: "Build Rust"\n strategy:\n fail-fast: false\n matrix:\n settings:\n - host: macos-13\n target: "x86_64-apple-darwin"\n container-options: "--rm"\n - host: macos-13\n target: "aarch64-apple-darwin"\n container-options: "--rm"\n - host: ubuntu-latest\n container-options: "--platform=linux/amd64 --rm"\n container-setup: "sudo apt-get update && sudo apt-get install -y curl musl-tools"\n target: "x86_64-unknown-linux-musl"\n setup: "sudo apt-get install -y build-essential"\n - host: ubuntu-latest\n container-options: "--rm"\n target: "aarch64-unknown-linux-musl"\n rust-build-env: 'CC_aarch64_unknown_linux_musl=clang AR_aarch64_unknown_linux_musl=llvm-ar RUSTFLAGS="-Clink-self-contained=yes -Clinker=rust-lld"'\n setup: "sudo apt-get update && sudo apt-get install -y build-essential musl-tools clang llvm gcc-aarch64-linux-gnu binutils-aarch64-linux-gnu"\n - host: windows-latest\n target: x86_64-pc-windows-msvc\n setup: "rustup set default-host x86_64-pc-windows-msvc"\n container-options: "--rm"\n - host: windows-latest\n target: aarch64-pc-windows-msvc\n setup: "rustup set default-host aarch64-pc-windows-msvc"\n container-options: "--rm"\n runs-on: ${{ matrix.settings.host }}\n container:\n image: ${{ matrix.settings.container }}\n options: ${{ matrix.settings.container-options }}\n steps:\n - name: Checkout repo\n uses: actions/checkout@v4\n - name: Setup Container\n if: ${{ matrix.settings.container-setup }}\n run: ${{ matrix.settings.container-setup }}\n\n - name: Setup Protoc\n uses: arduino/setup-protoc@v3\n with:\n version: "26.x"\n repo-token: ${{ secrets.GITHUB_TOKEN }}\n\n - name: Setup capnproto\n uses: ./.github/actions/setup-capnproto\n\n - name: Rust Setup\n uses: ./.github/actions/setup-rust\n with:\n github-token: ${{ secrets.GITHUB_TOKEN }}\n targets: ${{ matrix.settings.target }}\n\n - name: Build Setup\n shell: bash\n if: ${{ matrix.settings.setup }}\n run: ${{ matrix.settings.setup }}\n\n - name: Build\n run: ${{ matrix.settings.rust-build-env }} cargo build --profile release-turborepo-lsp -p turborepo-lsp --target ${{ matrix.settings.target }}\n\n - name: Upload Artifacts\n uses: actions/upload-artifact@v4\n with:\n name: turborepo-lsp-${{ matrix.settings.target }}\n path: target/${{ matrix.settings.target }}/release-turborepo-lsp/turborepo-lsp*\n
| dataset_sample\yaml\vercel_turborepo\.github\workflows\lsp.yml | lsp.yml | YAML | 2,959 | 0.8 | 0.025 | 0.056338 | awesome-app | 125 | 2024-07-05T05:05:35.833398 | GPL-3.0 | false | 93566661b4026db5428fd6f6519d3306 |
name: Cleanup branch caches\non:\n pull_request:\n types: [opened, closed, reopened, synchronize]\n push:\n workflow_dispatch:\n workflow_call:\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.ref }}\n cancel-in-progress: true\n\npermissions:\n actions: write\n\njobs:\n cleanup:\n runs-on: ubuntu-latest\n if: ${{ github.ref != 'refs/heads/main' }}\n steps:\n - name: Cleanup\n env:\n GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n run: |\n gh extension install actions/gh-actions-cache\n\n REPO=${{ github.repository }}\n BRANCH=${{ github.ref }}\n\n echo "Fetching list of cache key"\n cacheKeysForPR=$(gh actions-cache list -R "$REPO" -B "$BRANCH" --limit 100 | cut -f 1)\n\n ## Setting this to not fail the workflow while deleting cache keys.\n set +e\n echo "Deleting caches..."\n for cacheKey in $cacheKeysForPR\n do\n gh actions-cache delete "$cacheKey" -R "$REPO" -B "$BRANCH" --confirm\n done\n echo "Done"\n
| dataset_sample\yaml\vercel_turborepo\.github\workflows\pr-clean-caches.yml | pr-clean-caches.yml | YAML | 1,050 | 0.8 | 0.075 | 0.029412 | vue-tools | 316 | 2024-02-04T04:43:59.150436 | MIT | false | 5f5e9ff90331ca4884626b4d08806c70 |
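Because the record above also declares `workflow_call`, it doubles as a reusable workflow, which is exactly how the cleanup job in the lint.yml record earlier consumes it. The minimal caller shape, written as a job entry inside another workflow's jobs block, is sketched below; the job name and its dependency are hypothetical, the path and `secrets: inherit` mirror the records:

  cleanup:
    needs: [some-job]                       # hypothetical dependency to run cleanup last
    if: always()                            # run even if earlier jobs failed
    uses: ./.github/workflows/pr-clean-caches.yml
    secrets: inherit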
name: Turborepo Compare Cache Item\n\non:\n workflow_dispatch:\n inputs:\n version:\n description: Turborepo release to test.\n type: string\n default: "canary"\n\njobs:\n generate_cache_artifact:\n strategy:\n matrix:\n os: [macos-latest, ubuntu-latest, windows-latest]\n runs-on: ${{ matrix.os }}\n\n steps:\n - name: Setup Node.js\n uses: actions/setup-node@v4\n with:\n node-version: 18\n\n - name: create-turbo\n run: |\n npm install -g pnpm turbo@${{ inputs.version }}\n pnpm dlx create-turbo@${{ inputs.version }} my-turborepo pnpm\n\n - name: Run build\n run: |\n cd my-turborepo\n turbo run build --filter=docs --filter=web --summarize --skip-infer -vvv\n\n - name: Grab Turborepo artifacts\n uses: actions/upload-artifact@v4\n with:\n name: cache-item-${{ matrix.os }}-${{ inputs.version }}\n path: |\n my-turborepo/node_modules/.cache/turbo\n my-turborepo/.turbo/runs\n retention-days: 1\n\n use_cache_artifact:\n needs: generate_cache_artifact\n strategy:\n fail-fast: false\n matrix:\n os: [macos-latest, ubuntu-latest, windows-latest]\n cache_os: [macos-latest, ubuntu-latest, windows-latest]\n runs-on: ${{ matrix.os }}\n\n steps:\n - name: Setup Node.js\n uses: actions/setup-node@v4\n with:\n node-version: 18\n\n - name: create-turbo\n run: |\n npm install -g pnpm turbo@${{ inputs.version }}\n pnpm dlx create-turbo@${{ inputs.version }} my-turborepo pnpm\n\n - name: Download cache artifacts\n uses: actions/download-artifact@v4\n with:\n name: cache-item-${{ matrix.cache_os }}-${{ inputs.version }}\n path: my-turborepo\n\n - name: Check for cache hit\n run: |\n cd my-turborepo\n rm .turbo/runs/*.json\n turbo run build --filter=docs --filter=web --summarize --skip-infer -vvv\n cat .turbo/runs/*.json | jq -e '.execution.cached == 2'\n\n - name: Check for functional server\n run: |\n curl https://raw.githubusercontent.com/vercel/turbo/main/scripts/server.js -O\n node server.js my-turborepo/apps/docs\n
| dataset_sample\yaml\vercel_turborepo\.github\workflows\turborepo-compare-cache-item.yml | turborepo-compare-cache-item.yml | YAML | 2,267 | 0.95 | 0.025316 | 0 | awesome-app | 746 | 2025-03-06T00:17:36.906562 | MIT | false | efaffd17939ac298c4b302ea9fb8c26e |
name: Turborepo Library Release\n\non:\n workflow_dispatch:\n inputs:\n dry_run:\n description: Do a dry run, skipping the final publish step.\n type: boolean\n\njobs:\n build:\n defaults:\n run:\n shell: bash -leo pipefail {0}\n\n strategy:\n fail-fast: false\n matrix:\n settings:\n - host: macos-latest\n target: aarch64-apple-darwin\n\n - host: macos-latest\n target: x86_64-apple-darwin\n\n - host: ubuntu-latest\n target: aarch64-unknown-linux-gnu\n setup: |\n sudo apt update\n sudo apt install -y g++-aarch64-linux-gnu libc6-dev-arm64-cross xz-utils\n mkdir zig\n curl --show-error --location https://ziglang.org/download/0.14.0/zig-linux-x86_64-0.14.0.tar.xz | tar -J -xf - -C zig --strip-components 1\n export PATH=$PATH:$(pwd)/zig\n echo "$(pwd)/zig" >> $GITHUB_PATH\n\n - host: ubuntu-latest\n target: x86_64-unknown-linux-gnu\n container: amazon/aws-lambda-nodejs:20\n install: |\n microdnf install -y gcc gcc-c++ git tar xz\n curl https://sh.rustup.rs -sSf | bash -s -- -y\n npm i -g [email protected]\n mkdir ../zig\n curl --show-error --location https://ziglang.org/download/0.14.0/zig-linux-x86_64-0.14.0.tar.xz | tar -J -xf - -C ../zig --strip-components 1\n export PATH=$PATH:$(pwd)/../zig\n echo "$(pwd)/../zig" >> $GITHUB_PATH\n setup: |\n pnpm install\n\n - host: ubuntu-latest\n target: x86_64-unknown-linux-musl\n container: ghcr.io/napi-rs/napi-rs/nodejs-rust:stable-2023-09-17-alpine\n install: |\n apk update && apk upgrade\n apk add libc6-compat curl\n echo /root/.cargo/bin >> ${GITHUB_PATH}\n echo /usr/local/cargo/bin/rustup >> ${GITHUB_PATH}\n setup: |\n export PATH=/usr/local/cargo/bin/rustup:/root/.cargo/bin:${PATH}\n rustup show active-toolchain\n dirname $(rustup which cargo) >> ${GITHUB_PATH}\n pnpm install\n\n - host: ubuntu-latest\n target: aarch64-unknown-linux-musl\n container: ghcr.io/napi-rs/napi-rs/nodejs-rust:stable-2023-09-17-alpine\n install: |\n apk update && apk upgrade\n apk add libc6-compat curl\n echo /root/.cargo/bin >> ${GITHUB_PATH}\n echo /usr/local/cargo/bin/rustup >> ${GITHUB_PATH}\n echo /aarch64-linux-musl-cross/bin >> ${GITHUB_PATH}\n setup: |\n export PATH=/aarch64-linux-musl-cross/bin:/usr/local/cargo/bin/rustup:/root/.cargo/bin:${PATH}\n rustup show active-toolchain\n rustup target add aarch64-unknown-linux-musl\n dirname $(rustup which cargo) >> ${GITHUB_PATH}\n pnpm install\n rust_env: CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER=/aarch64-linux-musl-cross/bin/aarch64-linux-musl-gcc RUSTFLAGS="-Ctarget-feature=-crt-static"\n\n - host: windows-latest\n target: aarch64-pc-windows-msvc\n\n - host: windows-latest\n target: x86_64-pc-windows-msvc\n\n runs-on: ${{ matrix.settings.host }}\n container:\n image: ${{ matrix.settings.container }}\n steps:\n - name: Install Packages\n run: ${{ matrix.settings.install }}\n if: ${{ matrix.settings.install }}\n\n - name: Checkout\n uses: actions/checkout@v3\n with:\n fetch-depth: 0\n\n - name: Setup Rust\n uses: ./.github/actions/setup-rust\n with:\n targets: ${{ matrix.settings.target }}\n github-token: ${{ github.token }}\n if: ${{ !matrix.settings.install }}\n\n - name: Setup Node\n uses: ./.github/actions/setup-node\n with:\n enable-corepack: false\n if: ${{ !matrix.settings.install }}\n\n - name: Setup toolchain\n run: ${{ matrix.settings.setup }}\n if: ${{ matrix.settings.setup }}\n\n - name: Build native library\n run: |\n export PATH=/usr/local/cargo/bin/rustup:/root/.cargo/bin:${PATH}\n cd packages/turbo-repository\n ${{ matrix.settings.rust_env }} pnpm build:release --target=${{ matrix.settings.target 
}}\n\n - name: Upload Artifacts\n uses: actions/upload-artifact@v4\n with:\n name: turbo-library-${{ matrix.settings.target }}\n path: packages/turbo-repository/native\n\n package:\n name: Publish to NPM\n runs-on: ubuntu-latest\n needs: [build]\n steps:\n - name: Checkout\n uses: actions/checkout@v3\n with:\n fetch-depth: 0\n\n - name: Configure git\n run: |\n git config --global user.name 'Turbobot'\n git config --global user.email '[email protected]'\n\n - name: Download Artifacts\n uses: actions/download-artifact@v4\n with:\n path: native-packages\n\n - name: Move artifacts into place\n run: |\n mv native-packages/turbo-library-aarch64-apple-darwin/@turbo/repository.darwin-arm64.node packages/turbo-repository/npm/darwin-arm64/\n mv native-packages/turbo-library-x86_64-apple-darwin/@turbo/repository.darwin-x64.node packages/turbo-repository/npm/darwin-x64/\n mv native-packages/turbo-library-aarch64-unknown-linux-gnu/@turbo/repository.linux-arm64-gnu.node packages/turbo-repository/npm/linux-arm64-gnu/\n mv native-packages/turbo-library-aarch64-unknown-linux-musl/@turbo/repository.linux-arm64-musl.node packages/turbo-repository/npm/linux-arm64-musl/\n mv native-packages/turbo-library-x86_64-unknown-linux-gnu/@turbo/repository.linux-x64-gnu.node packages/turbo-repository/npm/linux-x64-gnu/\n mv native-packages/turbo-library-x86_64-unknown-linux-musl/@turbo/repository.linux-x64-musl.node packages/turbo-repository/npm/linux-x64-musl/\n mv native-packages/turbo-library-aarch64-pc-windows-msvc/@turbo/repository.win32-arm64-msvc.node packages/turbo-repository/npm/win32-arm64-msvc/\n mv native-packages/turbo-library-x86_64-pc-windows-msvc/@turbo/repository.win32-x64-msvc.node packages/turbo-repository/npm/win32-x64-msvc/\n\n - name: Build Meta Package\n run: |\n cd packages/turbo-repository/js\n npm run build\n\n - name: Package Artifacts\n run: |\n mkdir tarballs\n npm pack packages/turbo-repository/npm/darwin-arm64\n npm pack packages/turbo-repository/npm/darwin-x64\n npm pack packages/turbo-repository/npm/linux-arm64-gnu\n npm pack packages/turbo-repository/npm/linux-arm64-musl\n npm pack packages/turbo-repository/npm/linux-x64-gnu\n npm pack packages/turbo-repository/npm/linux-x64-musl\n npm pack packages/turbo-repository/npm/win32-arm64-msvc\n npm pack packages/turbo-repository/npm/win32-x64-msvc\n npm pack packages/turbo-repository/js\n mv *.tgz tarballs/\n\n - name: Upload Artifacts\n uses: actions/upload-artifact@v4\n with:\n name: Upload Tarballs\n path: tarballs\n\n - name: Publish Artifacts\n env:\n NPM_TOKEN: ${{ secrets.NPM_TOKEN }}\n if: ${{ !inputs.dry_run }}\n run: |\n npm config set --location=project "//registry.npmjs.org/:_authToken" ${NPM_TOKEN}\n VERSION=$(jq -r .version packages/turbo-repository/js/package.json)\n cd tarballs\n ls\n TAG="canary"\n npm publish -ddd --tag ${TAG} --access public turbo-repository-darwin-arm64-${VERSION}.tgz\n npm publish -ddd --tag ${TAG} --access public turbo-repository-darwin-x64-${VERSION}.tgz\n npm publish -ddd --tag ${TAG} --access public turbo-repository-linux-arm64-gnu-${VERSION}.tgz\n npm publish -ddd --tag ${TAG} --access public turbo-repository-linux-arm64-musl-${VERSION}.tgz\n npm publish -ddd --tag ${TAG} --access public turbo-repository-linux-x64-gnu-${VERSION}.tgz\n npm publish -ddd --tag ${TAG} --access public turbo-repository-linux-x64-musl-${VERSION}.tgz\n npm publish -ddd --tag ${TAG} --access public turbo-repository-win32-arm64-msvc-${VERSION}.tgz\n npm publish -ddd --tag ${TAG} --access public 
turbo-repository-win32-x64-msvc-${VERSION}.tgz\n npm publish -ddd --tag ${TAG} --access public turbo-repository-${VERSION}.tgz\n
| dataset_sample\yaml\vercel_turborepo\.github\workflows\turborepo-library-release.yml | turborepo-library-release.yml | YAML | 8,508 | 0.8 | 0.024631 | 0 | python-kit | 759 | 2025-01-06T06:38:21.862189 | MIT | false | e54147f7beabaccf8384dd8ee34aa8be |
# Turborepo Release Pipeline\n#\n# This release consists of a few steps\n#\n# 1. Create a staging branch\n# 2. Run some smoke tests on that branch\n# 3. Build the Rust binary\n# 4. Publish JS packages npm (including turbo itself)\n# 5. Create a release branch and open a PR.\n\n# You can opt into a dry run, which will skip publishing to npm and opening the release branch\n\nname: Turborepo Release\n\nenv:\n CARGO_PROFILE_RELEASE_LTO: true\n NPM_TOKEN: ${{ secrets.NPM_TOKEN }}\n RELEASE_TURBO_CLI: true # TODO: do we need this?\n\non:\n workflow_dispatch:\n inputs:\n increment:\n description: "SemVer Increment (prerelease = bump canary)"\n required: true\n default: "prerelease"\n type: choice\n options:\n # Bump the canary version of the existing semver release\n - prerelease\n # Bump to the next patch version, creating its first canary release\n - prepatch\n # Bump to the next minor version, creating its first canary release\n - preminor\n # Bump to the next major version, creating its first canary release\n - premajor\n # Bump to the next patch version\n - patch\n # Bump to the next minor version\n - minor\n # Bump to the next major version\n - major\n dry_run:\n description: "Do a dry run, skipping the final publish step."\n type: boolean\n tag-override:\n description: "Override default npm dist-tag for the release. Should only be used for backporting"\n required: false\n type: string\n ci-tag-override:\n description: "Override default npm dist-tag to use for running tests. Should only be used when the most recent release was faulty"\n required: false\n type: string\n default: ""\n\njobs:\n stage:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - uses: ./.github/actions/setup-node\n with:\n enable-corepack: false\n - name: Configure git\n run: |\n git config --global user.name 'Turbobot'\n git config --global user.email '[email protected]'\n - name: Version\n run: |\n ./scripts/version.js ${{ inputs.increment }} ${{ inputs.tag-override }}\n cat version.txt\n - name: Stage Commit\n id: stage\n run: cd cli && make stage-release && echo "STAGE_BRANCH=$(git branch --show-current)" >> $GITHUB_OUTPUT\n - name: Show Stage Commit\n run: echo "${{ steps.stage.outputs.STAGE_BRANCH }}"\n outputs:\n stage-branch: "${{ steps.stage.outputs.STAGE_BRANCH }}"\n\n rust-smoke-test:\n name: Rust Unit Tests\n runs-on: ubuntu-latest\n needs: [stage]\n steps:\n - name: Show Stage Commit\n run: echo "${{ needs.stage.outputs.stage-branch }}"\n - uses: actions/checkout@v4\n with:\n ref: ${{ needs.stage.outputs.stage-branch }}\n - name: Setup Turborepo Environment\n uses: ./.github/actions/setup-turborepo-environment\n with:\n github-token: "${{ secrets.GITHUB_TOKEN }}"\n\n - name: Run Rust Unit Tests\n run: cargo groups test turborepo\n\n js-smoke-test:\n name: JS Package Tests\n runs-on: ubuntu-latest\n needs: [stage]\n steps:\n - name: Show Stage Commit\n run: echo "${{ needs.stage.outputs.stage-branch }}"\n - uses: actions/checkout@v4\n with:\n ref: ${{ needs.stage.outputs.stage-branch }}\n - name: Setup Turborepo Environment\n uses: ./.github/actions/setup-turborepo-environment\n with:\n github-token: "${{ secrets.GITHUB_TOKEN }}"\n node-version: "20"\n - name: Install Global Turbo\n uses: ./.github/actions/install-global-turbo\n with:\n turbo-version: "${{ github.event.inputs.ci-tag-override }}"\n - name: Run JS Package Tests\n run: turbo run check-types test --filter="./packages/*" --color\n\n build-rust:\n name: "Build Rust"\n needs: [stage]\n strategy:\n fail-fast: false\n matrix:\n 
settings:\n - host: macos-latest\n target: "x86_64-apple-darwin"\n - host: macos-latest\n target: "aarch64-apple-darwin"\n - host: ubuntu-latest\n target: "x86_64-unknown-linux-musl"\n setup: "sudo apt-get update && sudo apt-get install -y build-essential clang lldb llvm libclang-dev curl musl-tools sudo unzip"\n - host: ubuntu-latest\n target: "aarch64-unknown-linux-musl"\n rust-build-env: 'CC_aarch64_unknown_linux_musl=clang AR_aarch64_unknown_linux_musl=llvm-ar RUSTFLAGS="-Clink-self-contained=yes -Clinker=rust-lld"'\n setup: "sudo apt-get update && sudo apt-get install -y build-essential musl-tools clang llvm gcc-aarch64-linux-gnu binutils-aarch64-linux-gnu"\n - host: windows-latest\n target: x86_64-pc-windows-msvc\n runs-on: ${{ matrix.settings.host }}\n steps:\n - name: Show Stage Commit\n run: echo "${{ needs.stage.outputs.stage-branch }}"\n - name: Checkout repo\n uses: actions/checkout@v4\n with:\n ref: "${{ needs.stage.outputs.stage-branch }}"\n\n - name: Setup Protoc\n uses: arduino/setup-protoc@v3\n with:\n version: "26.x"\n repo-token: ${{ secrets.GITHUB_TOKEN }}\n\n - name: Setup capnproto\n uses: ./.github/actions/setup-capnproto\n\n - name: Rust Setup\n uses: actions-rust-lang/setup-rust-toolchain@v1\n with:\n target: ${{ matrix.settings.target }}\n # needed to not make it override the defaults\n rustflags: ""\n # we want more specific settings\n cache: false\n\n - name: Build Setup\n shell: bash\n if: ${{ matrix.settings.setup }}\n run: ${{ matrix.settings.setup }}\n\n - name: Build\n run: ${{ matrix.settings.rust-build-env }} cargo build --profile release-turborepo -p turbo --target ${{ matrix.settings.target }}\n\n - name: Upload Artifacts\n uses: actions/upload-artifact@v4\n with:\n name: turbo-${{ matrix.settings.target }}\n path: target/${{ matrix.settings.target }}/release-turborepo/turbo*\n\n npm-publish:\n name: "Publish To NPM"\n runs-on: ubuntu-latest\n needs: [stage, build-rust, rust-smoke-test, js-smoke-test]\n steps:\n - name: Show Stage Commit\n run: echo "${{ needs.stage.outputs.stage-branch }}"\n - uses: actions/checkout@v4\n with:\n ref: "${{ needs.stage.outputs.stage-branch }}"\n - run: git fetch origin --tags\n - uses: ./.github/actions/setup-node\n with:\n enable-corepack: false\n\n - name: Install Global Turbo\n uses: ./.github/actions/install-global-turbo\n with:\n turbo-version: "${{ github.event.inputs.ci-tag-override }}"\n\n - name: Configure git\n run: |\n git config --global user.name 'Turbobot'\n git config --global user.email '[email protected]'\n\n - name: Download Rust artifacts\n uses: actions/download-artifact@v4\n with:\n path: rust-artifacts\n\n - name: Move Rust artifacts into place\n run: |\n mv rust-artifacts/turbo-aarch64-apple-darwin cli/dist-darwin-arm64\n mv rust-artifacts/turbo-aarch64-unknown-linux-musl cli/dist-linux-arm64\n cp -r rust-artifacts/turbo-x86_64-pc-windows-msvc cli/dist-windows-arm64\n mv rust-artifacts/turbo-x86_64-unknown-linux-musl cli/dist-linux-x64\n mv rust-artifacts/turbo-x86_64-apple-darwin cli/dist-darwin-x64\n mv rust-artifacts/turbo-x86_64-pc-windows-msvc cli/dist-windows-x64\n\n - name: Perform Release\n run: cd cli && make publish-turbo SKIP_PUBLISH=${{ inputs.dry_run && '--skip-publish' || '' }}\n env:\n NPM_TOKEN: ${{ secrets.NPM_TOKEN }}\n\n # Upload published artifacts in case they are needed for debugging later\n - name: Upload Artifacts\n uses: actions/upload-artifact@v4\n with:\n name: turbo-combined\n path: cli/dist\n\n create-release-pr:\n name: "Open Release Branch PR"\n needs: [stage, 
npm-publish]\n runs-on: ubuntu-latest\n steps:\n - name: Show Stage Commit\n run: echo "${{ needs.stage.outputs.stage-branch }}"\n - uses: actions/checkout@v4\n with:\n ref: ${{ needs.stage.outputs.stage-branch }}\n - name: Get version\n id: getVersion\n run: echo "version=$(head -n 1 version.txt)" >> $GITHUB_OUTPUT\n - name: Create pull request\n uses: thomaseizinger/create-pull-request@master\n if: ${{ !inputs.dry_run }}\n with:\n github_token: ${{ secrets.GITHUB_TOKEN }}\n head: ${{ needs.stage.outputs.stage-branch }}\n base: main\n title: "release(turborepo): ${{ steps.getVersion.outputs.version }}"\n
|
dataset_sample\yaml\vercel_turborepo\.github\workflows\turborepo-release.yml
|
turborepo-release.yml
|
YAML
| 8,866 | 0.95 | 0.02381 | 0.087336 |
python-kit
| 874 |
2023-10-10T22:51:34.510624
|
BSD-3-Clause
| false |
efa6354c7681d0fb095116c4f86516a0
|
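The release workflow above is driven entirely by its workflow_dispatch inputs, so a canary dry run touches nothing outside CI. As an illustration (the values are hypothetical, not taken from a real dispatch), such a run would be started with inputs along these lines:

increment: prerelease    # bump the canary of the current semver release
dry_run: true            # skip the npm publish and the release-branch PR
tag-override: ""         # keep the default npm dist-tag
ci-tag-override: ""      # install the default turbo dist-tag for the smoke tests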
name: Turborepo Top Issues\n\non:\n schedule:\n - cron: "0 13 * * 1" # Every Monday at 1PM UTC (9AM EST)\n workflow_dispatch:\n\njobs:\n run:\n # if: github.repository_owner == 'vercel'\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - uses: actions/setup-node@v4\n with:\n node-version: 20\n - run: corepack enable\n - run: pnpm install\n - name: "Get Top Issues"\n run: node ./packages/top-issues/src/index.mjs packages/top-issues\n env:\n GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n - name: "Show slack payload"\n run: cat packages/top-issues/slack-payload.json\n - name: Send payload to slack\n uses: slackapi/[email protected]\n with:\n payload-file-path: "packages/top-issues/slack-payload.json"\n env:\n SLACK_WEBHOOK_URL: "${{ secrets.TURBOREPO_REPO_STATS_SLACK_WEBHOOK_URL }}"\n
|
dataset_sample\yaml\vercel_turborepo\.github\workflows\turborepo-top-issues.yml
|
turborepo-top-issues.yml
|
YAML
| 920 | 0.8 | 0.033333 | 0.035714 |
awesome-app
| 212 |
2025-03-16T12:39:16.577259
|
Apache-2.0
| false |
ed6fde14b5822352b77473b122486bff
|
name: Release\n\non:\n push:\n branches:\n - main\n\nconcurrency: ${{ github.workflow }}-${{ github.ref }}\n\njobs:\n release:\n name: Release\n runs-on: ubuntu-latest\n steps:\n - name: Checkout Repo\n uses: actions/checkout@v4\n\n - name: Setup Node.js 20.x\n uses: actions/setup-node@v4\n with:\n node-version: 20\n\n - name: Install Dependencies\n run: yarn\n\n - name: Create Release Pull Request or Publish to npm\n id: changesets\n uses: changesets/action@v1\n with:\n # This expects you to have a script called release which does a build for your packages and calls changeset publish\n publish: yarn release\n env:\n GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n NPM_TOKEN: ${{ secrets.NPM_TOKEN }}\n\n - name: Send a Slack notification if a publish happens\n if: steps.changesets.outputs.published == 'true'\n # You can do something when a publish happens.\n run: my-slack-bot send-notification --message "A new version of ${GITHUB_REPOSITORY} was published!"\n
|
dataset_sample\yaml\vercel_turborepo\examples\design-system\.github\workflows\release.yml
|
release.yml
|
YAML
| 1,095 | 0.8 | 0.076923 | 0.0625 |
node-utils
| 231 |
2024-08-09T10:02:57.822348
|
GPL-3.0
| false |
79a92a6f1f1c078a520b343029e9f51b
|
name: Release\n\non:\n push:\n branches:\n - main\n\nconcurrency: ${{ github.workflow }}-${{ github.ref }}\n\njobs:\n release:\n name: Release\n runs-on: ubuntu-latest\n steps:\n - name: Checkout Repo\n uses: actions/checkout@v4\n\n - name: Setup pnpm 8\n uses: pnpm/action-setup@v3\n with:\n version: 8\n\n - name: Setup Node.js 20.x\n uses: actions/setup-node@v4\n with:\n node-version: 20.x\n\n - name: Install Dependencies\n run: pnpm i\n\n - name: Create Release Pull Request or Publish to npm\n id: changesets\n uses: changesets/action@v1\n with:\n # This expects you to have a script called release which does a build for your packages and calls changeset publish\n publish: pnpm release\n env:\n GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n NPM_TOKEN: ${{ secrets.NPM_TOKEN }}\n\n - name: Send a Slack notification if a publish happens\n if: steps.changesets.outputs.published == 'true'\n # You can do something when a publish happens.\n run: my-slack-bot send-notification --message "A new version of ${GITHUB_REPOSITORY} was published!"\n
|
dataset_sample\yaml\vercel_turborepo\examples\with-changesets\.github\workflows\release.yml
|
release.yml
|
YAML
| 1,197 | 0.8 | 0.068182 | 0.055556 |
awesome-app
| 832 |
2024-04-01T11:51:49.096514
|
Apache-2.0
| false |
45e95bd10e1b2316d91a400a227dae89
|
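The two changesets workflows above both note that they expect a root release script which builds the packages and then calls changeset publish. A minimal package.json excerpt satisfying that contract could look like the following; the build command is an assumption for illustration, not the script shipped with these examples:

{
  "scripts": {
    "release": "turbo run build && changeset publish"
  }
}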
version: "3"\n\nservices:\n web:\n container_name: web\n build:\n context: .\n dockerfile: ./apps/web/Dockerfile\n restart: always\n ports:\n - 3000:3000\n networks:\n - app_network\n api:\n container_name: api\n build:\n context: .\n dockerfile: ./apps/api/Dockerfile\n restart: always\n ports:\n - 3001:3001\n networks:\n - app_network\n\n# Define a network, which allows containers to communicate\n# with each other, by using their container name as a hostname\nnetworks:\n app_network:\n external: true\n
|
dataset_sample\yaml\vercel_turborepo\examples\with-docker\docker-compose.yml
|
docker-compose.yml
|
YAML
| 555 | 0.8 | 0 | 0.074074 |
python-kit
| 564 |
2023-08-30T03:05:53.619888
|
Apache-2.0
| false |
29cc7f9ef7e12df1f8ac0640a4f0f994
|
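Because app_network above is declared external: true, Compose expects the network to exist before the stack starts (it can be created once with docker network create app_network). If the Compose file should own the network itself instead, the declaration becomes:

networks:
  app_network:
    driver: bridge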
version: "3"\n\nvolumes:\n database:\n driver: local\n\nservices:\n mysql:\n platform: linux/amd64\n image: mysql:8.0.32\n container_name: turborepo_mysql\n restart: always\n ports:\n - 3306:3306\n environment:\n MYSQL_DATABASE: turborepo\n MYSQL_ALLOW_EMPTY_PASSWORD: 1\n volumes:\n - database:/var/lib/mysql\n
|
dataset_sample\yaml\vercel_turborepo\examples\with-prisma\docker-compose.yml
|
docker-compose.yml
|
YAML
| 338 | 0.7 | 0 | 0 |
react-lib
| 565 |
2023-12-01T11:22:55.421738
|
Apache-2.0
| false |
f9794c466a0d4b6e48d6675573027386
|
version: "3"\n\nvolumes:\n database:\n driver: local\n\nservices:\n mysql:\n platform: linux/amd64\n image: mysql:8.0.32\n container_name: turborepo_mysql\n restart: always\n ports:\n - 3306:3306\n environment:\n MYSQL_DATABASE: root\n MYSQL_ALLOW_EMPTY_PASSWORD: 1\n volumes:\n - database:/var/lib/mysql\n
|
dataset_sample\yaml\vercel_turborepo\examples\with-typeorm\docker-compose.yml
|
docker-compose.yml
|
YAML
| 333 | 0.7 | 0 | 0 |
python-kit
| 592 |
2025-06-24T16:33:21.876905
|
Apache-2.0
| false |
c307bbfcea1428900a6905b26cb22fdb
|
run:\n timeout: 2m\n\nlinters:\n enable:\n - revive\n\nissues:\n exclude-rules:\n - linters:\n - staticcheck\n text: "SA(4003|1019|5011):"\n include:\n - EXC0012\n - EXC0014\n\nlinters-settings:\n errcheck:\n exclude-functions:\n - "fmt.Fprintf"\n - "fmt.Fprint"\n - "(net/http.ResponseWriter).Write"\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\.golangci.yml
|
.golangci.yml
|
YAML
| 313 | 0.85 | 0 | 0 |
vue-tools
| 134 |
2024-03-31T00:22:05.777275
|
Apache-2.0
| false |
7df162759643c974cf4654a8449831ae
|
version: 2\nupdates:\n - package-ecosystem: "github-actions"\n directory: "/"\n schedule:\n interval: "daily"\n - package-ecosystem: "gomod"\n directory: "/"\n schedule:\n interval: "weekly"\n open-pull-requests-limit: 0\n - package-ecosystem: "bundler"\n directory: "/docs"\n schedule:\n interval: "weekly"\n open-pull-requests-limit: 0\n - package-ecosystem: "gomod"\n directory: "/app/vmui/packages/vmui/web"\n schedule:\n interval: "weekly"\n open-pull-requests-limit: 0\n - package-ecosystem: "docker"\n directory: "/"\n schedule:\n interval: "daily"\n - package-ecosystem: "npm"\n directory: "/app/vmui/packages/vmui"\n schedule:\n interval: "weekly"\n open-pull-requests-limit: 0\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\.github\dependabot.yml
|
dependabot.yml
|
YAML
| 742 | 0.7 | 0 | 0 |
vue-tools
| 410 |
2025-03-25T06:51:46.871807
|
GPL-3.0
| false |
1811337cef99099a46daaffeb8c21d5f
|
name: Bug report\ndescription: Create a report to help us improve\nlabels: [bug]\nbody:\n - type: markdown\n attributes:\n value: |\n Before filling a bug report it would be great to [upgrade](https://docs.victoriametrics.com/#how-to-upgrade) \n to [the latest available release](https://github.com/VictoriaMetrics/VictoriaMetrics/releases/latest)\n and verify whether the bug is reproducible there.\n It's also recommended to read the [troubleshooting docs](https://docs.victoriametrics.com/troubleshooting/) first.\n - type: textarea\n id: describe-the-bug\n attributes:\n label: Describe the bug\n description: |\n A clear and concise description of what the bug is.\n placeholder: |\n When I do `A` VictoriaMetrics does `B`. I expect it to do `C`.\n validations:\n required: true\n - type: textarea\n id: to-reproduce\n attributes:\n label: To Reproduce\n description: |\n Steps to reproduce the behavior.\n If reproducing an issue requires some specific configuration file, please paste it here.\n placeholder: |\n Steps to reproduce the behavior.\n validations:\n required: true\n - type: textarea\n id: version\n attributes:\n label: Version\n description: |\n The line returned when passing `--version` command line flag to the binary. For example:\n ```\n $ ./victoria-metrics-prod --version\n victoria-metrics-20190730-121249-heads-single-node-0-g671d9e55\n ```\n validations:\n required: true\n - type: textarea\n id: logs\n attributes:\n label: Logs\n description: |\n Check if any warnings or errors were logged by VictoriaMetrics components\n or components in communication with VictoriaMetrics (e.g. Prometheus, Grafana).\n validations:\n required: false\n - type: textarea\n id: screenshots\n attributes:\n label: Screenshots\n description: |\n If applicable, add screenshots to help explain your problem.\n \n For VictoriaMetrics health-state issues please provide full-length screenshots\n of Grafana dashboards if possible:\n * [Grafana dashboard for single-node VictoriaMetrics](https://grafana.com/grafana/dashboards/10229)\n * [Grafana dashboard for VictoriaMetrics cluster](https://grafana.com/grafana/dashboards/11176)\n \n See how to setup monitoring here:\n * [monitoring for single-node VictoriaMetrics](https://docs.victoriametrics.com/#monitoring)\n * [monitoring for VictoriaMetrics cluster](https://docs.victoriametrics.com/cluster-victoriametrics/#monitoring)\n validations:\n required: false\n - type: textarea\n id: flags\n attributes:\n label: Used command-line flags\n description: |\n Please provide the command-line flags used for running VictoriaMetrics and its components.\n validations:\n required: false\n - type: textarea\n id: additional-info\n attributes:\n label: Additional information\n placeholder: |\n Additional information that doesn't fit elsewhere\n validations:\n required: false\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\.github\ISSUE_TEMPLATE\bug_report.yml
|
bug_report.yml
|
YAML
| 3,142 | 0.95 | 0.081395 | 0.047619 |
react-lib
| 219 |
2024-02-15T19:20:07.594716
|
Apache-2.0
| false |
e22ea3a493deb640d48de921f8478238
|
blank_issues_enabled: true\ncontact_links:\n - name: Ask on Slack\n url: https://slack.victoriametrics.com/\n about: You can ask for help here!\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\.github\ISSUE_TEMPLATE\configuration.yml
|
configuration.yml
|
YAML
| 147 | 0.8 | 0.2 | 0 |
node-utils
| 668 |
2025-05-21T06:06:00.620411
|
GPL-3.0
| false |
6ba0a56fe4bb8e5de5e877a8f5cb091a
|
name: Feature request\ndescription: Suggest an idea for this project\nlabels: [enhancement]\nbody:\n - type: textarea\n id: describe-the-problem\n attributes:\n label: Is your feature request related to a problem? Please describe\n description: |\n A clear and concise description of what the problem is.\n placeholder: |\n Ex. I'm always frustrated when [...]\n validations:\n required: false\n - type: textarea\n id: describe-the-solution\n attributes:\n label: Describe the solution you'd like\n description: |\n A clear and concise description of what you want to happen.\n validations:\n required: true\n - type: textarea\n id: alternative-solutions\n attributes:\n label: Describe alternatives you've considered\n description: |\n A clear and concise description of any alternative solutions or features you've considered.\n placeholder: |\n I have tried to do `A`, but that doesn't solve a problem completely.\n I have tried to do `A` and `B`, but implementing this would be better.\n validations:\n required: false\n - type: textarea\n id: feature-additional-info\n attributes:\n label: Additional information\n description: |\n Additional information which you consider helpful for implementing this feature.\n placeholder: |\n Add any other context or screenshots about the feature request here.\n validations:\n required: false\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\.github\ISSUE_TEMPLATE\feature_request.yml
|
feature_request.yml
|
YAML
| 1,468 | 0.85 | 0.046512 | 0 |
node-utils
| 168 |
2023-08-11T10:34:13.061442
|
BSD-3-Clause
| false |
ac1ff4f0df67b59b79a4a3072dc5dbbb
|
name: Question\ndescription: Ask a question regarding VictoriaMetrics or its components\nlabels: [question]\nbody:\n - type: textarea\n id: describe-the-component\n attributes:\n label: Is your question request related to a specific component?\n placeholder: |\n VictoriaMetrics, vmagent, vmalert, vmui, etc...\n validations:\n required: false\n - type: textarea\n id: describe-the-question\n attributes:\n label: Describe the question in detail\n description: |\n A clear and concise description of the issue and the question.\n validations:\n required: true\n - type: checkboxes\n id: troubleshooting\n attributes:\n label: Troubleshooting docs\n description: I am familiar with the following troubleshooting docs\n options:\n - label: General - https://docs.victoriametrics.com/troubleshooting/\n required: false\n - label: vmagent - https://docs.victoriametrics.com/vmagent/#troubleshooting\n required: false\n - label: vmalert - https://docs.victoriametrics.com/vmalert/#troubleshooting\n required: false\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\.github\ISSUE_TEMPLATE\question.yml
|
question.yml
|
YAML
| 1,113 | 0.95 | 0 | 0 |
vue-tools
| 411 |
2024-03-02T13:51:22.245373
|
GPL-3.0
| false |
583903316d9e60aeba0f411374401b3c
|
name: build\n\non:\n push:\n branches:\n - cluster\n - master\n paths:\n - '**.go'\n - '**/Dockerfile*' # The trailing * is for app/vmui/Dockerfile-*.\n - '**/Makefile'\n pull_request:\n branches:\n - cluster\n - master\n paths:\n - '**.go'\n - '**/Dockerfile*' # The trailing * is for app/vmui/Dockerfile-*.\n - '**/Makefile'\n\npermissions:\n contents: read\n\nconcurrency:\n cancel-in-progress: true\n group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}\n\njobs:\n build:\n name: Build\n runs-on: ubuntu-latest\n steps:\n - name: Code checkout\n uses: actions/checkout@v4\n\n - name: Setup Go\n id: go\n uses: actions/setup-go@v5\n with:\n go-version: stable\n cache: false\n\n - name: Cache Go artifacts\n uses: actions/cache@v4\n with:\n path: |\n ~/.cache/go-build\n ~/go/bin\n ~/go/pkg/mod\n key: go-artifacts-${{ runner.os }}-crossbuild-${{ steps.go.outputs.go-version }}-${{ hashFiles('go.sum', 'Makefile', 'app/**/Makefile') }}\n restore-keys: go-artifacts-${{ runner.os }}-crossbuild-\n\n - name: Run crossbuild\n run: make crossbuild\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\.github\workflows\build.yml
|
build.yml
|
YAML
| 1,249 | 0.8 | 0.037037 | 0 |
react-lib
| 45 |
2025-06-07T09:17:59.144258
|
GPL-3.0
| false |
0ce3b06953dab0456b4a03daf487199b
|
name: license-check\non:\n push:\n paths:\n - 'vendor'\n pull_request:\n paths:\n - 'vendor'\npermissions:\n contents: read\n\njobs:\n build:\n name: Build\n runs-on: ubuntu-latest\n steps:\n - name: Code checkout\n uses: actions/checkout@master\n\n - name: Setup Go\n id: go\n uses: actions/setup-go@v5\n with:\n go-version: stable\n cache: false\n\n - name: Cache Go artifacts\n uses: actions/cache@v4\n with:\n path: |\n ~/.cache/go-build\n ~/go/pkg/mod\n ~/go/bin\n key: go-artifacts-${{ runner.os }}-check-licenses-${{ steps.go.outputs.go-version }}-${{ hashFiles('go.sum', 'Makefile', 'app/**/Makefile') }}\n restore-keys: go-artifacts-${{ runner.os }}-check-licenses-\n\n - name: Check License\n run: make check-licenses\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\.github\workflows\check-licenses.yml
|
check-licenses.yml
|
YAML
| 867 | 0.8 | 0 | 0 |
node-utils
| 709 |
2024-01-05T13:43:47.625472
|
MIT
| false |
9c4f99fbeb8563f072d26b1878e8e908
|
name: 'CodeQL Go'\n\non:\n push:\n branches:\n - cluster\n - master\n paths:\n - '**.go'\n pull_request:\n branches:\n - cluster\n - master\n paths:\n - '**.go'\n\nconcurrency:\n cancel-in-progress: true\n group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}\n\njobs:\n analyze:\n name: Analyze\n runs-on: ubuntu-latest\n permissions:\n actions: read\n contents: read\n security-events: write\n\n steps:\n - name: Checkout repository\n uses: actions/checkout@v4\n\n - name: Set up Go\n id: go\n uses: actions/setup-go@v5\n with:\n cache: false\n go-version: stable\n\n - name: Cache Go artifacts\n uses: actions/cache@v4\n with:\n path: |\n ~/.cache/go-build\n ~/go/bin\n ~/go/pkg/mod\n key: go-artifacts-${{ runner.os }}-codeql-analyze-${{ steps.go.outputs.go-version }}-${{ hashFiles('go.sum', 'Makefile', 'app/**/Makefile') }}\n restore-keys: go-artifacts-${{ runner.os }}-codeql-analyze-\n\n - name: Initialize CodeQL\n uses: github/codeql-action/init@v3\n with:\n languages: go\n\n - name: Autobuild\n uses: github/codeql-action/autobuild@v3\n\n - name: Perform CodeQL Analysis\n uses: github/codeql-action/analyze@v3\n with:\n category: 'language:go'\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\.github\workflows\codeql-analysis-go.yml
|
codeql-analysis-go.yml
|
YAML
| 1,404 | 0.8 | 0 | 0 |
python-kit
| 177 |
2024-03-11T22:56:10.770038
|
Apache-2.0
| false |
d66b0f5752ecccbd6600e90dd380898c
|
name: 'CodeQL JS/TS'\n\non:\n push:\n branches:\n - cluster\n - master\n paths:\n - '**.js'\n - '**.ts'\n - '**.tsx'\n pull_request:\n branches:\n - cluster\n - master\n paths:\n - '**.js'\n - '**.ts'\n - '**.tsx'\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}\n cancel-in-progress: true\n\njobs:\n analyze:\n name: Analyze\n runs-on: ubuntu-latest\n permissions:\n actions: read\n contents: read\n security-events: write\n\n steps:\n - name: Checkout repository\n uses: actions/checkout@v4\n\n - name: Initialize CodeQL\n uses: github/codeql-action/init@v3\n with:\n languages: javascript-typescript\n\n - name: Perform CodeQL Analysis\n uses: github/codeql-action/analyze@v3\n with:\n category: 'language:js/ts'\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\.github\workflows\codeql-analysis-js-ts.yml
|
codeql-analysis-js-ts.yml
|
YAML
| 886 | 0.7 | 0 | 0 |
awesome-app
| 647 |
2024-01-03T11:17:11.487706
|
GPL-3.0
| false |
cd1f05485c522197816ac53090d0321f
|
name: main\n\non:\n push:\n branches:\n - cluster\n - master\n paths:\n - '**.go'\n pull_request:\n branches:\n - cluster\n - master\n paths:\n - '**.go'\n\npermissions:\n contents: read\n\nconcurrency:\n cancel-in-progress: true\n group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}\n\njobs:\n lint:\n name: lint\n runs-on: ubuntu-latest\n steps:\n - name: Code checkout\n uses: actions/checkout@v4\n\n - name: Setup Go\n id: go\n uses: actions/setup-go@v5\n with:\n cache: false\n go-version: stable\n\n - name: Cache Go artifacts\n uses: actions/cache@v4\n with:\n path: |\n ~/.cache/go-build\n ~/go/bin\n ~/go/pkg/mod\n key: go-artifacts-${{ runner.os }}-check-all-${{ steps.go.outputs.go-version }}-${{ hashFiles('go.sum', 'Makefile', 'app/**/Makefile') }}\n restore-keys: go-artifacts-${{ runner.os }}-check-all-\n\n - name: Run check-all\n run: |\n make check-all\n git diff --exit-code\n\n test:\n name: test\n needs: lint\n runs-on: ubuntu-latest\n\n strategy:\n matrix:\n scenario:\n - 'test-full'\n - 'test-full-386'\n - 'test-pure'\n\n steps:\n - name: Code checkout\n uses: actions/checkout@v4\n\n - name: Setup Go\n id: go\n uses: actions/setup-go@v5\n with:\n cache: false\n go-version: stable\n\n - name: Cache Go artifacts\n uses: actions/cache@v4\n with:\n path: |\n ~/.cache/go-build\n ~/go/bin\n ~/go/pkg/mod\n key: go-artifacts-${{ runner.os }}-${{ matrix.scenario }}-${{ steps.go.outputs.go-version }}-${{ hashFiles('go.sum', 'Makefile', 'app/**/Makefile') }}\n restore-keys: go-artifacts-${{ runner.os }}-${{ matrix.scenario }}-\n\n - name: Run tests\n run: GOGC=10 make ${{ matrix.scenario}}\n\n - name: Publish coverage\n uses: codecov/codecov-action@v5\n with:\n files: ./coverage.txt\n\n integration-test:\n name: integration-test\n needs: [lint, test]\n runs-on: ubuntu-latest\n\n steps:\n - name: Code checkout\n uses: actions/checkout@v4\n\n - name: Setup Go\n id: go\n uses: actions/setup-go@v5\n with:\n cache: false\n go-version: stable\n\n - name: Cache Go artifacts\n uses: actions/cache@v4\n with:\n path: |\n ~/.cache/go-build\n ~/go/bin\n ~/go/pkg/mod\n key: go-artifacts-${{ runner.os }}-${{ matrix.scenario }}-${{ steps.go.outputs.go-version }}-${{ hashFiles('go.sum', 'Makefile', 'app/**/Makefile') }}\n restore-keys: go-artifacts-${{ runner.os }}-${{ matrix.scenario }}-\n\n - name: Run integration tests\n run: make integration-test\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\.github\workflows\main.yml
|
main.yml
|
YAML
| 2,899 | 0.8 | 0 | 0 |
node-utils
| 63 |
2024-07-07T06:08:10.352402
|
Apache-2.0
| false |
5460a8e309e223f41a04f394365084b1
|
# route requests between VictoriaMetrics and VictoriaLogs\nunauthorized_user:\n url_map:\n - src_paths:\n - "/api/v1/.*"\n url_prefix: http://victoriametrics:8428\n - src_paths:\n - "/select/.*"\n url_prefix:\n - http://vlselect-1:9428\n - http://vlselect-2:9428\n - src_paths:\n - "/insert/.*"\n url_prefix:\n - http://vlinsert:9428\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\auth-vl-cluster.yml
|
auth-vl-cluster.yml
|
YAML
| 351 | 0.8 | 0 | 0.066667 |
awesome-app
| 931 |
2025-01-10T22:31:52.088056
|
Apache-2.0
| false |
bff044733392ca76a68406c13a8a7eb2
|
# route requests between VictoriaMetrics and VictoriaLogs\nunauthorized_user:\n url_map:\n - src_paths:\n - "/api/v1/.*"\n url_prefix: http://victoriametrics:8428\n - src_paths:\n - "/select/.*"\n url_prefix:\n - http://victorialogs:9428\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\auth-vl-single.yml
|
auth-vl-single.yml
|
YAML
| 247 | 0.8 | 0 | 0.1 |
python-kit
| 521 |
2024-04-16T18:35:08.177025
|
Apache-2.0
| false |
50bdba3cb14be10a597a1de10e33b571
|
# balance load among vmselects\n# see https://docs.victoriametrics.com/vmauth/#load-balancing\nunauthorized_user:\n url_map:\n - src_paths:\n - "/select/.*"\n url_prefix:\n - http://vmselect-1:8481\n - http://vmselect-2:8481\n - src_paths:\n - "/insert/.*"\n url_prefix:\n - http://vminsert-1:8480\n - http://vminsert-2:8480\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\auth-vm-cluster.yml
|
auth-vm-cluster.yml
|
YAML
| 339 | 0.8 | 0 | 0.142857 |
awesome-app
| 995 |
2023-08-07T23:45:43.849060
|
BSD-3-Clause
| false |
67ab157570f7f500300f6a6e766b7334
|
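The three vmauth configs above route only anonymous traffic through unauthorized_user. The same src_paths / url_prefix rules can also be scoped to authenticated users, which vmauth supports via a users list; the credentials below are placeholders:

users:
  - username: "reader"
    password: "changeme"
    url_map:
      - src_paths:
          - "/select/.*"
        url_prefix:
          - http://vmselect-1:8481
          - http://vmselect-2:8481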
services:\n # Grafana instance configured with VictoriaLogs as datasource\n grafana:\n image: grafana/grafana:11.5.0\n depends_on:\n - "victoriametrics"\n - "vmauth"\n ports:\n - 3000:3000\n volumes:\n - grafanadata:/var/lib/grafana\n - ./provisioning/datasources/victoriametrics-logs-datasource/cluster.yml:/etc/grafana/provisioning/datasources/cluster.yml\n - ./provisioning/dashboards:/etc/grafana/provisioning/dashboards\n - ./provisioning/plugins/:/var/lib/grafana/plugins\n - ./../../dashboards/victoriametrics.json:/var/lib/grafana/dashboards/vm.json\n - ./../../dashboards/victorialogs-cluster.json:/var/lib/grafana/dashboards/vl.json\n environment:\n - "GF_INSTALL_PLUGINS=victoriametrics-logs-datasource"\n restart: always\n\n # vector is logs collector. It collects logs according to vector.yml\n # and forwards them to VictoriaLogs\n vector:\n image: docker.io/timberio/vector:0.46.X-distroless-libc\n volumes:\n - type: bind\n source: /var/run/docker.sock\n target: /var/run/docker.sock\n - type: bind\n source: /var/lib/docker\n target: /var/lib/docker\n - ./vector-vl-cluster.yml:/etc/vector/vector.yaml:ro\n depends_on: [vmauth]\n ports:\n - "8686:8686"\n user: root\n\n vlinsert:\n image: victoriametrics/victoria-logs:v1.20.0-victorialogs\n command:\n - "--storageNode=vlstorage-1:9428"\n - "--storageNode=vlstorage-2:9428"\n\n vlselect-1:\n image: victoriametrics/victoria-logs:v1.20.0-victorialogs\n command:\n - "--storageNode=vlstorage-1:9428"\n - "--storageNode=vlstorage-2:9428"\n vlselect-2:\n image: victoriametrics/victoria-logs:v1.20.0-victorialogs\n command:\n - "--storageNode=vlstorage-1:9428"\n - "--storageNode=vlstorage-2:9428"\n\n vlstorage-1:\n image: victoriametrics/victoria-logs:v1.20.0-victorialogs\n command:\n - "--storageDataPath=/vlogs"\n volumes:\n - vldata-1:/vlogs\n vlstorage-2:\n image: victoriametrics/victoria-logs:v1.20.0-victorialogs\n command:\n - "--storageDataPath=/vlogs"\n volumes:\n - vldata-2:/vlogs\n\n # VictoriaMetrics instance, a single process responsible for\n # scraping, storing metrics and serve read requests.\n victoriametrics:\n image: victoriametrics/victoria-metrics:v1.115.0\n volumes:\n - vmdata:/storage\n - ./prometheus-vl-cluster.yml:/etc/prometheus/prometheus.yml\n command:\n - "--storageDataPath=/storage"\n - "--promscrape.config=/etc/prometheus/prometheus.yml"\n restart: always\n\n # vmauth is a router and balancer for HTTP requests.\n # It proxies query requests from vmalert to either VictoriaMetrics or VictoriaLogs,\n # depending on the requested path.\n vmauth:\n image: victoriametrics/vmauth:v1.115.0\n depends_on:\n - "victoriametrics"\n - "vlselect-1"\n - "vlselect-2"\n - "vlinsert"\n volumes:\n - ./auth-vl-cluster.yml:/etc/auth.yml\n command:\n - "--auth.config=/etc/auth.yml"\n ports:\n - 8427:8427\n restart: always\n\n # vmalert executes alerting and recording rules according to given rule type.\n vmalert:\n image: victoriametrics/vmalert:v1.115.0\n depends_on:\n - "vmauth"\n - "alertmanager"\n - "victoriametrics"\n ports:\n - 8880:8880\n volumes:\n - ./rules/alerts.yml:/etc/alerts/alerts.yml\n - ./rules/alerts-vlogs.yml:/etc/alerts/vlogs.yml\n - ./rules/alerts-health.yml:/etc/alerts/alerts-health.yml\n - ./rules/alerts-vmagent.yml:/etc/alerts/alerts-vmagent.yml\n - ./rules/alerts-vmalert.yml:/etc/alerts/alerts-vmalert.yml\n # vlogs rule\n - ./rules/vlogs-example-alerts.yml:/etc/alerts/vlogs-example-alerts.yml\n command:\n - "--datasource.url=http://vmauth:8427/"\n - "--remoteRead.url=http://victoriametrics:8428/"\n - 
"--remoteWrite.url=http://victoriametrics:8428/"\n - "--notifier.url=http://alertmanager:9093/"\n - "--rule=/etc/alerts/*.yml"\n # display source of alerts in grafana\n - "--external.url=http://127.0.0.1:3000" #grafana outside container\n restart: always\n\n # alertmanager receives alerting notifications from vmalert\n # and distributes them according to --config.file.\n alertmanager:\n image: prom/alertmanager:v0.28.0\n volumes:\n - ./alertmanager.yml:/config/alertmanager.yml\n command:\n - "--config.file=/config/alertmanager.yml"\n ports:\n - 9093:9093\n restart: always\n\nvolumes:\n vmdata: {}\n vldata-1: {}\n vldata-2: {}\n grafanadata: {}\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\compose-vl-cluster.yml
|
compose-vl-cluster.yml
|
YAML
| 4,511 | 0.8 | 0.014184 | 0.098485 |
react-lib
| 904 |
2025-04-18T19:05:23.910113
|
Apache-2.0
| false |
e35cb093dee45fabd710d2dffdcbd035
|
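Growing the VictoriaLogs cluster above follows the pattern already present in the file: one more vlstorage service, one more named volume, and one more --storageNode flag on vlinsert and on each vlselect. A sketch of the additional fragment (the name vlstorage-3 is illustrative) to merge into the existing services and volumes sections:

services:
  vlstorage-3:
    image: victoriametrics/victoria-logs:v1.20.0-victorialogs
    command:
      - "--storageDataPath=/vlogs"
    volumes:
      - vldata-3:/vlogs

volumes:
  vldata-3: {}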
services:\n # Grafana instance configured with VictoriaLogs as datasource\n grafana:\n image: grafana/grafana:11.5.0\n depends_on:\n - "victoriametrics"\n - "victorialogs"\n ports:\n - 3000:3000\n volumes:\n - grafanadata:/var/lib/grafana\n - ./provisioning/datasources/victoriametrics-logs-datasource/single.yml:/etc/grafana/provisioning/datasources/single.yml\n - ./provisioning/dashboards:/etc/grafana/provisioning/dashboards\n - ./provisioning/plugins/:/var/lib/grafana/plugins\n - ./../../dashboards/victoriametrics.json:/var/lib/grafana/dashboards/vm.json\n - ./../../dashboards/victorialogs.json:/var/lib/grafana/dashboards/vl.json\n environment:\n - "GF_INSTALL_PLUGINS=victoriametrics-logs-datasource"\n restart: always\n\n # vector is logs collector. It collects logs according to vector.yml\n # and forwards them to VictoriaLogs\n vector:\n image: docker.io/timberio/vector:0.46.X-distroless-libc\n volumes:\n - type: bind\n source: /var/run/docker.sock\n target: /var/run/docker.sock\n - type: bind\n source: /var/lib/docker\n target: /var/lib/docker\n - ./vector-vl-single.yml:/etc/vector/vector.yaml:ro\n depends_on: [victorialogs]\n ports:\n - "8686:8686"\n user: root\n\n # VictoriaLogs instance, a single process responsible for\n # storing logs and serving read queries.\n victorialogs:\n image: victoriametrics/victoria-logs:v1.20.0-victorialogs\n command:\n - "--storageDataPath=/vlogs"\n volumes:\n - vldata:/vlogs\n\n # VictoriaMetrics instance, a single process responsible for\n # scraping, storing metrics and serve read requests.\n victoriametrics:\n image: victoriametrics/victoria-metrics:v1.115.0\n volumes:\n - vmdata:/storage\n - ./prometheus-vl-single.yml:/etc/prometheus/prometheus.yml\n command:\n - "--storageDataPath=/storage"\n - "--promscrape.config=/etc/prometheus/prometheus.yml"\n restart: always\n\n # vmauth is a router and balancer for HTTP requests.\n # It proxies query requests from vmalert to either VictoriaMetrics or VictoriaLogs,\n # depending on the requested path.\n vmauth:\n image: victoriametrics/vmauth:v1.115.0\n depends_on:\n - "victoriametrics"\n - "victorialogs"\n volumes:\n - ./auth-vl-single.yml:/etc/auth.yml\n command:\n - "--auth.config=/etc/auth.yml"\n ports:\n - 8427:8427\n restart: always\n\n # vmalert executes alerting and recording rules according to the given rule type.\n vmalert:\n image: victoriametrics/vmalert:v1.115.0\n depends_on:\n - "vmauth"\n - "alertmanager"\n - "victoriametrics"\n ports:\n - 8880:8880\n volumes:\n - ./rules/alerts.yml:/etc/alerts/alerts.yml\n - ./rules/alerts-vlogs.yml:/etc/alerts/vlogs.yml\n - ./rules/alerts-health.yml:/etc/alerts/alerts-health.yml\n - ./rules/alerts-vmalert.yml:/etc/alerts/alerts-vmalert.yml\n # vlogs rule\n - ./rules/vlogs-example-alerts.yml:/etc/alerts/vlogs-example-alerts.yml\n command:\n - "--datasource.url=http://vmauth:8427/"\n - "--remoteRead.url=http://victoriametrics:8428/"\n - "--remoteWrite.url=http://victoriametrics:8428/"\n - "--notifier.url=http://alertmanager:9093/"\n - "--rule=/etc/alerts/*.yml"\n # display source of alerts in grafana\n - "--external.url=http://127.0.0.1:3000" #grafana outside container\n restart: always\n\n # alertmanager receives alerting notifications from vmalert\n # and distributes them according to --config.file.\n alertmanager:\n image: prom/alertmanager:v0.28.0\n volumes:\n - ./alertmanager.yml:/config/alertmanager.yml\n command:\n - "--config.file=/config/alertmanager.yml"\n ports:\n - 9093:9093\n restart: always\n\nvolumes:\n vmdata: {}\n vldata: {}\n grafanadata: {}\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\compose-vl-single.yml
|
compose-vl-single.yml
|
YAML
| 3,822 | 0.8 | 0.025862 | 0.137615 |
vue-tools
| 136 |
2023-11-22T04:22:21.730726
|
MIT
| false |
b7265d815b69630a75e3ca3484159a11
|
services:\n # Metrics collector.\n # It scrapes targets defined in --promscrape.config\n # And forward them to --remoteWrite.url\n vmagent:\n image: victoriametrics/vmagent:v1.115.0\n depends_on:\n - "vmauth"\n ports:\n - 8429:8429\n volumes:\n - vmagentdata:/vmagentdata\n - ./prometheus-vm-cluster.yml:/etc/prometheus/prometheus.yml\n command:\n - "--promscrape.config=/etc/prometheus/prometheus.yml"\n - "--remoteWrite.url=http://vmauth:8427/insert/0/prometheus/api/v1/write"\n restart: always\n\n grafana:\n image: grafana/grafana:11.5.0\n depends_on:\n - "vmauth"\n ports:\n - 3000:3000\n restart: always\n volumes:\n - grafanadata:/var/lib/grafana\n - ./provisioning/datasources/prometheus-datasource/cluster.yml:/etc/grafana/provisioning/datasources/cluster.yml\n - ./provisioning/dashboards:/etc/grafana/provisioning/dashboards\n - ./../../dashboards/victoriametrics-cluster.json:/var/lib/grafana/dashboards/vm.json\n - ./../../dashboards/vmagent.json:/var/lib/grafana/dashboards/vmagent.json\n - ./../../dashboards/vmalert.json:/var/lib/grafana/dashboards/vmalert.json\n - ./../../dashboards/vmauth.json:/var/lib/grafana/dashboards/vmauth.json\n\n # vmstorage shards. Each shard receives 1/N of all metrics sent to vminserts,\n # where N is number of vmstorages (2 in this case).\n vmstorage-1:\n image: victoriametrics/vmstorage:v1.115.0-cluster\n volumes:\n - strgdata-1:/storage\n command:\n - "--storageDataPath=/storage"\n restart: always\n vmstorage-2:\n image: victoriametrics/vmstorage:v1.115.0-cluster\n volumes:\n - strgdata-2:/storage\n command:\n - "--storageDataPath=/storage"\n restart: always\n\n # vminsert is ingestion frontend. It receives metrics pushed by vmagent,\n # pre-process them and distributes across configured vmstorage shards.\n vminsert-1:\n image: victoriametrics/vminsert:v1.115.0-cluster\n depends_on:\n - "vmstorage-1"\n - "vmstorage-2"\n command:\n - "--storageNode=vmstorage-1:8400"\n - "--storageNode=vmstorage-2:8400"\n restart: always\n vminsert-2:\n image: victoriametrics/vminsert:v1.115.0-cluster\n depends_on:\n - "vmstorage-1"\n - "vmstorage-2"\n command:\n - "--storageNode=vmstorage-1:8400"\n - "--storageNode=vmstorage-2:8400"\n restart: always\n\n # vmselect is a query fronted. 
It serves read queries in MetricsQL or PromQL.\n # vmselect collects results from configured `--storageNode` shards.\n vmselect-1:\n image: victoriametrics/vmselect:v1.115.0-cluster\n depends_on:\n - "vmstorage-1"\n - "vmstorage-2"\n command:\n - "--storageNode=vmstorage-1:8401"\n - "--storageNode=vmstorage-2:8401"\n - "--vmalert.proxyURL=http://vmalert:8880"\n restart: always\n vmselect-2:\n image: victoriametrics/vmselect:v1.115.0-cluster\n depends_on:\n - "vmstorage-1"\n - "vmstorage-2"\n command:\n - "--storageNode=vmstorage-1:8401"\n - "--storageNode=vmstorage-2:8401"\n - "--vmalert.proxyURL=http://vmalert:8880"\n restart: always\n\n # vmauth is a router and balancer for HTTP requests.\n # It is configured via --auth.config and balances\n # read requests from Grafana, vmui, vmalert among vmselects.\n # It can be used as an authentication proxy.\n vmauth:\n image: victoriametrics/vmauth:v1.115.0\n depends_on:\n - "vmselect-1"\n - "vmselect-2"\n volumes:\n - ./auth-vm-cluster.yml:/etc/auth.yml\n command:\n - "--auth.config=/etc/auth.yml"\n ports:\n - 8427:8427\n restart: always\n\n # vmalert executes alerting and recording rules\n vmalert:\n image: victoriametrics/vmalert:v1.115.0\n depends_on:\n - "vmauth"\n ports:\n - 8880:8880\n volumes:\n - ./rules/alerts-cluster.yml:/etc/alerts/alerts.yml\n - ./rules/alerts-health.yml:/etc/alerts/alerts-health.yml\n - ./rules/alerts-vmagent.yml:/etc/alerts/alerts-vmagent.yml\n - ./rules/alerts-vmalert.yml:/etc/alerts/alerts-vmalert.yml\n command:\n - "--datasource.url=http://vmauth:8427/select/0/prometheus"\n - "--remoteRead.url=http://vmauth:8427/select/0/prometheus"\n - "--remoteWrite.url=http://vmauth:8427/insert/0/prometheus/api/v1/write"\n - "--notifier.url=http://alertmanager:9093/"\n - "--rule=/etc/alerts/*.yml"\n # display source of alerts in grafana\n - "-external.url=http://127.0.0.1:3000" #grafana outside container\n - '--external.alert.source=explore?orgId=1&left={"datasource":"VictoriaMetrics","queries":[{"expr":{{.Expr|jsonEscape|queryEscape}},"refId":"A"}],"range":{"from":"{{ .ActiveAt.UnixMilli }}","to":"now"}}'\n restart: always\n\n # alertmanager receives alerting notifications from vmalert\n # and distributes them according to --config.file.\n alertmanager:\n image: prom/alertmanager:v0.28.0\n volumes:\n - ./alertmanager.yml:/config/alertmanager.yml\n command:\n - "--config.file=/config/alertmanager.yml"\n ports:\n - 9093:9093\n restart: always\n\nvolumes:\n vmagentdata: {}\n strgdata-1: {}\n strgdata-2: {}\n grafanadata: {}\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\compose-vm-cluster.yml
|
compose-vm-cluster.yml
|
YAML
| 5,102 | 0.8 | 0.006579 | 0.118056 |
node-utils
| 950 |
2023-11-28T09:04:04.292313
|
MIT
| false |
0708508524d94775993faf7ab644a3a6
|
services:\n # Metrics collector.\n # It scrapes targets defined in --promscrape.config\n # And forward them to --remoteWrite.url\n vmagent:\n image: victoriametrics/vmagent:v1.115.0\n depends_on:\n - "victoriametrics"\n ports:\n - 8429:8429\n volumes:\n - vmagentdata:/vmagentdata\n - ./prometheus-vm-single.yml:/etc/prometheus/prometheus.yml\n command:\n - "--promscrape.config=/etc/prometheus/prometheus.yml"\n - "--remoteWrite.url=http://victoriametrics:8428/api/v1/write"\n restart: always\n # VictoriaMetrics instance, a single process responsible for\n # storing metrics and serve read requests.\n victoriametrics:\n image: victoriametrics/victoria-metrics:v1.115.0\n ports:\n - 8428:8428\n - 8089:8089\n - 8089:8089/udp\n - 2003:2003\n - 2003:2003/udp\n - 4242:4242\n volumes:\n - vmdata:/storage\n command:\n - "--storageDataPath=/storage"\n - "--graphiteListenAddr=:2003"\n - "--opentsdbListenAddr=:4242"\n - "--httpListenAddr=:8428"\n - "--influxListenAddr=:8089"\n - "--vmalert.proxyURL=http://vmalert:8880"\n restart: always\n\n grafana:\n image: grafana/grafana:11.5.0\n depends_on:\n - "victoriametrics"\n ports:\n - 3000:3000\n volumes:\n - grafanadata:/var/lib/grafana\n - ./provisioning/datasources/prometheus-datasource/single.yml:/etc/grafana/provisioning/datasources/single.yml\n - ./provisioning/dashboards:/etc/grafana/provisioning/dashboards\n - ./../../dashboards/victoriametrics.json:/var/lib/grafana/dashboards/vm.json\n - ./../../dashboards/vmagent.json:/var/lib/grafana/dashboards/vmagent.json\n - ./../../dashboards/vmalert.json:/var/lib/grafana/dashboards/vmalert.json\n restart: always\n\n # vmalert executes alerting and recording rules\n vmalert:\n image: victoriametrics/vmalert:v1.115.0\n depends_on:\n - "victoriametrics"\n - "alertmanager"\n ports:\n - 8880:8880\n volumes:\n - ./rules/alerts.yml:/etc/alerts/alerts.yml\n - ./rules/alerts-health.yml:/etc/alerts/alerts-health.yml\n - ./rules/alerts-vmagent.yml:/etc/alerts/alerts-vmagent.yml\n - ./rules/alerts-vmalert.yml:/etc/alerts/alerts-vmalert.yml\n command:\n - "--datasource.url=http://victoriametrics:8428/"\n - "--remoteRead.url=http://victoriametrics:8428/"\n - "--remoteWrite.url=http://vmagent:8429/"\n - "--notifier.url=http://alertmanager:9093/"\n - "--rule=/etc/alerts/*.yml"\n # display source of alerts in grafana\n - "--external.url=http://127.0.0.1:3000" #grafana outside container\n - '--external.alert.source=explore?orgId=1&left={"datasource":"VictoriaMetrics","queries":[{"expr":{{.Expr|jsonEscape|queryEscape}},"refId":"A"}],"range":{"from":"{{ .ActiveAt.UnixMilli }}","to":"now"}}'\n restart: always\n\n # alertmanager receives alerting notifications from vmalert\n # and distributes them according to --config.file.\n alertmanager:\n image: prom/alertmanager:v0.28.0\n volumes:\n - ./alertmanager.yml:/config/alertmanager.yml\n command:\n - "--config.file=/config/alertmanager.yml"\n ports:\n - 9093:9093\n restart: always\n\nvolumes:\n vmagentdata: {}\n vmdata: {}\n grafanadata: {}\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\compose-vm-single.yml
|
compose-vm-single.yml
|
YAML
| 3,230 | 0.8 | 0.010638 | 0.1 |
python-kit
| 387 |
2025-03-15T08:45:12.846449
|
MIT
| false |
9c0d3537678c45ed69768df12e21a50b
|
global:\n scrape_interval: 10s\n\nscrape_configs:\n- job_name: victoriametrics\n static_configs:\n - targets:\n - victoriametrics:8428\n- job_name: vmalert\n static_configs:\n - targets:\n - vmalert:8880\n- job_name: vlstorage\n static_configs:\n - targets:\n - vlstorage-1:9428\n - vlstorage-2:9428\n- job_name: vlselect\n static_configs:\n - targets:\n - vlselect-1:9428\n - vlselect-2:9428\n- job_name: vlinsert\n static_configs:\n - targets:\n - vlinsert:9428\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\prometheus-vl-cluster.yml
|
prometheus-vl-cluster.yml
|
YAML
| 471 | 0.7 | 0 | 0 |
python-kit
| 185 |
2024-07-27T14:10:11.146970
|
Apache-2.0
| false |
74aafbc385ad66953afeb202364a03b9
|
global:\n scrape_interval: 10s\n\nscrape_configs:\n- job_name: victoriametrics\n static_configs:\n - targets:\n - victoriametrics:8428\n- job_name: vmalert\n static_configs:\n - targets:\n - vmalert:8880\n- job_name: victorialogs\n static_configs:\n - targets:\n - victorialogs:9428
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\prometheus-vl-single.yml
|
prometheus-vl-single.yml
|
YAML
| 283 | 0.7 | 0 | 0 |
node-utils
| 685 |
2023-07-13T18:18:34.798369
|
BSD-3-Clause
| false |
9e7a56e9186abaa8abc2f73acde023cd
|
global:\n scrape_interval: 10s\n\nscrape_configs:\n- job_name: vmagent\n static_configs:\n - targets:\n - vmagent:8429\n- job_name: vmauth\n static_configs:\n - targets:\n - vmauth:8427\n- job_name: vmalert\n static_configs:\n - targets:\n - vmalert:8880\n- job_name: vminsert\n static_configs:\n - targets:\n - vminsert:8480\n- job_name: vmselect\n static_configs:\n - targets:\n - vmselect-1:8481\n - vmselect-2:8481\n- job_name: vmstorage\n static_configs:\n - targets:\n - vmstorage-1:8482\n - vmstorage-2:8482\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\prometheus-vm-cluster.yml
|
prometheus-vm-cluster.yml
|
YAML
| 523 | 0.7 | 0 | 0 |
react-lib
| 17 |
2025-03-11T15:29:59.668617
|
MIT
| false |
0dafa46e83741942c254c37474661b1d
|
global:\n scrape_interval: 10s\n\nscrape_configs:\n- job_name: vmagent\n static_configs:\n - targets:\n - vmagent:8429\n- job_name: vmalert\n static_configs:\n - targets:\n - vmalert:8880\n- job_name: victoriametrics\n static_configs:\n - targets:\n - victoriametrics:8428\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\prometheus-vm-single.yml
|
prometheus-vm-single.yml
|
YAML
| 274 | 0.7 | 0 | 0 |
vue-tools
| 378 |
2023-10-29T06:43:16.770825
|
Apache-2.0
| false |
1194ff19ccf9c038bd0a88233f0290cd
|
api:\n enabled: true\n address: 0.0.0.0:8686\nsources:\n docker:\n type: docker_logs\n demo:\n type: demo_logs\n format: apache_common\n interval: 10\n vector_metrics:\n type: internal_metrics\ntransforms:\n msg_parser:\n type: remap\n inputs: [docker]\n source: |\n .message = parse_json(.message) ?? .message\nsinks:\n elasticsearch:\n type: elasticsearch\n inputs: [demo, msg_parser]\n endpoints: [http://vmauth:8427/insert/elasticsearch/]\n mode: bulk\n api_version: v8\n compression: gzip\n healthcheck:\n enabled: false\n request:\n headers:\n VL-Stream-Fields: source_type,label.com.docker.compose.service\n VL-Time-Field: timestamp\n VL-Msg-Field: message,msg,_msg,message.message,message.log\n AccountID: "0"\n ProjectID: "0"\n victoriametrics:\n type: prometheus_remote_write\n endpoint: http://victoriametrics:8428/api/v1/write\n inputs: [vector_metrics]\n healthcheck:\n enabled: false\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\vector-vl-cluster.yml
|
vector-vl-cluster.yml
|
YAML
| 981 | 0.8 | 0 | 0 |
awesome-app
| 859 |
2024-06-16T05:01:24.159495
|
GPL-3.0
| false |
f85c1ec2d2cf4ea8c70ca0bdb0dd84a9
|
api:\n enabled: true\n address: 0.0.0.0:8686\nsources:\n docker:\n type: docker_logs\n demo:\n type: demo_logs\n format: apache_common\n interval: 10\n vector_metrics:\n type: internal_metrics\ntransforms:\n msg_parser:\n type: remap\n inputs: [docker]\n source: |\n .message = parse_json(.message) ?? .message\nsinks:\n elasticsearch:\n type: elasticsearch\n inputs: [demo, msg_parser]\n endpoints: [http://victorialogs:9428/insert/elasticsearch/]\n mode: bulk\n api_version: v8\n compression: gzip\n healthcheck:\n enabled: false\n request:\n headers:\n VL-Stream-Fields: source_type,label.com.docker.compose.service\n VL-Time-Field: timestamp\n VL-Msg-Field: message,msg,_msg,message.message,message.log\n AccountID: "0"\n ProjectID: "0"\n victoriametrics:\n type: prometheus_remote_write\n endpoint: http://victoriametrics:8428/api/v1/write\n inputs: [vector_metrics]\n healthcheck:\n enabled: false\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\vector-vl-single.yml
|
vector-vl-single.yml
|
YAML
| 987 | 0.8 | 0 | 0 |
vue-tools
| 869 |
2024-11-02T06:20:03.814873
|
GPL-3.0
| false |
886113101064d06af51d9081547f79be
|
apiVersion: 1\n\nproviders:\n- name: Prometheus\n orgId: 1\n folder: ''\n type: file\n options:\n path: /var/lib/grafana/dashboards\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\provisioning\dashboards\dashboard.yml
|
dashboard.yml
|
YAML
| 131 | 0.7 | 0 | 0 |
awesome-app
| 447 |
2025-03-12T16:51:16.182874
|
BSD-3-Clause
| false |
c14c98b75b62908ab3af2d68d3070756
|
apiVersion: 1\n\ndatasources:\n - name: VictoriaMetrics - cluster\n type: prometheus\n access: proxy\n url: http://vmauth:8427/select/0/prometheus\n isDefault: true\n jsonData:\n prometheusType: Prometheus\n prometheusVersion: 2.24.0\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\provisioning\datasources\prometheus-datasource\cluster.yml
|
cluster.yml
|
YAML
| 266 | 0.8 | 0 | 0 |
python-kit
| 478 |
2024-11-25T17:01:38.570509
|
GPL-3.0
| false |
0afdeabfd729425c9ae3a9cf0187d8ff
|
apiVersion: 1\n\ndatasources:\n - name: VictoriaMetrics\n type: prometheus\n access: proxy\n url: http://victoriametrics:8428\n isDefault: true\n jsonData:\n prometheusType: Prometheus\n prometheusVersion: 2.24.0
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\provisioning\datasources\prometheus-datasource\single.yml
|
single.yml
|
YAML
| 244 | 0.8 | 0 | 0 |
react-lib
| 556 |
2023-12-11T10:38:13.387736
|
MIT
| false |
3a7a4cf95fc376f1a7e608b5ffeffdd4
|
apiVersion: 1\n\ndatasources:\n - name: VictoriaLogs\n type: victoriametrics-logs-datasource\n access: proxy\n url: http://vmauth:8427\n\n - name: VictoriaMetrics\n type: prometheus\n access: proxy\n url: http://vmauth:8427\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\provisioning\datasources\victoriametrics-logs-datasource\cluster.yml
|
cluster.yml
|
YAML
| 234 | 0.8 | 0 | 0 |
awesome-app
| 659 |
2024-11-15T04:03:35.835220
|
BSD-3-Clause
| false |
fd7fde73c4e6b49c52b9b0d7b02598a6
|
apiVersion: 1\n\ndatasources:\n - name: VictoriaLogs\n type: victoriametrics-logs-datasource\n access: proxy\n url: http://vmauth:8427/\n\n - name: VictoriaMetrics\n type: prometheus\n access: proxy\n url: http://vmauth:8427/\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\provisioning\datasources\victoriametrics-logs-datasource\single.yml
|
single.yml
|
YAML
| 236 | 0.8 | 0 | 0 |
node-utils
| 639 |
2024-09-08T20:05:48.554468
|
BSD-3-Clause
| false |
68dcdc5c75b1bda360d9e87a4465d9fe
|
# File contains default list of alerts for VictoriaMetrics cluster.\n# The alerts below are just recommendations and may require some updates\n# and threshold calibration according to every specific setup.\ngroups:\n # Alerts group for VM cluster assumes that Grafana dashboard\n # https://grafana.com/grafana/dashboards/11176 is installed.\n # Please, update the `dashboard` annotation according to your setup.\n - name: vmcluster\n interval: 30s\n concurrency: 2\n rules:\n - alert: DiskRunsOutOfSpaceIn3Days\n expr: |\n sum(vm_free_disk_space_bytes) without(path) /\n (\n rate(vm_rows_added_to_storage_total[1d]) * (\n sum(vm_data_size_bytes{type!~"indexdb.*"}) without(type) /\n sum(vm_rows{type!~"indexdb.*"}) without(type)\n )\n ) < 3 * 24 * 3600 > 0\n for: 30m\n labels:\n severity: critical\n annotations:\n dashboard: "http://localhost:3000/d/oS7Bi_0Wz?viewPanel=20&var-instance={{ $labels.instance }}"\n summary: "Instance {{ $labels.instance }} will run out of disk space in 3 days"\n description: "Taking into account current ingestion rate, free disk space will be enough only\n for {{ $value | humanizeDuration }} on instance {{ $labels.instance }}.\n\n Consider to limit the ingestion rate, decrease retention or scale the disk space up if possible."\n\n - alert: NodeBecomesReadonlyIn3Days\n expr: |\n sum(vm_free_disk_space_bytes - vm_free_disk_space_limit_bytes) without(path) /\n (\n rate(vm_rows_added_to_storage_total[1d]) * (\n sum(vm_data_size_bytes{type!~"indexdb.*"}) without(type) /\n sum(vm_rows{type!~"indexdb.*"}) without(type)\n )\n ) < 3 * 24 * 3600 > 0\n for: 30m\n labels:\n severity: warning\n annotations:\n dashboard: "http://localhost:3000/d/oS7Bi_0Wz?viewPanel=20&var-instance={{ $labels.instance }}"\n summary: "Instance {{ $labels.instance }} will become read-only in 3 days"\n description: "Taking into account current ingestion rate, free disk space and -storage.minFreeDiskSpaceBytes\n instance {{ $labels.instance }} will remain writable for {{ $value | humanizeDuration }}.\n\n Consider to limit the ingestion rate, decrease retention or scale the disk space up if possible."\n\n - alert: DiskRunsOutOfSpace\n expr: |\n sum(vm_data_size_bytes) by(job, instance) /\n (\n sum(vm_free_disk_space_bytes) by(job, instance) +\n sum(vm_data_size_bytes) by(job, instance)\n ) > 0.8\n for: 30m\n labels:\n severity: critical\n annotations:\n dashboard: "http://localhost:3000/d/oS7Bi_0Wz?viewPanel=20&var-instance={{ $labels.instance }}"\n summary: "Instance {{ $labels.instance }} (job={{ $labels.job }}) will run out of disk space soon"\n description: "Disk utilisation on instance {{ $labels.instance }} is more than 80%.\n\n Having less than 20% of free disk space could cripple merges processes and overall performance.\n Consider to limit the ingestion rate, decrease retention or scale the disk space if possible."\n\n - alert: RequestErrorsToAPI\n expr: increase(vm_http_request_errors_total[5m]) > 0\n for: 15m\n labels:\n severity: warning\n show_at: dashboard\n annotations:\n dashboard: "http://localhost:3000/d/oS7Bi_0Wz?viewPanel=52&var-instance={{ $labels.instance }}"\n summary: "Too many errors served for {{ $labels.job }} path {{ $labels.path }} (instance {{ $labels.instance }})"\n description: "Requests to path {{ $labels.path }} are receiving errors.\n Please verify if clients are sending correct requests."\n\n - alert: RPCErrors\n expr: |\n (\n sum(increase(vm_rpc_connection_errors_total[5m])) by(job, instance)\n +\n sum(increase(vm_rpc_dial_errors_total[5m])) by(job, instance)\n +\n 
sum(increase(vm_rpc_handshake_errors_total[5m])) by(job, instance)\n ) > 0\n for: 15m\n labels:\n severity: warning\n show_at: dashboard\n annotations:\n dashboard: "http://localhost:3000/d/oS7Bi_0Wz?viewPanel=44&var-instance={{ $labels.instance }}"\n summary: "Too many RPC errors for {{ $labels.job }} (instance {{ $labels.instance }})"\n description: "RPC errors are interconnection errors between cluster components.\n\n Possible reasons for errors are misconfiguration, overload, network blips or unreachable components."\n\n - alert: TooHighChurnRate\n expr: |\n (\n sum(rate(vm_new_timeseries_created_total[5m])) by(job)\n /\n sum(rate(vm_rows_inserted_total[5m])) by(job)\n ) > 0.1\n for: 15m\n labels:\n severity: warning\n annotations:\n dashboard: "http://localhost:3000/d/oS7Bi_0Wz?viewPanel=102"\n summary: "Churn rate is more than 10% for the last 15m"\n description: "VM constantly creates new time series.\n\n This effect is known as Churn Rate.\n\n High Churn Rate tightly connected with database performance and may\n result in unexpected OOM's or slow queries."\n\n - alert: TooHighChurnRate24h\n expr: |\n sum(increase(vm_new_timeseries_created_total[24h])) by(job)\n >\n (sum(vm_cache_entries{type="storage/hour_metric_ids"}) by(job) * 3)\n for: 15m\n labels:\n severity: warning\n annotations:\n dashboard: "http://localhost:3000/d/oS7Bi_0Wz?viewPanel=102"\n summary: "Too high number of new series created over last 24h"\n description: "The number of created new time series over last 24h is 3x times higher than\n current number of active series.\n\n This effect is known as Churn Rate.\n\n High Churn Rate tightly connected with database performance and may\n result in unexpected OOM's or slow queries."\n\n - alert: TooHighSlowInsertsRate\n expr: |\n (\n sum(rate(vm_slow_row_inserts_total[5m])) by(job)\n /\n sum(rate(vm_rows_inserted_total[5m])) by(job)\n ) > 0.05\n for: 15m\n labels:\n severity: warning\n annotations:\n dashboard: "http://localhost:3000/d/oS7Bi_0Wz?viewPanel=108"\n summary: "Percentage of slow inserts is more than 5% for the last 15m"\n description: "High rate of slow inserts may be a sign of resource exhaustion\n for the current load. It is likely more RAM is needed for optimal handling of the current number of active time series.\n See also https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3976#issuecomment-1476883183"\n\n - alert: VminsertVmstorageConnectionIsSaturated\n expr: rate(vm_rpc_send_duration_seconds_total[5m]) > 0.9\n for: 15m\n labels:\n severity: warning\n show_at: dashboard\n annotations:\n dashboard: "http://localhost:3000/d/oS7Bi_0Wz?viewPanel=139&var-instance={{ $labels.instance }}"\n summary: "Connection between vminsert on {{ $labels.instance }} and vmstorage on {{ $labels.addr }} is saturated"\n description: "The connection between vminsert (instance {{ $labels.instance }}) and vmstorage (instance {{ $labels.addr }})\n is saturated by more than 90% and vminsert won't be able to keep up.\n\n This usually means that more vminsert or vmstorage nodes must be added to the cluster in order to increase\n the total number of vminsert -> vmstorage links."\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\rules\alerts-cluster.yml
|
alerts-cluster.yml
|
YAML
| 7,828 | 0.95 | 0.148148 | 0.038961 |
vue-tools
| 774 |
2024-08-05T19:25:44.105793
|
BSD-3-Clause
| false |
0d114bfde1608653a1934cf6e02508bf
|
# File contains default list of alerts for various VM components.\n# The following alerts are recommended for use for any VM installation.\n# The alerts below are just recommendations and may require some updates\n# and threshold calibration according to every specific setup.\ngroups:\n - name: vm-health\n # note the `job` filter and update accordingly to your setup\n rules:\n - alert: TooManyRestarts\n expr: changes(process_start_time_seconds{job=~".*(victoriametrics|vmselect|vminsert|vmstorage|vmagent|vmalert|vmsingle|vmalertmanager|vmauth|victorialogs|vlstorage|vlselect|vlinsert).*"}[15m]) > 2\n labels:\n severity: critical\n annotations:\n summary: "{{ $labels.job }} too many restarts (instance {{ $labels.instance }})"\n description: >\n Job {{ $labels.job }} (instance {{ $labels.instance }}) has restarted more than twice in the last 15 minutes.\n It might be crashlooping.\n\n - alert: ServiceDown\n expr: up{job=~".*(victoriametrics|vmselect|vminsert|vmstorage|vmagent|vmalert|vmsingle|vmalertmanager|vmauth|victorialogs|vlstorage|vlselect|vlinsert).*"} == 0\n for: 2m\n labels:\n severity: critical\n annotations:\n summary: "Service {{ $labels.job }} is down on {{ $labels.instance }}"\n description: "{{ $labels.instance }} of job {{ $labels.job }} has been down for more than 2 minutes."\n\n - alert: ProcessNearFDLimits\n expr: (process_max_fds - process_open_fds) < 100\n for: 5m\n labels:\n severity: critical\n annotations:\n summary: "Number of free file descriptors is less than 100 for \"{{ $labels.job }}\"(\"{{ $labels.instance }}\") for the last 5m"\n description: | \n Exhausting OS file descriptors limit can cause severe degradation of the process.\n Consider to increase the limit as fast as possible.\n\n - alert: TooHighMemoryUsage\n expr: (min_over_time(process_resident_memory_anon_bytes[10m]) / vm_available_memory_bytes) > 0.8\n for: 5m\n labels:\n severity: critical\n annotations:\n summary: "It is more than 80% of memory used by \"{{ $labels.job }}\"(\"{{ $labels.instance }}\")"\n description: |\n Too high memory usage may result into multiple issues such as OOMs or degraded performance.\n Consider to either increase available memory or decrease the load on the process.\n\n - alert: TooHighCPUUsage\n expr: rate(process_cpu_seconds_total[5m]) / process_cpu_cores_available > 0.9\n for: 5m\n labels:\n severity: critical\n annotations:\n summary: "More than 90% of CPU is used by \"{{ $labels.job }}\"(\"{{ $labels.instance }}\") during the last 5m"\n description: >\n Too high CPU usage may be a sign of insufficient resources and make process unstable.\n Consider to either increase available CPU resources or decrease the load on the process.\n\n - alert: TooHighGoroutineSchedulingLatency\n expr: histogram_quantile(0.99, sum(rate(go_sched_latencies_seconds_bucket[5m])) by (le, job, instance)) > 0.1\n for: 15m\n labels:\n severity: critical\n annotations:\n summary: "\"{{ $labels.job }}\"(\"{{ $labels.instance }}\") has insufficient CPU resources for >15m"\n description: >\n Go runtime is unable to schedule goroutines execution in acceptable time. This is usually a sign of\n insufficient CPU resources or CPU throttling. Verify that service has enough CPU resources. 
Otherwise,\n the service could work unreliably with delays in processing.\n\n - alert: TooManyLogs\n expr: sum(increase(vm_log_messages_total{level="error"}[5m])) without (app_version, location) > 0\n for: 15m\n labels:\n severity: warning\n annotations:\n summary: "Too many logs printed for job \"{{ $labels.job }}\" ({{ $labels.instance }})"\n description: >\n Logging rate for job \"{{ $labels.job }}\" ({{ $labels.instance }}) is {{ $value }} for last 15m.\n Worth to check logs for specific error messages.\n\n - alert: TooManyTSIDMisses\n expr: rate(vm_missing_tsids_for_metric_id_total[5m]) > 0\n for: 10m\n labels:\n severity: critical\n annotations:\n summary: "Too many TSID misses for job \"{{ $labels.job }}\" ({{ $labels.instance }})"\n description: | \n The rate of TSID misses during query lookups is too high for \"{{ $labels.job }}\" ({{ $labels.instance }}).\n Make sure you're running VictoriaMetrics of v1.85.3 or higher.\n Related issue https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3502\n\n - alert: ConcurrentInsertsHitTheLimit\n expr: avg_over_time(vm_concurrent_insert_current[1m]) >= vm_concurrent_insert_capacity\n for: 15m\n labels:\n severity: warning\n annotations:\n summary: "{{ $labels.job }} on instance {{ $labels.instance }} is constantly hitting concurrent inserts limit"\n description: | \n The limit of concurrent inserts on instance {{ $labels.instance }} depends on the number of CPUs.\n Usually, when component constantly hits the limit it is likely the component is overloaded and requires more CPU.\n In some cases for components like vmagent or vminsert the alert might trigger if there are too many clients\n making write attempts. If vmagent's or vminsert's CPU usage and network saturation are at normal level, then \n it might be worth adjusting `-maxConcurrentInserts` cmd-line flag.\n\n - alert: IndexDBRecordsDrop\n expr: increase(vm_indexdb_items_dropped_total[5m]) > 0\n labels:\n severity: critical\n annotations:\n summary: "IndexDB skipped registering items during data ingestion with reason={{ $labels.reason }}."\n description: | \n VictoriaMetrics could skip registering new timeseries during ingestion if they fail the validation process. \n For example, `reason=too_long_item` means that time series cannot exceed 64KB. Please, reduce the number \n of labels or label values for such series. Or enforce these limits via `-maxLabelsPerTimeseries` and \n `-maxLabelValueLen` command-line flags.\n\n - alert: RowsRejectedOnIngestion\n expr: rate(vm_rows_ignored_total[5m]) > 0\n for: 15m\n labels:\n severity: warning\n annotations:\n summary: "Some rows are rejected on \"{{ $labels.instance }}\" on ingestion attempt"\n description: "Ingested rows on instance \"{{ $labels.instance }}\" are rejected due to the\n following reason: \"{{ $labels.reason }}\""\n\n - alert: TooHighQueryLoad\n expr: increase(vm_concurrent_select_limit_timeout_total[5m]) > 0\n for: 15m\n labels:\n severity: warning\n annotations:\n summary: "Read queries fail with timeout for {{ $labels.job }} on instance {{ $labels.instance }}"\n description: |\n Instance {{ $labels.instance }} ({{ $labels.job }}) is failing to serve read queries during last 15m.\n Concurrency limit `-search.maxConcurrentRequests` was reached on this instance and extra queries were\n put into the queue for `-search.maxQueueDuration` interval. 
But even after waiting in the queue these queries weren't served.\n This happens if instance is overloaded with the current workload, or datasource is too slow to respond.\n Possible solutions are the following:\n * reduce the query load;\n * increase compute resources or number of replicas;\n * adjust limits `-search.maxConcurrentRequests` and `-search.maxQueueDuration`.\n See more at https://docs.victoriametrics.com/troubleshooting/#slow-queries.
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\rules\alerts-health.yml
|
alerts-health.yml
|
YAML
| 8,003 | 0.95 | 0.204082 | 0.058394 |
node-utils
| 288 |
2024-01-16T17:35:05.941720
|
Apache-2.0
| false |
4cd8e95a05ea8973537c0d87c745ad46
|
# File contains default list of alerts for VictoriaLogs single server.\n# The alerts below are just recommendations and may require some updates\n# and threshold calibration according to every specific setup.\ngroups:\n - name: vlogs\n rules:\n - alert: DiskRunsOutOfSpace\n expr: |\n sum(vl_data_size_bytes) by(job, instance) /\n (\n sum(vl_free_disk_space_bytes) by(job, instance) +\n sum(vl_data_size_bytes) by(job, instance)\n ) > 0.8\n for: 30m\n labels:\n severity: critical\n annotations:\n summary: "Instance {{ $labels.instance }} (job={{ $labels.job }}) will run out of disk space soon"\n description: "Disk utilisation on instance {{ $labels.instance }} is more than 80%.\n\n Having less than 20% of free disk space could cripple merge processes and overall performance.\n Consider to limit the ingestion rate, decrease retention or scale the disk space if possible."\n\n - alert: RequestErrorsToAPI\n expr: increase(vl_http_errors_total[5m]) > 0\n for: 15m\n labels:\n severity: warning\n annotations:\n summary: "Too many errors served for path {{ $labels.path }} (instance {{ $labels.instance }})"\n description: "Requests to path {{ $labels.path }} are receiving errors.\n Please verify if clients are sending correct requests."\n\n - alert: RowsRejectedOnIngestion\n expr: rate(vl_rows_dropped_total[5m]) > 0\n for: 15m\n labels:\n severity: warning\n annotations:\n summary: "Some rows are rejected on \"{{ $labels.instance }}\" on ingestion attempt"\n description: "VictoriaLogs is rejecting to ingest rows on \"{{ $labels.instance }}\" due to the\n following reason: \"{{ $labels.reason }}\""\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\rules\alerts-vlogs.yml
|
alerts-vlogs.yml
|
YAML
| 1,847 | 0.95 | 0.170732 | 0.076923 |
awesome-app
| 562 |
2023-10-14T20:36:56.346145
|
GPL-3.0
| false |
558be06393ec3b6e8045735dc60beec4
|
# File contains default list of alerts for vmagent service.\n# The alerts below are just recommendations and may require some updates\n# and threshold calibration according to every specific setup.\ngroups:\n # Alerts group for vmagent assumes that Grafana dashboard\n # https://grafana.com/grafana/dashboards/12683 is installed.\n # Pls update the `dashboard` annotation according to your setup.\n - name: vmagent\n interval: 30s\n concurrency: 2\n rules:\n - alert: PersistentQueueIsDroppingData\n expr: sum(increase(vm_persistentqueue_bytes_dropped_total[5m])) without (path) > 0\n for: 10m\n labels:\n severity: critical\n annotations:\n dashboard: "http://localhost:3000/d/G7Z9GzMGz?viewPanel=49&var-instance={{ $labels.instance }}"\n summary: "Instance {{ $labels.instance }} is dropping data from persistent queue"\n description: "Vmagent dropped {{ $value | humanize1024 }} from persistent queue\n on instance {{ $labels.instance }} for the last 10m."\n\n - alert: RejectedRemoteWriteDataBlocksAreDropped\n expr: sum(increase(vmagent_remotewrite_packets_dropped_total[5m])) without (url) > 0\n for: 15m\n labels:\n severity: warning\n annotations:\n dashboard: "http://localhost:3000/d/G7Z9GzMGz?viewPanel=79&var-instance={{ $labels.instance }}"\n summary: "Vmagent is dropping data blocks that are rejected by remote storage"\n description: "Job \"{{ $labels.job }}\" on instance {{ $labels.instance }} drops the rejected by \n remote-write server data blocks. Check the logs to find the reason for rejects."\n\n - alert: TooManyScrapeErrors\n expr: increase(vm_promscrape_scrapes_failed_total[5m]) > 0\n for: 15m\n labels:\n severity: warning\n annotations:\n dashboard: "http://localhost:3000/d/G7Z9GzMGz?viewPanel=31&var-instance={{ $labels.instance }}"\n summary: "Vmagent fails to scrape one or more targets"\n description: "Job \"{{ $labels.job }}\" on instance {{ $labels.instance }} fails to scrape targets for last 15m"\n\n - alert: TooManyWriteErrors\n expr: |\n (sum(increase(vm_ingestserver_request_errors_total[5m])) without (name,net,type)\n +\n sum(increase(vmagent_http_request_errors_total[5m])) without (path,protocol)) > 0\n for: 15m\n labels:\n severity: warning\n annotations:\n dashboard: "http://localhost:3000/d/G7Z9GzMGz?viewPanel=77&var-instance={{ $labels.instance }}"\n summary: "Vmagent responds with too many errors on data ingestion protocols"\n description: "Job \"{{ $labels.job }}\" on instance {{ $labels.instance }} responds with errors to write requests for last 15m."\n\n - alert: TooManyRemoteWriteErrors\n expr: rate(vmagent_remotewrite_retries_count_total[5m]) > 0\n for: 15m\n labels:\n severity: warning\n annotations:\n dashboard: "http://localhost:3000/d/G7Z9GzMGz?viewPanel=61&var-instance={{ $labels.instance }}"\n summary: "Job \"{{ $labels.job }}\" on instance {{ $labels.instance }} fails to push to remote storage"\n description: "Vmagent fails to push data via remote write protocol to destination \"{{ $labels.url }}\"\n\n Ensure that destination is up and reachable."\n\n - alert: RemoteWriteConnectionIsSaturated\n expr: |\n (\n rate(vmagent_remotewrite_send_duration_seconds_total[5m])\n / \n vmagent_remotewrite_queues\n ) > 0.9\n for: 15m\n labels:\n severity: warning\n annotations:\n dashboard: "http://localhost:3000/d/G7Z9GzMGz?viewPanel=84&var-instance={{ $labels.instance }}"\n summary: "Remote write connection from \"{{ $labels.job }}\" (instance {{ $labels.instance }}) to {{ $labels.url }} is saturated"\n description: "The remote write connection between vmagent \"{{ 
$labels.job }}\" (instance {{ $labels.instance }}) and destination \"{{ $labels.url }}\"\n is saturated by more than 90% and vmagent won't be able to keep up.\n\n There could be the following reasons for this:\n\n * vmagent can't send data fast enough through the existing network connections. Increase `-remoteWrite.queues` cmd-line flag value to establish more connections per destination.\n\n * remote destination can't accept data fast enough. Check if remote destination has enough resources for processing."\n\n - alert: PersistentQueueForWritesIsSaturated\n expr: rate(vm_persistentqueue_write_duration_seconds_total[5m]) > 0.9\n for: 15m\n labels:\n severity: warning\n annotations:\n dashboard: "http://localhost:3000/d/G7Z9GzMGz?viewPanel=98&var-instance={{ $labels.instance }}"\n summary: "Persistent queue writes for instance {{ $labels.instance }} are saturated"\n description: "Persistent queue writes for vmagent \"{{ $labels.job }}\" (instance {{ $labels.instance }})\n are saturated by more than 90% and vmagent won't be able to keep up with flushing data on disk. \n In this case, consider to decrease load on the vmagent or improve the disk throughput."\n\n - alert: PersistentQueueForReadsIsSaturated\n expr: rate(vm_persistentqueue_read_duration_seconds_total[5m]) > 0.9\n for: 15m\n labels:\n severity: warning\n annotations:\n dashboard: "http://localhost:3000/d/G7Z9GzMGz?viewPanel=99&var-instance={{ $labels.instance }}"\n summary: "Persistent queue reads for instance {{ $labels.instance }} are saturated"\n description: "Persistent queue reads for vmagent \"{{ $labels.job }}\" (instance {{ $labels.instance }})\n are saturated by more than 90% and vmagent won't be able to keep up with reading data from the disk. \n In this case, consider to decrease load on the vmagent or improve the disk throughput."\n\n - alert: SeriesLimitHourReached\n expr: (vmagent_hourly_series_limit_current_series / vmagent_hourly_series_limit_max_series) > 0.9\n labels:\n severity: critical\n annotations:\n dashboard: "http://localhost:3000/d/G7Z9GzMGz?viewPanel=88&var-instance={{ $labels.instance }}"\n summary: "Instance {{ $labels.instance }} reached 90% of the limit"\n description: "Max series limit set via -remoteWrite.maxHourlySeries flag is close to reaching the max value. \n Then samples for new time series will be dropped instead of sending them to remote storage systems."\n\n - alert: SeriesLimitDayReached\n expr: (vmagent_daily_series_limit_current_series / vmagent_daily_series_limit_max_series) > 0.9\n labels:\n severity: critical\n annotations:\n dashboard: "http://localhost:3000/d/G7Z9GzMGz?viewPanel=90&var-instance={{ $labels.instance }}"\n summary: "Instance {{ $labels.instance }} reached 90% of the limit"\n description: "Max series limit set via -remoteWrite.maxDailySeries flag is close to reaching the max value. 
\n Then samples for new time series will be dropped instead of sending them to remote storage systems."\n\n - alert: ConfigurationReloadFailure\n expr: |\n vm_promscrape_config_last_reload_successful != 1\n or\n vmagent_relabel_config_last_reload_successful != 1\n labels:\n severity: warning\n annotations:\n summary: "Configuration reload failed for vmagent instance {{ $labels.instance }}"\n description: "Configuration hot-reload failed for vmagent on instance {{ $labels.instance }}.\n Check vmagent's logs for detailed error message."\n\n - alert: StreamAggrFlushTimeout\n expr: |\n increase(vm_streamaggr_flush_timeouts_total[5m]) > 0\n labels:\n severity: warning\n annotations:\n summary: "Streaming aggregation at \"{{ $labels.job }}\" (instance {{ $labels.instance }}) can't be finished within the configured aggregation interval."\n description: "Stream aggregation process can't keep up with the load and might produce incorrect aggregation results. Check logs for more details.\n Possible solutions: increase aggregation interval; aggregate smaller number of series; reduce samples' ingestion rate to stream aggregation."\n\n - alert: StreamAggrDedupFlushTimeout\n expr: |\n increase(vm_streamaggr_dedup_flush_timeouts_total[5m]) > 0\n labels:\n severity: warning\n annotations:\n summary: "Deduplication \"{{ $labels.job }}\" (instance {{ $labels.instance }}) can't be finished within configured deduplication interval."\n description: "Deduplication process can't keep up with the load and might produce incorrect results. Check docs https://docs.victoriametrics.com/stream-aggregation/#deduplication and logs for more details.\n Possible solutions: increase deduplication interval; deduplicate smaller number of series; reduce samples' ingestion rate."\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\rules\alerts-vmagent.yml
|
alerts-vmagent.yml
|
YAML
| 9,157 | 0.95 | 0.173913 | 0.053691 |
awesome-app
| 363 |
2024-10-26T02:06:02.029599
|
BSD-3-Clause
| false |
90e369c8f0df1df196fc8b5b3f5611d1
|
# File contains default list of alerts for vmalert service.\n# The alerts below are just recommendations and may require some updates\n# and threshold calibration according to every specific setup.\ngroups:\n # Alerts group for vmalert assumes that Grafana dashboard\n # https://grafana.com/grafana/dashboards/14950 is installed.\n # Pls update the `dashboard` annotation according to your setup.\n - name: vmalert\n interval: 30s\n rules:\n - alert: ConfigurationReloadFailure\n expr: vmalert_config_last_reload_successful != 1\n labels:\n severity: warning\n annotations:\n summary: "Configuration reload failed for vmalert instance {{ $labels.instance }}"\n description: "Configuration hot-reload failed for vmalert on instance {{ $labels.instance }}.\n Check vmalert's logs for detailed error message."\n\n - alert: AlertingRulesError\n expr: sum(increase(vmalert_alerting_rules_errors_total[5m])) without(id) > 0\n for: 5m\n labels:\n severity: warning\n annotations:\n dashboard: "http://localhost:3000/d/LzldHAVnz?viewPanel=13&var-instance={{ $labels.instance }}&var-file={{ $labels.file }}&var-group={{ $labels.group }}"\n summary: "Alerting rules are failing for vmalert instance {{ $labels.instance }}"\n description: "Alerting rules execution is failing for \"{{ $labels.alertname }}\" from group \"{{ $labels.group }}\" in file \"{{ $labels.file }}\".\n Check vmalert's logs for detailed error message."\n\n - alert: RecordingRulesError\n expr: sum(increase(vmalert_recording_rules_errors_total[5m])) without(id) > 0\n for: 5m\n labels:\n severity: warning\n annotations:\n dashboard: "http://localhost:3000/d/LzldHAVnz?viewPanel=30&var-instance={{ $labels.instance }}&var-file={{ $labels.file }}&var-group={{ $labels.group }}"\n summary: "Recording rules are failing for vmalert instance {{ $labels.instance }}"\n description: "Recording rules execution is failing for \"{{ $labels.recording }}\" from group \"{{ $labels.group }}\" in file \"{{ $labels.file }}\".\n Check vmalert's logs for detailed error message."\n\n - alert: RecordingRulesNoData\n expr: sum(vmalert_recording_rules_last_evaluation_samples) without(id) < 1\n for: 30m\n labels:\n severity: info\n annotations:\n dashboard: "http://localhost:3000/d/LzldHAVnz?viewPanel=33&var-file={{ $labels.file }}&var-group={{ $labels.group }}"\n summary: "Recording rule {{ $labels.recording }} ({{ $labels.group }}) produces no data"\n description: "Recording rule \"{{ $labels.recording }}\" from group \"{{ $labels.group }}\ in file \"{{ $labels.file }}\" \n produces 0 samples over the last 30min. It might be caused by a misconfiguration \n or incorrect query expression."\n\n - alert: TooManyMissedIterations\n expr: increase(vmalert_iteration_missed_total[5m]) > 0\n for: 15m\n labels:\n severity: warning\n annotations:\n summary: "vmalert instance {{ $labels.instance }} is missing rules evaluations"\n description: "vmalert instance {{ $labels.instance }} is missing rules evaluations for group \"{{ $labels.group }}\" in file \"{{ $labels.file }}\".\n The group evaluation time takes longer than the configured evaluation interval. This may result in missed \n alerting notifications or recording rules samples. Try increasing evaluation interval or concurrency of\n group \"{{ $labels.group }}\". See https://docs.victoriametrics.com/vmalert/#groups. 
\n If rule expressions are taking longer than expected, please see https://docs.victoriametrics.com/troubleshooting/#slow-queries."\n\n - alert: RemoteWriteErrors\n expr: increase(vmalert_remotewrite_errors_total[5m]) > 0\n for: 15m\n labels:\n severity: warning\n annotations:\n summary: "vmalert instance {{ $labels.instance }} is failing to push metrics to remote write URL"\n description: "vmalert instance {{ $labels.instance }} is failing to push metrics generated via alerting \n or recording rules to the configured remote write URL. Check vmalert's logs for detailed error message."\n\n - alert: RemoteWriteDroppingData\n expr: increase(vmalert_remotewrite_dropped_rows_total[5m]) > 0\n for: 5m\n labels:\n severity: critical\n annotations:\n summary: "vmalert instance {{ $labels.instance }} is dropping data sent to remote write URL"\n description: "vmalert instance {{ $labels.instance }} is failing to send results of alerting or recording rules \n to the configured remote write URL. This may result into gaps in recording rules or alerts state.\n Check vmalert's logs for detailed error message."\n\n - alert: AlertmanagerErrors\n expr: increase(vmalert_alerts_send_errors_total[5m]) > 0\n for: 15m\n labels:\n severity: warning\n annotations:\n summary: "vmalert instance {{ $labels.instance }} is failing to send notifications to Alertmanager"\n description: "vmalert instance {{ $labels.instance }} is failing to send alert notifications to \"{{ $labels.addr }}\".\n Check vmalert's logs for detailed error message."\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\rules\alerts-vmalert.yml
|
alerts-vmalert.yml
|
YAML
| 5,425 | 0.95 | 0.229167 | 0.067416 |
node-utils
| 650 |
2025-06-15T09:18:28.166235
|
BSD-3-Clause
| false |
6c1aee3a26835e5fe98b587e0f57bef9
|
# This file provides a recommended list of alerts to monitor the health of VictoriaMetrics Anomaly Detection (vmanomaly).\n# Note: The alerts below are general recommendations and may require customization,\n# including threshold adjustments, to suit the specifics of your setup.\n\ngroups:\n # Note - Adjust the `job` filter to match your specific setup.\n # By default, the `job` label for vmanomaly in push-based self-monitoring mode is set to `vmanomaly`.\n # However, this can be overridden using additional labels. For further details, refer to the example here:\n # https://docs.victoriametrics.com/anomaly-detection/components/monitoring/?highlight=extra_labels#monitoring-section-config-example\n - name: vmanomaly-health\n rules:\n - alert: TooManyRestarts\n expr: changes(process_start_time_seconds{job=~".*vmanomaly.*"}[15m]) > 2\n labels:\n severity: critical\n annotations:\n summary: "{{ $labels.job }} too many restarts (instance {{ $labels.instance }})"\n description: |\n Job {{ $labels.job }} (instance {{ $labels.instance }}) has restarted more than twice in the last 15 minutes.\n It might be crashlooping. Please check the logs for more details.\n Additionally, refer to the "r:errors" value in the "Instance Overview" section of the self-monitoring Grafana dashboard.\n\n # works if you use Prometheus scraping (pull model only)\n - alert: ServiceDown\n expr: up{job=~".*vmanomaly.*"} == 0\n for: 5m\n labels:\n severity: critical\n annotations:\n summary: "Service {{ $labels.job }} is down on {{ $labels.instance }}"\n description: "{{ $labels.instance }} of job {{ $labels.job }} has been down for more than 5m"\n\n # default value of 900 Should be changed to the scrape_interval for pull metrics. For push metrics this should be the lowest fit_every or infer_every in your vmanomaly config.\n - alert: NoSelfMonitoringMetrics\n expr: >\n lag(vmanomaly_start_time_seconds{job="vmanomaly"}[24h]) > 900\n for: 5m\n labels:\n severity: critical\n annotations:\n summary: "Metrics have not been seen from \"{{ $labels.job }}\"(\"{{ $labels.instance }}\") for {{ $value }} seconds"\n description: >\n The missing metric may indicate that vmanomaly is not running or is inaccessible from vmagent or the remotewrite endpoint.\n\n - alert: ProcessNearFDLimits\n expr: (process_max_fds{job=~".*vmanomaly.*"} - process_open_fds{job=~".*vmanomaly.*"}) < 100\n for: 5m\n labels:\n severity: critical\n annotations:\n summary: "Number of free file descriptors is less than 100 for \"{{ $labels.job }}\"(\"{{ $labels.instance }}\") for the last 5m"\n description: |\n Exhausting OS file descriptors limit can cause severe degradation of the process.\n Consider to increase the limit as fast as possible.\n\n - alert: TooHighCPUUsage\n expr: > \n sum(rate(process_cpu_seconds_total{job=~".*vmanomaly.*"}[5m])) by (job, instance) /\n sum(vmanomaly_cpu_cores_available{job=~".*vmanomaly.*"}[5m]) by (job, instance) > 0.9\n for: 5m\n labels:\n severity: critical\n annotations:\n summary: "More than 90% of CPU is used by \"{{ $labels.job }}\"(\"{{ $labels.instance }}\") during the last 5m"\n description: >\n Too high CPU usage may be a sign of insufficient resources and make process unstable.\n Consider to either increase available CPU resources or decrease the load on the process.\n\n - alert: TooHighMemoryUsage\n expr: (min_over_time(process_resident_memory_bytes[10m]) / vmanomaly_available_memory_bytes) > 0.85\n for: 5m\n labels:\n severity: critical\n annotations:\n summary: "It is more than 85% of memory used by \"{{ $labels.job 
}}\"(\"{{ $labels.instance }}\")"\n description: |\n Too high memory usage may result into multiple issues such as OOMs or degraded performance.\n E.g. it can be caused by high churn rate in your input data.\n Consider to either increase available memory or decrease the load on the process.\n\n - name: vmanomaly-issues\n rules:\n - alert: ServiceErrorsDetected\n expr: sum(increase(vmanomaly_model_run_errors_total{job=~".*vmanomaly.*"}[5m])) by (job, instance, stage) > 0\n for: 5m\n labels:\n severity: critical\n annotations:\n summary: "Model Run Errors in \"{{ $labels.job }}\"(\"{{ $labels.instance }}\") stage: {{ $labels.stage }} during the last 5m"\n description: >\n Errors in the service may indicate a problem with the service itself or its dependencies.\n Investigate the logs for more details.\n - alert: SkippedModelRunsDetected\n expr: sum(increase(vmanomaly_model_runs_skipped_total{job=~".*vmanomaly.*"}[5m])) by (job, instance, stage) > 0\n for: 5m\n labels:\n severity: warning\n annotations:\n summary: "Skipped Model Runs in \"{{ $labels.job }}\"(\"{{ $labels.instance }}\") stage: {{ $labels.stage }} during the last 5m"\n description: >\n Skipped model runs may indicate issues like:\n 1. No new or valid data is available for the current run.\n 2. The presence of new time series that do not have a trained model yet.\n 3. No new (or valid) datapoints produced during inference.\n Investigate the logs for more details.\n - alert: HighReadErrorRate\n expr: >\n (\n sum(increase(vmanomaly_reader_responses_total{job=~".*vmanomaly.*", code=~"2.."}[5m])) by (job, instance, url) /\n sum(increase(vmanomaly_reader_responses_total{job=~".*vmanomaly.*"}[5m])) by (job, instance, url)\n ) < 0.95\n for: 5m\n labels:\n severity: warning\n annotations:\n summary: "High error rate in read requests for \"{{ $labels.job }}\"(\"{{ $labels.instance }}\") for url: {{ $labels.url }} during the last 5m"\n description: >\n Reading errors may indicate issues with the input data source, server-side constraint violations, security or network issues. \n Investigate the logs for more details.\n - alert: HighWriteErrorRate\n expr: >\n (\n sum(increase(vmanomaly_writer_responses_total{job=~".*vmanomaly.*", code=~"2.."}[5m])) by (job, instance, url) /\n sum(increase(vmanomaly_writer_responses_total{job=~".*vmanomaly.*"}[5m])) by (job, instance, url)\n ) < 0.95\n for: 5m\n labels:\n severity: warning\n annotations:\n summary: "High error rate in write requests for \"{{ $labels.job }}\"(\"{{ $labels.instance }}\") for url: {{ $labels.url }} during the last 5m"\n description: >\n Writing errors may indicate issues with the destination source, server-side constraint violations, security, or network issues. \n Investigate the logs for more details.\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\rules\alerts-vmanomaly.yml
|
alerts-vmanomaly.yml
|
YAML
| 7,163 | 0.95 | 0.195489 | 0.071429 |
python-kit
| 400 |
2024-04-28T01:19:43.083403
|
Apache-2.0
| false |
89209a647017b9bf190eb990c3c75e42
|
# File contains default list of alerts for vmauth service.\n# The alerts below are just recommendations and may require some updates\n# and threshold calibration according to every specific setup.\ngroups:\n - name: vmauth\n interval: 30s\n rules:\n - alert: ConcurrentRequestsLimitReached\n expr: sum(increase(vmauth_concurrent_requests_limit_reached_total[1m])) by (instance) > 0\n for: 3m\n labels:\n severity: warning\n annotations:\n summary: "vmauth ({{ $labels.instance }}) reached concurrent requests limit"\n description: "Possible solutions: increase the limit with flag: -maxConcurrentRequests, \n deploy additional vmauth replicas, check requests latency at backend service. \n See more details at https://docs.victoriametrics.com/vmauth/#concurrency-limiting"\n - alert: UserConcurrentRequestsLimitReached\n expr: sum(increase(vmauth_user_concurrent_requests_limit_reached_total[1m])) by (username) > 0\n for: 3m\n labels:\n severity: warning\n annotations:\n summary: "vmauth has reached concurrent requests limit for username {{ $labels.username }}"\n description: "Possible solutions: increase limit with flag: -maxConcurrentPerUserRequests, \n deploy additional vmauth replicas, check requests latency at backend service."\n\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\rules\alerts-vmauth.yml
|
alerts-vmauth.yml
|
YAML
| 1,370 | 0.95 | 0.148148 | 0.115385 |
python-kit
| 898 |
2025-05-08T15:52:48.296050
|
GPL-3.0
| false |
3c3af82189a882ff869ed80d8140c9e6
|
# File contains default list of alerts for VictoriaMetrics single server.\n# The alerts below are just recommendations and may require some updates\n# and threshold calibration according to every specific setup.\ngroups:\n # Alerts group for VM single assumes that Grafana dashboard\n # https://grafana.com/grafana/dashboards/10229 is installed.\n # Pls update the `dashboard` annotation according to your setup.\n - name: vmsingle\n interval: 30s\n concurrency: 2\n rules:\n - alert: DiskRunsOutOfSpaceIn3Days\n expr: |\n sum(vm_free_disk_space_bytes) without(path) /\n (\n rate(vm_rows_added_to_storage_total[1d]) * (\n sum(vm_data_size_bytes{type!~"indexdb.*"}) without(type) /\n sum(vm_rows{type!~"indexdb.*"}) without(type)\n )\n ) < 3 * 24 * 3600 > 0\n for: 30m\n labels:\n severity: critical\n annotations:\n dashboard: "http://localhost:3000/d/wNf0q_kZk?viewPanel=53&var-instance={{ $labels.instance }}"\n summary: "Instance {{ $labels.instance }} will run out of disk space soon"\n description: "Taking into account current ingestion rate, free disk space will be enough only\n for {{ $value | humanizeDuration }} on instance {{ $labels.instance }}.\n\n Consider to limit the ingestion rate, decrease retention or scale the disk space if possible."\n\n - alert: NodeBecomesReadonlyIn3Days\n expr: |\n sum(vm_free_disk_space_bytes - vm_free_disk_space_limit_bytes) without(path) /\n (\n rate(vm_rows_added_to_storage_total[1d]) * (\n sum(vm_data_size_bytes{type!~"indexdb.*"}) without(type) /\n sum(vm_rows{type!~"indexdb.*"}) without(type)\n )\n ) < 3 * 24 * 3600 > 0\n for: 30m\n labels:\n severity: warning\n annotations:\n dashboard: "http://localhost:3000/d/oS7Bi_0Wz?viewPanel=53&var-instance={{ $labels.instance }}"\n summary: "Instance {{ $labels.instance }} will become read-only in 3 days"\n description: "Taking into account current ingestion rate and free disk space\n instance {{ $labels.instance }} is writable for {{ $value | humanizeDuration }}.\n\n Consider to limit the ingestion rate, decrease retention or scale the disk space up if possible."\n\n - alert: DiskRunsOutOfSpace\n expr: |\n sum(vm_data_size_bytes) by(job, instance) /\n (\n sum(vm_free_disk_space_bytes) by(job, instance) +\n sum(vm_data_size_bytes) by(job, instance)\n ) > 0.8\n for: 30m\n labels:\n severity: critical\n annotations:\n dashboard: "http://localhost:3000/d/wNf0q_kZk?viewPanel=53&var-instance={{ $labels.instance }}"\n summary: "Instance {{ $labels.instance }} (job={{ $labels.job }}) will run out of disk space soon"\n description: "Disk utilisation on instance {{ $labels.instance }} is more than 80%.\n\n Having less than 20% of free disk space could cripple merge processes and overall performance.\n Consider to limit the ingestion rate, decrease retention or scale the disk space if possible."\n\n - alert: RequestErrorsToAPI\n expr: increase(vm_http_request_errors_total[5m]) > 0\n for: 15m\n labels:\n severity: warning\n annotations:\n dashboard: "http://localhost:3000/d/wNf0q_kZk?viewPanel=35&var-instance={{ $labels.instance }}"\n summary: "Too many errors served for path {{ $labels.path }} (instance {{ $labels.instance }})"\n description: "Requests to path {{ $labels.path }} are receiving errors.\n Please verify if clients are sending correct requests."\n\n - alert: TooHighChurnRate\n expr: |\n (\n sum(rate(vm_new_timeseries_created_total[5m])) by(instance)\n /\n sum(rate(vm_rows_inserted_total[5m])) by(instance)\n ) > 0.1\n for: 15m\n labels:\n severity: warning\n annotations:\n dashboard: 
"http://localhost:3000/d/wNf0q_kZk?viewPanel=66&var-instance={{ $labels.instance }}"\n summary: "Churn rate is more than 10% on \"{{ $labels.instance }}\" for the last 15m"\n description: "VM constantly creates new time series on \"{{ $labels.instance }}\".\n\n This effect is known as Churn Rate.\n\n High Churn Rate tightly connected with database performance and may\n result in unexpected OOM's or slow queries."\n\n - alert: TooHighChurnRate24h\n expr: |\n sum(increase(vm_new_timeseries_created_total[24h])) by(instance)\n >\n (sum(vm_cache_entries{type="storage/hour_metric_ids"}) by(instance) * 3)\n for: 15m\n labels:\n severity: warning\n annotations:\n dashboard: "http://localhost:3000/d/wNf0q_kZk?viewPanel=66&var-instance={{ $labels.instance }}"\n summary: "Too high number of new series on \"{{ $labels.instance }}\" created over last 24h"\n description: "The number of created new time series over last 24h is 3x times higher than\n current number of active series on \"{{ $labels.instance }}\".\n\n This effect is known as Churn Rate.\n\n High Churn Rate tightly connected with database performance and may\n result in unexpected OOM's or slow queries."\n\n - alert: TooHighSlowInsertsRate\n expr: |\n (\n sum(rate(vm_slow_row_inserts_total[5m])) by(instance)\n /\n sum(rate(vm_rows_inserted_total[5m])) by(instance)\n ) > 0.05\n for: 15m\n labels:\n severity: warning\n annotations:\n dashboard: "http://localhost:3000/d/wNf0q_kZk?viewPanel=68&var-instance={{ $labels.instance }}"\n summary: "Percentage of slow inserts is more than 5% on \"{{ $labels.instance }}\" for the last 15m"\n description: "High rate of slow inserts on \"{{ $labels.instance }}\" may be a sign of resource exhaustion\n for the current load. It is likely more RAM is needed for optimal handling of the current number of active time series.\n See also https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3976#issuecomment-1476883183"
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\rules\alerts.yml
|
alerts.yml
|
YAML
| 6,335 | 0.95 | 0.15748 | 0.04918 |
node-utils
| 817 |
2023-10-01T03:07:16.683300
|
GPL-3.0
| false |
b54dc114384da82059c4ca2731d654c4
|
services:\n # meta service will be ignored by compose\n .victorialogs:\n image: docker.io/victoriametrics/victoria-logs:v1.20.0-victorialogs\n command:\n - -storageDataPath=/vlogs\n - -loggerFormat=json\n - -syslog.listenAddr.tcp=0.0.0.0:8094\n - -datadog.streamFields=service,hostname,ddsource\n - -journald.streamFields=_HOSTNAME,_SYSTEMD_UNIT,_PID\n - -journald.ignoreFields=MESSAGE_ID,INVOCATION_ID,USER_INVOCATION_ID,\n - -journald.ignoreFields=_BOOT_ID,_MACHINE_ID,_SYSTEMD_INVOCATION_ID,_STREAM_ID,_UID\n deploy:\n replicas: 0\n healthcheck:\n test: ["CMD", "wget", "-qO-", "http://127.0.0.1:9428/health"]\n interval: 1s\n timeout: 1s\n retries: 10\n\n dd-proxy:\n image: docker.io/victoriametrics/vmauth:v1.115.0\n restart: on-failure\n volumes:\n - ./:/etc/vmauth\n command: -auth.config=/etc/vmauth/vmauth.yaml\n\n victorialogs:\n extends: .victorialogs\n ports:\n - '9428:9428'\n volumes:\n - victorialogs:/vlogs\n deploy:\n replicas: 1\n\n # second replica is needed for HA setup and its replica count is set to 1 in compose-ha.yaml file\n victorialogs-2:\n extends: .victorialogs\n ports:\n - '9429:9428'\n volumes:\n - victorialogs-2:/vlogs\n deploy:\n replicas: 0\n\n victoriametrics:\n image: victoriametrics/victoria-metrics:v1.112.0\n ports:\n - '8428:8428'\n command:\n - -storageDataPath=/vmsingle\n - -loggerFormat=json\n volumes:\n - victoriametrics:/vmsingle\n healthcheck:\n test: ["CMD", "wget", "-qO-", "http://127.0.0.1:8428/health"]\n interval: 1s\n timeout: 1s\n retries: 10\n\nvolumes:\n victorialogs:\n victorialogs-2:\n victoriametrics:\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\victorialogs\compose-base.yml
|
compose-base.yml
|
YAML
| 1,713 | 0.8 | 0.015385 | 0.033333 |
react-lib
| 377 |
2023-07-17T02:16:49.092161
|
BSD-3-Clause
| false |
b404fe8e00645288e32c2a52fe6027ef
|
unauthorized_user:\n url_map:\n - src_paths:\n - "/api/v2/logs"\n - "/api/v1/validate"\n url_prefix: "http://victorialogs:9428/insert/datadog/"\n - src_paths:\n - "/api/v1/series"\n - "/api/v2/series"\n - "/api/beta/sketches"\n - "/api/v1/validate"\n - "/api/v1/check_run"\n - "/intake"\n - "/api/v1/metadata"\n url_prefix: "http://victoriametrics:8428/datadog/"\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\victorialogs\vmauth.yml
|
vmauth.yml
|
YAML
| 429 | 0.8 | 0 | 0 |
node-utils
| 453 |
2024-03-16T16:23:05.789839
|
Apache-2.0
| false |
6cf2552f7ea0e605947bf312d8c08236
|
include:\n - ../compose-base.yml\nservices:\n agent:\n image: docker.io/datadog/agent:7.57.2\n restart: on-failure\n volumes:\n - /var/lib/docker/containers:/var/lib/docker/containers\n - /var/run/docker.sock:/var/run/docker.sock:ro\n - /proc/:/host/proc/:ro\n - /sys/fs/cgroup/:/host/sys/fs/cgroup:ro\n environment:\n DD_API_KEY: test\n DD_URL: http://dd-proxy:8427\n DD_LOGS_CONFIG_LOGS_DD_URL: http://dd-proxy:8427\n DD_LOGS_CONFIG_CONTAINER_COLLECT_ALL: true\n DD_LOGS_ENABLED: true\n DD_LOGS_CONFIG_USE_HTTP: true\n DD_PROCESS_CONFIG_PROCESS_COLLECTION_ENABLED: false\n DD_PROCESS_CONFIG_CONTAINER_COLLECTION_ENABLED: false\n DD_PROCESS_CONFIG_PROCESS_DISCOVERY_ENABLED: false\n depends_on:\n victorialogs:\n condition: service_healthy\n victoriametrics:\n condition: service_healthy\n
|
dataset_sample\yaml\VictoriaMetrics_VictoriaMetrics\deployment\docker\victorialogs\datadog-agent\compose-base.yml
|
compose-base.yml
|
YAML
| 869 | 0.8 | 0 | 0 |
vue-tools
| 901 |
2024-09-27T06:06:48.343374
|
MIT
| false |
8210a16344d637cde616197af7ef05af
|
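The rows above bundle several VictoriaMetrics alerting-rule files (alerts-cluster.yml, alerts-health.yml, alerts-vmagent.yml, alerts-vmalert.yml, and so on). As an illustrative aside, not part of the dataset itself, the sketch below shows one way such rule files could be syntax-checked with vmalert's dry-run mode before being shipped; the service name, image tag, and mount paths are assumptions rather than values taken from the rows.

# Minimal sketch (assumed: image tag and a local ./rules directory holding the sampled alerts-*.yml files).
services:
  vmalert-validate:
    image: victoriametrics/vmalert:v1.115.0
    volumes:
      - ./rules:/etc/alerts:ro            # local copies of the sampled rule files
    command:
      - -rule=/etc/alerts/*.yml           # load every rules file for validation
      - -dryRun                           # parse and validate the rules, then exit
      - -datasource.url=http://victoriametrics:8428   # placeholder; not queried during dry-run

Running "docker compose run --rm vmalert-validate" (hypothetical service name) would fail fast on any malformed rule group, which is a cheap sanity check before deploying files like those sampled above.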