| content (string, 1-103k chars, nullable) | path (string, 8-216 chars) | filename (string, 2-179 chars) | language (string, 15 classes) | size_bytes (int64, 2-189k) | quality_score (float64, 0.5-0.95) | complexity (float64, 0-1) | documentation_ratio (float64, 0-1) | repository (string, 5 classes) | stars (int64, 0-1k) | created_date (2023-07-10 19:21:08 to 2025-07-09 19:11:45) | license (string, 4 classes) | is_test (bool, 2 classes) | file_hash (string, 32 chars) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Sylius\Component\Core\Model\TaxRate:\n sales_tax:\n code: sales_tax\n name: "Sales Tax 20\\%"\n zone: "@zone_eu"\n calculator: "default"\n category: "@tax_category_1"\n regular_tax:\n code: regular_tax\n name: "Regular Tax 20\\%"\n zone: "@zone_eu"\n calculator: "default"\n category: "@tax_category_2"\n
| dataset_sample\yaml\Sylius_Sylius\tests\DataFixtures\ORM\resources\tax_rates.yml | tax_rates.yml | YAML | 367 | 0.7 | 0 | 0 | vue-tools | 572 | 2023-11-14T19:12:44.394078 | BSD-3-Clause | true | 17f3cc864ec99531840b1befb778bdf9 |
Sylius\Component\Core\Model\Product:\n mug:\n code: "MUG"\n channels: ["@channel"]\n currentLocale: "en_US"\n translations:\n en_US: "@mug_translation"\n\nSylius\Component\Core\Model\ProductTranslation:\n mug_translation:\n slug: "mug"\n locale: "en_US"\n name: "Mug"\n description: "<paragraph(2)>"\n translatable: "@mug"\n\nSylius\Component\Core\Model\ProductVariant:\n mug_sw:\n code: "MUG_SW"\n product: "@mug"\n currentLocale: "en_US"\n translations:\n en_US: "@sw_mug_translation"\n updatedAt: "<(new \\DateTime('2015-10-10'))>"\n channelPricings:\n CHANNEL: "@sw_mug_channel_pricing"\n EXPENSIVE_CHANNEL: "@sw_mug_expensive_channel_pricing"\n hard_available_mug:\n code: "HARD_AVAILABLE_MUG"\n product: "@mug"\n currentLocale: "en_US"\n translations:\n en_US: "@hard_available_mug_translation"\n updatedAt: "<(new \\DateTime('2015-10-05'))>"\n channelPricings:\n CHANNEL: "@hard_available_mug_channel_pricing"\n\nSylius\Component\Core\Model\ChannelPricing:\n sw_mug_channel_pricing:\n channelCode: "CHANNEL"\n price: 2000\n hard_available_mug_channel_pricing:\n channelCode: "CHANNEL"\n price: 3000\n sw_mug_expensive_channel_pricing:\n channelCode: "EXPENSIVE_CHANNEL"\n price: 11000\n\nSylius\Component\Product\Model\ProductVariantTranslation:\n sw_mug_translation:\n locale: "en_US"\n name: "Star wars mug"\n translatable: "@mug_sw"\n hard_available_mug_translation:\n locale: "en_US"\n name: "Breaking bad mug"\n translatable: "@hard_available_mug"\n\nSylius\Component\Core\Model\Order:\n order_001:\n channel: "@channel"\n currencyCode: USD\n localeCode: en_US\n customer: "@customer_oliver"\n state: cart\n checkoutState: completed\n paymentState: paid\n checkoutCompletedAt: "<(new \\DateTime('2019-12-27 00:00:01'))>"\n order_002:\n channel: "@channel"\n currencyCode: USD\n localeCode: en_US\n customer: "@customer_oliver"\n state: cart\n checkoutState: completed\n paymentState: paid\n checkoutCompletedAt: "<(new \\DateTime('2020-01-15 06:00:01'))>"\n order_003:\n channel: "@channel"\n currencyCode: USD\n localeCode: en_US\n customer: "@customer_oliver"\n state: cart\n checkoutState: completed\n paymentState: paid\n checkoutCompletedAt: "<(new \\DateTime('2020-01-15 15:00:01'))>"\n order_004:\n channel: "@channel"\n currencyCode: USD\n localeCode: en_US\n customer: "@customer_oliver"\n state: cart\n checkoutState: completed\n paymentState: paid\n checkoutCompletedAt: "<(new \\DateTime('2020-01-18 06:00:01'))>"\n order_005:\n channel: "@channel"\n currencyCode: USD\n localeCode: en_US\n customer: "@customer_oliver"\n state: cart\n checkoutState: completed\n paymentState: paid\n checkoutCompletedAt: "<(new \\DateTime('2020-02-18 08:00:01'))>"\n order_006:\n channel: "@channel"\n currencyCode: USD\n localeCode: en_US\n customer: "@customer_oliver"\n state: cart\n checkoutState: completed\n paymentState: paid\n checkoutCompletedAt: "<(new \\DateTime('2020-02-18 09:30:01'))>"\n expensive_order_001:\n channel: "@expensive_channel"\n currencyCode: USD\n localeCode: en_US\n customer: "@customer_oliver"\n state: cart\n checkoutState: completed\n paymentState: paid\n checkoutCompletedAt: "<(new \\DateTime('2019-12-29 09:30:01'))>"\n expensive_order_002:\n channel: "@expensive_channel"\n currencyCode: USD\n localeCode: en_US\n customer: "@customer_oliver"\n state: cart\n checkoutState: completed\n paymentState: new\n checkoutCompletedAt: "<(new \\DateTime('2020-01-18 09:30:01'))>"\n\nSylius\Component\Core\Model\OrderItem:\n sw_mug_item_1:\n variant: "@mug_sw"\n order: "@order_001"\n sw_mug_item_2:\n variant: 
"@mug_sw"\n order: "@order_002"\n sw_mug_item_4:\n variant: "@mug_sw"\n order: "@order_004"\n sw_mug_item_5:\n variant: "@mug_sw"\n order: "@order_005"\n sw_mug_item_6:\n variant: "@mug_sw"\n order: "@order_006"\n hard_available_mug_item:\n variant: "@hard_available_mug"\n order: "@order_003"\n expensive_sw_mug_item_1:\n variant: "@mug_sw"\n order: "@expensive_order_001"\n expensive_sw_mug_item_2:\n variant: "@mug_sw"\n order: "@expensive_order_002"\n\nSylius\Component\Core\Model\OrderItemUnit:\n sw_mug_item_unit1:\n __construct: ["@sw_mug_item_1"]\n sw_mug_item_unit2:\n __construct: ["@sw_mug_item_2"]\n sw_mug_item_unit4:\n __construct: ["@sw_mug_item_4"]\n sw_mug_item_unit5:\n __construct: ["@sw_mug_item_5"]\n sw_mug_item_unit6:\n __construct: ["@sw_mug_item_6"]\n hard_available_mug_item_unit:\n __construct: ["@hard_available_mug_item"]\n expensive_sw_mug_item_unit1:\n __construct: ["@expensive_sw_mug_item_1"]\n expensive_sw_mug_item_unit2:\n __construct: ["@expensive_sw_mug_item_1"]\n expensive_sw_mug_item_unit3:\n __construct: ["@expensive_sw_mug_item_1"]\n expensive_sw_mug_item_unit4:\n __construct: ["@expensive_sw_mug_item_2"]\n\nSylius\Component\Core\Model\Channel:\n channel:\n code: "CHANNEL"\n name: "Channel"\n hostname: "localhost"\n description: "Lorem ipsum"\n baseCurrency: "@currency"\n defaultLocale: "@locale"\n color: "black"\n enabled: true\n taxCalculationStrategy: "order_items_based"\n currencies: ["@currency"]\n expensive_channel:\n code: "EXPENSIVE_CHANNEL"\n name: "Expensive channel"\n hostname: "money.com"\n description: "Lorem ipsum"\n baseCurrency: "@currency"\n defaultLocale: "@locale"\n color: "gold"\n enabled: true\n taxCalculationStrategy: "order_items_based"\n currencies: ["@currency"]\n\nSylius\Component\Currency\Model\Currency:\n currency:\n code: USD\n\nSylius\Component\Locale\Model\Locale:\n locale:\n code: en_US\n\nSylius\Component\Core\Model\Customer:\n customer_oliver:\n firstName: "Oliver"\n lastName: "Queen"\n email: "[email protected]"\n emailCanonical: "[email protected]"\n birthday: <(new \DateTime())>\n
| dataset_sample\yaml\Sylius_Sylius\tests\DataFixtures\ORM\resources\year_sales.yml | year_sales.yml | YAML | 6,662 | 0.7 | 0 | 0 | node-utils | 637 | 2025-03-07T14:35:50.826982 | MIT | true | db28fbff202c979586c5e15f30828bd9 |
Sylius\Component\Addressing\Model\ZoneMember:\n member_{NL, BE}:\n code: <current()>\n\nSylius\Component\Addressing\Model\Zone:\n zone_eu:\n code: EU\n name: European Union\n type: country\n members: ["@member_NL", "@member_BE"]\n
| dataset_sample\yaml\Sylius_Sylius\tests\DataFixtures\ORM\resources\zones.yml | zones.yml | YAML | 261 | 0.7 | 0 | 0 | awesome-app | 283 | 2024-10-06T11:25:00.364868 | Apache-2.0 | true | dfbbbc4a6f774adf39c477b8775b03ba |
comment: false\n\ncoverage:\n range: "40...100"\n precision: 1\n status:\n patch:\n default:\n informational: true\n project:\n default:\n informational: true\n\ngithub_checks:\n annotations: false\n\nignore:\n - "**.pb.go"\n - "**_mocked.go"\n - "**/mocks/*"\n
| dataset_sample\yaml\syncthing_syncthing\.codecov.yml | .codecov.yml | YAML | 278 | 0.8 | 0 | 0 | python-kit | 684 | 2024-07-24T18:06:20.389437 | GPL-3.0 | false | 537e3d811a927cfa41f7ca7c3b380b4c |
linters:\n enable-all: true\n disable:\n - cyclop\n - depguard\n - exhaustive\n - exhaustruct\n - funlen\n - gci\n - gochecknoglobals\n - gochecknoinits\n - gocognit\n - goconst\n - gocyclo\n - godox\n - gofmt\n - goimports\n - gomoddirectives\n - inamedparam\n - interfacebloat\n - ireturn\n - lll\n - maintidx\n - nestif\n - nonamedreturns\n - paralleltest\n - protogetter\n - scopelint\n - tagalign\n - tagliatelle\n - testpackage\n - varnamelen\n - wsl\n\nissues:\n exclude-dirs:\n - internal/gen\n - cmd/dev\n - repos
| dataset_sample\yaml\syncthing_syncthing\.golangci.yml | .golangci.yml | YAML | 586 | 0.85 | 0 | 0 | react-lib | 171 | 2025-04-18T11:16:04.035311 | BSD-3-Clause | false | c636665399ce6b28a312cf74c2475423 |
# This is the policy-bot configuration for this repository. It controls\n# which approvals are required for any given pull request. The format is\n# described at https://github.com/palantir/policy-bot. The syntax of the\n# policy can be verified by the bot:\n# curl https://pb.syncthing.net/api/validate -X PUT -T .policy.yml\n\n# The policy below is what is required for any pull request.\npolicy:\n approval:\n - subject is conventional commit\n - project metadata requires maintainer approval\n - or:\n - is approved by a syncthing contributor\n - is a translation or dependency update by a contributor\n - is a trivial change by a contributor\n\n # Additionally, contributors can disapprove of a PR\n disapproval:\n requires:\n teams:\n - syncthing/contributors\n\n# The rules for the policy are described below.\n\napproval_rules:\n\n # All commits (PRs before squashing) should have a valid conventional\n # commit type subject.\n - name: subject is conventional commit\n requires:\n conditions:\n title:\n matches:\n - '^(feat|fix|docs|chore|refactor|build): [a-z].+'\n - '^(feat|fix|docs|chore|refactor|build)\(\w+(, \w+)*\): [a-z].+'\n\n # Changes to important project metadata and documentation, including this\n # policy, require signoff by a maintainer\n - name: project metadata requires maintainer approval\n if:\n changed_files:\n paths:\n - ^[^/]+\.md\n - ^\.policy\.yml\n - ^LICENSE\n requires:\n count: 1\n teams:\n - syncthing/maintainers\n options:\n ignore_update_merges: true\n allow_contributor: true\n\n # Regular pull requests require approval by an active contributor\n - name: is approved by a syncthing contributor\n requires:\n count: 1\n teams:\n - syncthing/contributors\n options:\n ignore_update_merges: true\n allow_contributor: true\n\n # Changes to some files (translations, dependencies, compatibility) do not\n # require approval if they were proposed by a contributor and have a\n # matching commit subject\n - name: is a translation or dependency update by a contributor\n if:\n only_changed_files:\n paths:\n - ^gui/default/assets/lang/\n - ^go\.mod$\n - ^go\.sum$\n - ^compat\.yaml$\n title:\n matches:\n - '^chore\(gui\):'\n - '^build\(deps\):'\n - '^build\(compat\):'\n has_author_in:\n teams:\n - syncthing/contributors\n\n # If the change is small and the label "trivial" is added, we accept that\n # on trust. These PRs can be audited after the fact as appropriate.\n # Features are not trivial.\n - name: is a trivial change by a contributor\n if:\n modified_lines:\n total: "< 25"\n title:\n not_matches:\n - '^feat'\n has_labels:\n - trivial\n has_author_in:\n teams:\n - syncthing/contributors\n
| dataset_sample\yaml\syncthing_syncthing\.policy.yml | .policy.yml | YAML | 2,940 | 0.95 | 0.081633 | 0.213483 | awesome-app | 641 | 2023-09-07T19:52:03.421082 | BSD-3-Clause | false | 4d28d1237aa8b2a3a8703924ff04303c |
version: 2\nupdates:\n - package-ecosystem: "github-actions"\n directory: "/"\n schedule:\n interval: monthly\n open-pull-requests-limit: 10\n\n - package-ecosystem: "gomod"\n directory: "/"\n schedule:\n interval: monthly\n open-pull-requests-limit: 10\n
| dataset_sample\yaml\syncthing_syncthing\.github\dependabot.yml | dependabot.yml | YAML | 272 | 0.7 | 0 | 0 | awesome-app | 751 | 2024-01-13T16:01:15.695302 | GPL-3.0 | false | dfb46c4642ccdede4c5e45ff5e34473a |
github: syncthing\ncustom: "https://syncthing.net/donations/"\n\n# patreon: # Replace with a single Patreon username\n# open_collective: # Replace with a single Open Collective username\n# ko_fi: # Replace with a single Ko-fi username\n# tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel\n# community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry\n# liberapay: # Replace with a single Liberapay username\n# issuehunt: # Replace with a single IssueHunt username\n# otechie: # Replace with a single Otechie username\n
| dataset_sample\yaml\syncthing_syncthing\.github\FUNDING.yml | FUNDING.yml | YAML | 576 | 0.8 | 0 | 0.8 | python-kit | 59 | 2023-10-03T15:26:08.098802 | BSD-3-Clause | false | d3ab9b932bee5882c9b9754c220a883d |
name: Feature request\ndescription: File a new feature request\nlabels: ["enhancement", "needs-triage"]\nbody:\n\n - type: textarea\n id: feature\n attributes:\n label: Feature description\n description: Please describe the behavior you'd like to see.\n validations:\n required: true\n\n - type: textarea\n id: problem-usecase\n attributes:\n label: Problem or use case\n description: Please explain which problem this would solve, or what the use case is for the feature. Keep in mind that it's more likely to be implemented if it's generally useful for a larger number of users.\n validations:\n required: true\n\n - type: textarea\n id: alternatives\n attributes:\n label: Alternatives or workarounds\n description: Please describe any alternatives or workarounds you have considered and, possibly, rejected.\n validations:\n required: true\n
| dataset_sample\yaml\syncthing_syncthing\.github\ISSUE_TEMPLATE\01-feature.yml | 01-feature.yml | YAML | 892 | 0.85 | 0.107143 | 0 | react-lib | 421 | 2024-12-09T08:36:05.049393 | GPL-3.0 | false | 7cee84e17c3d8b0695c13dc3c1c92d8f |
name: Bug report\ndescription: If you're actually looking for support instead, see "I need help / I have a question".\nlabels: ["bug", "needs-triage"]\nbody:\n - type: markdown\n attributes:\n value: |\n :no_entry_sign: If you want to report a security issue, please see [our Security Policy](https://syncthing.net/security/) and do not report the issue here.\n\n :interrobang: If you are not sure if there is a bug, but something isn't working right and you need help, please [use the forum](https://forum.syncthing.net/).\n\n - type: textarea\n id: what-happened\n attributes:\n label: What happened?\n description: Also tell us, what did you expect to happen, and any steps we might use to reproduce the problem.\n placeholder: Tell us what you see!\n validations:\n required: true\n\n - type: input\n id: version\n attributes:\n label: Syncthing version\n description: What version of Syncthing are you running?\n placeholder: v1.27.4\n validations:\n required: true\n\n - type: input\n id: platform\n attributes:\n label: Platform & operating system\n description: On what platform(s) are you seeing the problem?\n placeholder: Linux arm64\n validations:\n required: true\n\n - type: input\n id: browser\n attributes:\n label: Browser version\n description: If the problem is related to the GUI, describe your browser and version.\n placeholder: Safari 17.3.1\n\n - type: textarea\n id: logs\n attributes:\n label: Relevant log output\n description: Please copy and paste any relevant log output or crash backtrace. This will be automatically formatted into code, so no need for backticks.\n render: shell\n
| dataset_sample\yaml\syncthing_syncthing\.github\ISSUE_TEMPLATE\02-bug.yml | 02-bug.yml | YAML | 1,718 | 0.95 | 0.058824 | 0 | python-kit | 351 | 2025-03-11T19:03:21.523929 | GPL-3.0 | false | 4da934a2403ac2fc56150d5b480303f8 |
blank_issues_enabled: false\ncontact_links:\n - name: I need help / I have a question\n url: https://forum.syncthing.net/\n about: Ask questions, get support, and discuss with other community members.\n - name: Android issues\n url: https://github.com/syncthing/syncthing-android/issues/\n about: The Android app has its own issue tracker.\n
| dataset_sample\yaml\syncthing_syncthing\.github\ISSUE_TEMPLATE\config.yml | config.yml | YAML | 347 | 0.8 | 0 | 0 | node-utils | 732 | 2024-12-05T05:12:36.925308 | Apache-2.0 | false | 64863f4a524a54d52d9d4019f50593e1 |
coverage:\n status:\n project:\n default:\n threshold: 0%\n patch:\n default:\n target: 75%\n threshold: 0%
| dataset_sample\yaml\TabbyML_tabby\codecov.yml | codecov.yml | YAML | 137 | 0.7 | 0 | 0 | python-kit | 63 | 2024-11-30T04:04:40.265702 | BSD-3-Clause | false | 2a8ce6bed1ce5021800a1f971008cc11 |
name: ast-grep lint\non: [push]\n\njobs:\n sg-lint:\n runs-on: ubuntu-latest\n name: ast-grep-lint\n steps:\n - name: Checkout\n uses: actions/checkout@v4\n\n - name: ast-grep lint step\n uses: ast-grep/[email protected]\n with:\n version: 0.20.1
| dataset_sample\yaml\TabbyML_tabby\.github\workflows\ast-grep-lint.yml | ast-grep-lint.yml | YAML | 276 | 0.7 | 0 | 0 | python-kit | 6 | 2023-09-27T09:51:53.142405 | Apache-2.0 | false | dcfadf764c8512865e54b2f184cbae12 |
name: autofix.ci\n\non:\n pull_request:\n branches: ["main" ]\n paths:\n - '.github/workflows/autofix-pnpm.yml'\n - 'clients/**'\n - 'ee/tabby-ui/**'\n\npermissions:\n contents: read\n\nconcurrency:\n group: ${{ github.workflow_ref }}-${{ github.head_ref || github.ref_name }}\n\n # If this is enabled it will cancel current running and start latest\n cancel-in-progress: true\n\njobs:\n autofix:\n runs-on: ubuntu-latest\n steps:\n - name: Checkout\n uses: actions/checkout@v4\n\n - name: Install Node.js\n uses: actions/setup-node@v4\n with:\n node-version: 18\n\n - uses: pnpm/action-setup@v4\n name: Install pnpm\n with:\n version: 9\n run_install: false\n\n - name: Get pnpm store directory\n shell: bash\n run: |\n echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV\n\n - uses: actions/cache@v4\n name: Setup pnpm cache\n with:\n path: ${{ env.STORE_PATH }}\n key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}\n restore-keys: |\n ${{ runner.os }}-pnpm-store-\n\n - name: Install dependencies\n run: pnpm install\n\n - name: Fix lint\n run: pnpm lint:fix\n\n - uses: autofix-ci/action@551dded8c6cc8a1054039c8bc0b8b48c51dfc6ef\n
| dataset_sample\yaml\TabbyML_tabby\.github\workflows\autofix-pnpm.yml | autofix-pnpm.yml | YAML | 1,330 | 0.8 | 0 | 0.022222 | node-utils | 617 | 2023-09-13T18:11:56.796651 | BSD-3-Clause | false | 1fc1a54458dc3c2d8a33f2c85c437526 |
name: autofix.ci\n\non:\n pull_request:\n branches: [ "main" ]\n paths:\n - '.github/workflows/autofix-python.yml'\n - 'python/**'\n\npermissions:\n contents: read\n\nconcurrency:\n group: ${{ github.workflow_ref }}-${{ github.head_ref || github.ref_name }} \n \n # If this is enabled it will cancel current running and start latest\n cancel-in-progress: true\n\njobs:\n autofix:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v3\n with:\n submodules: recursive\n\n - uses: chartboost/ruff-action@v1\n with:\n src: "./python"\n args: --fix\n\n - uses: autofix-ci/action@551dded8c6cc8a1054039c8bc0b8b48c51dfc6ef\n
| dataset_sample\yaml\TabbyML_tabby\.github\workflows\autofix-python.yml | autofix-python.yml | YAML | 679 | 0.8 | 0 | 0.04 | awesome-app | 799 | 2025-03-02T15:36:13.313429 | BSD-3-Clause | false | 734752b030c6132be2ff018aed5923d4 |
name: autofix.ci\n\non:\n pull_request:\n branches: ["main" ]\n paths:\n - '.github/workflows/autofix-rust.yml'\n - 'Cargo.toml'\n - 'Cargo.lock'\n - 'crates/**'\n - 'ee/**'\n - '!ee/tabby-ui/**'\n - '!ee/tabby-email/**'\n\npermissions:\n contents: read\n\nconcurrency:\n group: ${{ github.workflow_ref }}-${{ github.head_ref || github.ref_name }}\n\n # If this is enabled it will cancel current running and start latest\n cancel-in-progress: true\n\njobs:\n autofix:\n env:\n CARGO_TERM_COLOR: always\n SCCACHE_GHA_ENABLED: true\n RUSTC_WRAPPER: sccache\n CARGO_INCREMENTAL: 0\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v3\n with:\n submodules: recursive\n\n - name: Install Rust\n uses: actions-rs/toolchain@v1\n with:\n toolchain: nightly\n components: rustfmt, clippy\n\n - name: Sccache cache\n uses: mozilla-actions/[email protected]\n\n - name: Install cargo-machete\n uses: actions-rs/cargo@v1\n with:\n command: install\n args: --version 0.7.0 cargo-machete\n\n - name: Cargo registry cache\n uses: actions/cache@v4\n with:\n key: cargo-${{ runner.os }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.sha }}\n restore-keys: |\n cargo-${{ runner.os }}-${{ hashFiles('**/Cargo.lock') }}-\n cargo-${{ runner.os }}-\n path: |\n ~/.cargo/registry\n ~/.cargo/git\n\n - run: sudo bash ./ci/prepare_build_environment.sh\n\n - run: make fix\n\n - run: make update-graphql-schema\n\n - uses: autofix-ci/action@551dded8c6cc8a1054039c8bc0b8b48c51dfc6ef\n
| dataset_sample\yaml\TabbyML_tabby\.github\workflows\autofix-rust.yml | autofix-rust.yml | YAML | 1,695 | 0.8 | 0 | 0.017857 | awesome-app | 828 | 2025-02-21T21:34:22.113390 | MIT | false | be1739f097a12fedf160155ad4dafbc8 |
on: # rebuild any PRs and main branch changes\n pull_request:\n branches: ["main"]\n paths:\n - '.github/workflows/bloat.yml'\n - 'Cargo.toml'\n - 'Cargo.lock'\n - 'crates/**'\n - 'ee/**'\n - '!ee/tabby-ui/**'\n push:\n branches:\n - main\n\nname: bloat\n\npermissions: write-all\n\nconcurrency:\n group: ${{ github.workflow_ref }}-${{ github.head_ref || github.ref_name }}\n\n # If this is enabled it will cancel current running and start latest\n cancel-in-progress: true\n\njobs:\n cargo_bloat:\n env:\n CARGO_TERM_COLOR: always\n SCCACHE_GHA_ENABLED: true\n RUSTC_WRAPPER: sccache\n CARGO_INCREMENTAL: 0\n\n runs-on: ubuntu-latest\n\n steps:\n - uses: actions/checkout@v3\n with:\n submodules: recursive\n\n - name: Install Rust\n uses: actions-rs/toolchain@v1\n with:\n toolchain: stable\n\n - name: Sccache cache\n uses: mozilla-actions/[email protected]\n\n - name: Cargo registry cache\n uses: actions/cache@v4\n with:\n key: cargo-${{ runner.os }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.sha }}\n restore-keys: |\n cargo-${{ runner.os }}-${{ hashFiles('**/Cargo.lock') }}-\n cargo-${{ runner.os }}-\n path: |\n ~/.cargo/registry\n ~/.cargo/git\n\n - run: sudo bash ./ci/prepare_build_environment.sh\n\n - name: Run cargo bloat\n uses: wsxiaoys/cargo-bloat-action@master\n with:\n token: ${{ secrets.GITHUB_TOKEN }}\n include_packages: tabby
| dataset_sample\yaml\TabbyML_tabby\.github\workflows\bloat.yml | bloat.yml | YAML | 1,566 | 0.8 | 0 | 0.018868 | vue-tools | 320 | 2024-07-18T02:33:14.494729 | MIT | false | 098d63ba00dc051907dc459b19d5f2cf |
name: Create and publish docker image\n\non:\n workflow_dispatch:\n schedule:\n - cron: "0 20 */1 * *"\n push:\n tags:\n - "v*"\n - "!*-dev.*"\n - "!vscode@*"\n - '!vim@*'\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.head_ref || github.ref_name }}\n\n # If this is enabled it will cancel current running and start latest\n cancel-in-progress: true\n\nenv:\n RUST_TOOLCHAIN: 1.82.0\n\njobs:\n release-docker:\n runs-on: buildjet-2vcpu-ubuntu-2204\n permissions:\n contents: read\n packages: write\n # This is used to complete the identity challenge\n # with sigstore/fulcio when running outside of PRs.\n id-token: write\n\n strategy:\n matrix:\n device-type: [cuda]\n include:\n - device-type: cuda\n image-suffix: ""\n\n steps:\n - name: Free Disk Space (Ubuntu)\n uses: jlumbroso/free-disk-space@main\n with:\n # this might remove tools that are actually needed,\n # if set to "true" but frees about 6 GB\n tool-cache: true\n\n # all of these default to true, but feel free to set to\n # "false" if necessary for your workflow\n android: true\n dotnet: true\n haskell: true\n large-packages: false\n swap-storage: true\n\n - name: Checkout repository\n uses: actions/checkout@v4\n with:\n submodules: recursive\n\n # Workaround: https://github.com/docker/build-push-action/issues/461\n - name: Setup Docker buildx\n uses: docker/setup-buildx-action@v3\n\n # Login against a Docker registry except on PR\n # https://github.com/docker/login-action\n - name: Log into GitHub Container registry\n uses: docker/login-action@v3\n with:\n registry: ghcr.io\n username: ${{ github.actor }}\n password: ${{ secrets.GITHUB_TOKEN }}\n\n - name: Log into Docker Hub\n uses: docker/login-action@v3\n with:\n username: ${{ secrets.DOCKERHUB_USERNAME }}\n password: ${{ secrets.DOCKERHUB_TOKEN }}\n\n - name: Generate image name\n env:\n IMAGE_SUFFIX: ${{ matrix.image-suffix }}\n run: |\n echo "IMAGE_NAME=${GITHUB_REPOSITORY,,}${IMAGE_SUFFIX}" >>${GITHUB_ENV}\n\n - uses: int128/docker-build-cache-config-action@v1\n id: cache\n with:\n image: ghcr.io/${{ env.IMAGE_NAME }}/cache\n\n - name: Docker meta\n id: meta\n uses: docker/metadata-action@v5\n with:\n # list of Docker images to use as base name for tags\n images: |\n ghcr.io/${{ env.IMAGE_NAME }}\n ${{ env.IMAGE_NAME }}\n # generate Docker tags based on the following events/attributes\n tags: |\n type=raw,value={{branch}}-{{sha}},enable=${{ startsWith(github.ref, 'refs/heads') }}\n type=schedule,pattern=nightly\n type=schedule,pattern={{date 'YYYYMMDD'}}\n type=semver,pattern={{version}}\n\n # Build and push Docker image with Buildx (don't push on PR)\n # https://github.com/docker/build-push-action\n - name: Build and push Docker image\n id: build-and-push\n uses: docker/build-push-action@v5\n with:\n file: docker/Dockerfile.${{ matrix.device-type }}\n push: true\n context: .\n tags: ${{ steps.meta.outputs.tags }}\n labels: ${{ steps.meta.outputs.labels }}\n cache-from: ${{ steps.cache.outputs.cache-from }}\n cache-to: ${{ steps.cache.outputs.cache-to }}\n build-args: RUST_TOOLCHAIN=${{ env.RUST_TOOLCHAIN }}\n\n - name: Docker Hub Description\n uses: peter-evans/dockerhub-description@v4\n with:\n username: ${{ secrets.DOCKERHUB_USERNAME }}\n password: ${{ secrets.DOCKERHUB_TOKEN }}\n repository: tabbyml/tabby\n
| dataset_sample\yaml\TabbyML_tabby\.github\workflows\docker.yml | docker.yml | YAML | 3,871 | 0.8 | 0.031746 | 0.12844 | node-utils | 706 | 2025-05-11T01:47:10.344529 | BSD-3-Clause | false | 8541d4e30489c76800526f4040682aa9 |
name: Deploy static content to Pages\n\non:\n # Runs on pushes targeting the default branch\n push:\n branches: [main]\n paths:\n - '.github/workflows/gh-pages.yml'\n - 'website/**'\n\n # Allows you to run this workflow manually from the Actions tab\n workflow_dispatch:\n\n# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages\npermissions:\n contents: read\n pages: write\n id-token: write\n\n# Allow one concurrent deployment\nconcurrency:\n group: "pages"\n cancel-in-progress: true\n\nenv:\n # Hosted GitHub runners have 7 GB of memory available, let's use 6 GB\n NODE_OPTIONS: --max-old-space-size=6144\n\njobs:\n # Single deploy job since we're just deploying\n deploy:\n environment:\n name: github-pages\n url: ${{ steps.deployment.outputs.page_url }}\n runs-on: ubuntu-latest\n steps:\n - name: Checkout\n uses: actions/checkout@v3\n with:\n lfs: true\n - run: git lfs checkout\n - name: Set up Node.js\n uses: actions/setup-node@v3\n with:\n node-version: 18.x\n cache: yarn\n cache-dependency-path: website/yarn.lock\n - name: Install dependencies\n working-directory: website\n run: yarn install --frozen-lockfile --non-interactive\n - name: Build\n working-directory: website\n run: yarn build\n - name: Setup Pages\n uses: actions/configure-pages@v5\n - name: Upload artifact\n uses: actions/upload-pages-artifact@v3\n with:\n path: website/build\n - name: Deploy to GitHub Pages\n id: deployment\n uses: actions/deploy-pages@v4\n
| dataset_sample\yaml\TabbyML_tabby\.github\workflows\gh-pages.yml | gh-pages.yml | YAML | 1,628 | 0.8 | 0 | 0.107143 | vue-tools | 660 | 2024-09-19T00:24:24.702043 | MIT | false | 8ff0a55d174598716d5ca8aa5c0ee243 |
name: GPT Translate\n\non:\n issue_comment:\n types: [ created ]\n\npermissions:\n pull-requests: write\n issues: write\n contents: write\n\njobs:\n gpt_translate:\n runs-on: ubuntu-latest\n\n steps:\n - uses: actions/checkout@v4\n\n - name: Run GPT Translate\n if: |\n contains(github.event.comment.body, '/gpt-translate') || \n contains(github.event.comment.body, '/gt')\n uses: 3ru/gpt-translate@master\n with:\n apikey: ${{ secrets.OPENAI_API_KEY }}\n
| dataset_sample\yaml\TabbyML_tabby\.github\workflows\gpt-translate.yml | gpt-translate.yml | YAML | 502 | 0.7 | 0.04 | 0 | awesome-app | 674 | 2023-09-09T17:47:56.004511 | Apache-2.0 | false | 87603d6035a2815f2a6fc4decf4bb7f5 |
name: Update nightly tag\n\non:\n workflow_dispatch:\n schedule:\n - cron: '0 20 */1 * *'\n\njobs:\n sync-branches:\n runs-on: ubuntu-latest\n name: Update nightly tag\n steps:\n - name: Checkout\n uses: actions/checkout@v2\n with:\n ssh-key: "${{ secrets.COMMIT_KEY }}"\n \n - name: Run latest-tag\n uses: EndBug/latest-tag@latest\n with:\n ref: nightly\n
| dataset_sample\yaml\TabbyML_tabby\.github\workflows\nightly.yml | nightly.yml | YAML | 415 | 0.7 | 0 | 0 | python-kit | 205 | 2023-08-30T23:33:43.883969 | Apache-2.0 | false | 0dd1bc726016a96226fdb921a193afc9 |
name: Release Tabby Plugin for IntelliJ Platform\n\non:\n workflow_dispatch:\n push:\n tags:\n - "intellij@*"\n\nconcurrency:\n group: ${{ github.workflow_ref }}-${{ github.head_ref || github.ref_name }}\n\n # If this is enabled it will cancel current running and start latest\n cancel-in-progress: true\n\njobs:\n release-marketplace:\n runs-on: ubuntu-latest\n steps:\n - name: Checkout\n uses: actions/checkout@v3\n with:\n lfs: true\n\n - name: Setup JDK\n uses: actions/setup-java@v3\n with:\n distribution: zulu\n java-version: 17\n\n - name: Install Node.js\n uses: actions/setup-node@v4\n with:\n node-version: 18\n\n - name: Install pnpm\n uses: pnpm/action-setup@v4\n with:\n version: 9\n run_install: false\n\n - name: Get pnpm store directory\n shell: bash\n run: |\n echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV\n\n - name: Setup pnpm cache\n uses: actions/cache@v4\n with:\n path: ${{ env.STORE_PATH }}\n key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}\n restore-keys: |\n ${{ runner.os }}-pnpm-store-\n\n - name: Install dependencies\n run: pnpm install\n\n - name: Determine Publish Channel\n run: |\n if [[ ${{ github.ref_name }} =~ ^intellij@[0-9]+\.[0-9]+\.[0-9]+$ ]]; then\n echo "PUBLISH_CHANNEL=stable" >> $GITHUB_ENV\n else\n echo "PUBLISH_CHANNEL=alpha" >> $GITHUB_ENV\n fi\n\n - name: Check Publish Channel\n run: echo "Publish Channel is ${{ env.PUBLISH_CHANNEL }}"\n\n - name: Publish Plugin to Marketplace\n env:\n CERTIFICATE_CHAIN: ${{ secrets.INTELLIJ_PLUGIN_CERTIFICATE_CHAIN }}\n PRIVATE_KEY: ${{ secrets.INTELLIJ_PLUGIN_PRIVATE_KEY }}\n PUBLISH_TOKEN: ${{ secrets.INTELLIJ_PLUGIN_PUBLISH_TOKEN }}\n PUBLISH_CHANNEL: ${{ env.PUBLISH_CHANNEL }}\n uses: gradle/[email protected]\n with:\n arguments: publishPlugin\n build-root-directory: clients/intellij\n\n release-github:\n runs-on: ubuntu-latest\n permissions:\n contents: write\n steps:\n - name: Checkout\n uses: actions/checkout@v3\n with:\n lfs: true\n\n - name: Setup JDK\n uses: actions/setup-java@v3\n with:\n distribution: zulu\n java-version: 17\n\n - name: Install Node.js\n uses: actions/setup-node@v4\n with:\n node-version: 18\n\n - name: Install pnpm\n uses: pnpm/action-setup@v4\n with:\n version: 9\n run_install: false\n\n - name: Get pnpm store directory\n shell: bash\n run: |\n echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV\n\n - name: Setup pnpm cache\n uses: actions/cache@v4\n with:\n path: ${{ env.STORE_PATH }}\n key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}\n restore-keys: |\n ${{ runner.os }}-pnpm-store-\n\n - name: Install dependencies\n run: pnpm install\n\n - name: Build Signed Plugin\n env:\n CERTIFICATE_CHAIN: ${{ secrets.INTELLIJ_PLUGIN_CERTIFICATE_CHAIN }}\n PRIVATE_KEY: ${{ secrets.INTELLIJ_PLUGIN_PRIVATE_KEY }}\n uses: gradle/[email protected]\n with:\n arguments: signPlugin\n build-root-directory: clients/intellij\n\n - name: Determine is stable release\n run: |\n if [[ ${{ github.ref_name }} =~ ^intellij@[0-9]+\.[0-9]+\.[0-9]+$ ]]; then\n echo "STABLE_RELEASE=true" >> $GITHUB_ENV\n else\n echo "STABLE_RELEASE=false" >> $GITHUB_ENV\n fi\n\n - name: Check if stable release\n run: echo "Stable Release is ${{ env.STABLE_RELEASE }}"\n\n - name: Create GitHub Release\n uses: ncipollo/release-action@v1\n with:\n allowUpdates: true\n prerelease: ${{ env.STABLE_RELEASE == 'false' }}\n makeLatest: false\n tag: ${{ github.ref_name }}\n removeArtifacts: true\n artifacts: "clients/intellij/build/distributions/intellij-tabby-signed.zip"\n
| dataset_sample\yaml\TabbyML_tabby\.github\workflows\release-intellij.yml | release-intellij.yml | YAML | 4,264 | 0.8 | 0.026667 | 0.007937 | node-utils | 974 | 2025-02-08T09:28:33.954406 | BSD-3-Clause | false | ffe393d793b1fd1569ba3920e2785860 |
name: Release Vim Plugin\n\non:\n workflow_dispatch:\n push:\n tags:\n - 'vim@*'\n\njobs:\n release:\n runs-on: ubuntu-latest\n steps:\n - name: Checkout tabby repo\n uses: actions/checkout@v3\n with:\n lfs: true\n path: tabby\n - name: Checkout vim-tabby repo\n uses: actions/checkout@v3\n with:\n repository: TabbyML/vim-tabby\n token: ${{ secrets.VIM_RELEASE_GH_TOKEN }}\n lfs: true\n path: vim-tabby\n - name: Copy files\n run: |\n find vim-tabby/ -mindepth 1 ! -regex '^vim-tabby/\.git\(/.*\)?' -delete\n cp -r tabby/clients/vim/.gitignore vim-tabby/\n cp -r tabby/clients/vim/* vim-tabby/\n - name: Get version\n id: get_version\n run: |\n version=$(cat vim-tabby/autoload/tabby.vim | grep "let g:tabby_version" | sed -e 's/.*"\(.*\)".*/\1/')\n echo "::set-output name=version::$version"\n - name: Commit files\n run: |\n cd vim-tabby/\n git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com"\n git config --local user.name "github-actions[bot]"\n git add .\n git commit -m "release: vim-tabby version ${{ steps.get_version.outputs.version }}"\n git tag ${{ steps.get_version.outputs.version }}\n - name: Push changes\n uses: ad-m/github-push-action@master\n with:\n repository: TabbyML/vim-tabby\n github_token: ${{ secrets.VIM_RELEASE_GH_TOKEN }}\n directory: vim-tabby\n force: true\n tags: true\n
| dataset_sample\yaml\TabbyML_tabby\.github\workflows\release-vim.yml | release-vim.yml | YAML | 1,526 | 0.8 | 0 | 0 | node-utils | 764 | 2024-10-01T19:52:44.358365 | Apache-2.0 | false | 5da229177505df9d10e3fe42a223d610 |
name: Release vscode extension\n\non:\n workflow_dispatch:\n push:\n tags:\n - 'vscode@*'\n\nconcurrency:\n group: ${{ github.workflow_ref }}-${{ github.head_ref || github.ref_name }} \n \n # If this is enabled it will cancel current running and start latest\n cancel-in-progress: true\n\njobs:\n publish-vscode:\n runs-on: ubuntu-latest\n steps:\n - name: Checkout\n uses: actions/checkout@v4\n with:\n lfs: true\n\n - name: Install Node.js\n uses: actions/setup-node@v4\n with:\n node-version: 18\n\n - uses: pnpm/action-setup@v4\n name: Install pnpm\n with:\n version: 9\n run_install: false\n\n - name: Get pnpm store directory\n shell: bash\n run: |\n echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV\n\n - uses: actions/cache@v4\n name: Setup pnpm cache\n with:\n path: ${{ env.STORE_PATH }}\n key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}\n restore-keys: |\n ${{ runner.os }}-pnpm-store-\n\n - name: Install dependencies\n run: pnpm install\n\n - name: Publish\n run: cd clients/vscode && pnpm run $(node scripts/publish.cjs)\n env:\n VSCE_PAT: ${{ secrets.VSCE_PAT }}\n OVSX_PAT: ${{ secrets.OVSX_PAT }}\n
| dataset_sample\yaml\TabbyML_tabby\.github\workflows\release-vscode.yml | release-vscode.yml | YAML | 1,343 | 0.8 | 0 | 0.022222 | node-utils | 739 | 2023-08-11T19:07:26.251909 | MIT | false | cead21cd35b9a27a03e322f78c7c8750 |
name: Build and release binaries.\n\non:\n workflow_dispatch:\n push:\n tags:\n - 'v*'\n - 'nightly'\n - "!*-dev.*"\n - '!vscode@*'\n - '!vim@*'\n pull_request:\n branches: [ "main" ]\n paths:\n - '.github/workflows/release.yml'\n\nconcurrency:\n group: ${{ github.workflow_ref }}-${{ github.head_ref || github.ref_name }}\n\n # If this is enabled it will cancel current running and start latest\n cancel-in-progress: true\n\nenv:\n RUST_TOOLCHAIN: 1.82.0\n\njobs:\n release-llama-server-binary:\n runs-on: ${{ matrix.os }}\n container: ${{ matrix.container }}\n strategy:\n matrix:\n binary:\n - aarch64-apple-darwin\n - x86_64-manylinux_2_28\n - x86_64-manylinux_2_28-cuda123\n - x86_64-manylinux_2_28-vulkan\n - x86_64-windows-msvc\n - x86_64-windows-msvc-vulkan\n # - x86_64-windows-msvc-cuda117\n # - x86_64-windows-msvc-cuda122\n include:\n - os: macos-latest\n target: aarch64-apple-darwin\n binary: aarch64-apple-darwin\n build_args: --features binary\n - os: buildjet-2vcpu-ubuntu-2204\n target: x86_64-unknown-linux-gnu\n binary: x86_64-manylinux_2_28\n container: quay.io/pypa/manylinux_2_28_x86_64\n build_args: --features binary\n - os: buildjet-2vcpu-ubuntu-2204\n target: x86_64-unknown-linux-gnu\n binary: x86_64-manylinux_2_28-cuda123\n container: sameli/manylinux_2_28_x86_64_cuda_12.3@sha256:e12416bf249ab312f9dcfdebd7939b968dd6f1b6f810abbede818df875e86a7c\n build_args: --features binary,cuda\n - os: buildjet-4vcpu-ubuntu-2204\n target: x86_64-unknown-linux-gnu\n binary: x86_64-manylinux_2_28-vulkan\n container: quay.io/pypa/manylinux_2_28_x86_64\n build_args: --features binary,vulkan\n vulkan_sdk: '1.3.239.0'\n - os: windows-2019\n target: x86_64-pc-windows-msvc\n binary: x86_64-windows-msvc\n build_args: --features binary\n ext: .exe\n - os: windows-2019\n target: x86_64-pc-windows-msvc\n binary: x86_64-windows-msvc-vulkan\n ext: .exe\n build_args: --features vulkan,binary\n vulkan_sdk: '1.3.280.0'\n # - os: windows-2019\n # target: x86_64-pc-windows-msvc\n # binary: x86_64-windows-msvc-cuda117\n # ext: .exe\n # build_args: --features cuda,binary\n # windows_cuda: '11.7.1'\n # - os: windows-2019\n # target: x86_64-pc-windows-msvc\n # binary: x86_64-windows-msvc-cuda122\n # ext: .exe\n # build_args: --features cuda,binary\n # windows_cuda: '12.2.0'\n\n env:\n SCCACHE_GHA_ENABLED: true\n RUSTC_WRAPPER: sccache\n CARGO_INCREMENTAL: 0\n ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true\n\n steps:\n - name: Checkout\n uses: actions/checkout@v3\n with:\n submodules: recursive\n\n - name: Install Rust\n if: runner.os != 'Windows'\n uses: actions-rs/toolchain@v1\n with:\n toolchain: ${{ env.RUST_TOOLCHAIN }}\n target: ${{ matrix.target }}\n components: clippy\n\n - name: Install Rust for Windows\n if: runner.os == 'Windows'\n run: |\n rustup update --no-self-update ${{ env.RUST_TOOLCHAIN }}\n rustup target add ${{ matrix.target }}\n rustup component add clippy --toolchain ${{ env.RUST_TOOLCHAIN }}\n\n - name: Set default rust version\n run: rustup default ${{ env.RUST_TOOLCHAIN }}\n\n - name: Sccache cache\n uses: mozilla-actions/[email protected]\n\n - name: Cargo registry cache\n uses: actions/cache@v4\n with:\n key: cargo-${{ runner.os }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.sha }}\n restore-keys: |\n cargo-${{ runner.os }}-${{ hashFiles('**/Cargo.lock') }}-\n cargo-${{ runner.os }}-\n path: |\n ~/.cargo/registry\n ~/.cargo/git\n\n - name: Prepare build environment for macOS & Linux\n run: bash ./ci/prepare_build_environment.sh\n if: runner.os != 'Windows'\n\n - name: Prepare build 
environment for Windows\n run: ./ci/prepare_build_environment.ps1\n if: runner.os == 'Windows'\n\n - name: Install CUDA toolkit for Windows\n uses: Jimver/[email protected]\n with:\n cuda: ${{ matrix.windows_cuda }}\n method: 'network'\n sub-packages: '["nvcc", "cudart", "cublas", "cublas_dev", "thrust", "visual_studio_integration"]'\n if: runner.os == 'Windows' && matrix.windows_cuda != ''\n\n - name: Install Vulkan SDK\n uses: TabbyML/[email protected]\n with:\n version: ${{ matrix.vulkan_sdk }}\n cache: true\n if: matrix.vulkan_sdk != ''\n\n - name: Bulid release binary\n run: cargo build ${{ matrix.build_args }} --release --target ${{ matrix.target }} --package llama-cpp-server\n\n - name: Rename release binary\n run: mv target/${{ matrix.target }}/release/llama-server${{ matrix.ext }} llama-server_${{ matrix.binary }}${{ matrix.ext }}\n\n - name: Upload artifacts\n uses: actions/upload-artifact@v4\n with:\n retention-days: 3\n name: llama-server_${{ matrix.binary }}${{ matrix.ext }}\n path: llama-server_${{ matrix.binary }}${{ matrix.ext }}\n\n release-binary:\n runs-on: ${{ matrix.os }}\n container: ${{ matrix.container }}\n strategy:\n matrix:\n binary:\n - aarch64-apple-darwin\n - x86_64-manylinux_2_28\n - x86_64-windows-msvc\n include:\n - os: macos-latest\n target: aarch64-apple-darwin\n binary: aarch64-apple-darwin\n build_args: --no-default-features --features prod\n - os: buildjet-2vcpu-ubuntu-2204\n target: x86_64-unknown-linux-gnu\n binary: x86_64-manylinux_2_28\n container: quay.io/pypa/manylinux_2_28_x86_64\n build_args: --no-default-features --features static-ssl,prod\n - os: windows-latest\n target: x86_64-pc-windows-msvc\n binary: x86_64-windows-msvc\n build_args: --no-default-features --features prod\n ext: .exe\n\n env:\n SCCACHE_GHA_ENABLED: true\n RUSTC_WRAPPER: sccache\n CARGO_INCREMENTAL: 0\n ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true\n\n steps:\n - name: Checkout\n uses: actions/checkout@v3\n with:\n submodules: recursive\n\n - name: Install Rust\n if: runner.os != 'Windows'\n uses: actions-rs/toolchain@v1\n with:\n toolchain: ${{ env.RUST_TOOLCHAIN }}\n target: ${{ matrix.target }}\n components: clippy\n\n - name: Install Rust for Windows\n if: runner.os == 'Windows'\n run: |\n rustup update --no-self-update ${{ env.RUST_TOOLCHAIN }}\n rustup target add ${{ matrix.target }}\n rustup component add clippy --toolchain ${{ env.RUST_TOOLCHAIN }}\n\n - name: Set default rust version\n run: rustup default ${{ env.RUST_TOOLCHAIN }}\n\n - name: Sccache cache\n uses: mozilla-actions/[email protected]\n\n - name: Cargo registry cache\n uses: actions/cache@v4\n with:\n key: cargo-${{ runner.os }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.sha }}\n restore-keys: |\n cargo-${{ runner.os }}-${{ hashFiles('**/Cargo.lock') }}-\n cargo-${{ runner.os }}-\n path: |\n ~/.cargo/registry\n ~/.cargo/git\n\n - name: Prepare build environment for macOS & Linux\n run: bash ./ci/prepare_build_environment.sh\n if: runner.os != 'Windows'\n\n - name: Prepare build environment for Windows\n run: ./ci/prepare_build_environment.ps1\n if: runner.os == 'Windows'\n\n - name: Build release binary\n run: cargo build ${{ matrix.build_args }} --release --target ${{ matrix.target }} --package tabby\n\n - name: Rename release binary\n run: mv target/${{ matrix.target }}/release/tabby${{ matrix.ext }} tabby_${{ matrix.binary }}${{ matrix.ext }}\n\n - name: Upload artifacts\n uses: actions/upload-artifact@v4\n with:\n retention-days: 3\n name: tabby_${{ matrix.binary }}${{ matrix.ext }}\n path: 
tabby_${{ matrix.binary }}${{ matrix.ext }}\n\n package-win-cuda:\n runs-on: ubuntu-latest\n needs: [release-binary]\n permissions:\n contents: write\n steps:\n - name: Checkout\n uses: actions/checkout@v3\n with:\n submodules: recursive\n\n - name: Download all artifacts\n uses: actions/download-artifact@v4\n\n - name: Display structure of downloaded files\n run: ls -R\n\n - name: Package CUDA 11.7\n run: >\n LLAMA_CPP_PLATFORM=cuda-cu11.7-x64 OUTPUT_NAME=tabby_x86_64-windows-msvc-cuda117 ./ci/package-win.sh\n\n - name: Package CUDA 12.4\n run: >\n LLAMA_CPP_PLATFORM=cuda-cu12.4-x64 OUTPUT_NAME=tabby_x86_64-windows-msvc-cuda124 ./ci/package-win.sh\n\n - name: Upload artifacts\n uses: actions/upload-artifact@v4\n with:\n retention-days: 3\n name: dist\n path: dist/\n\n\n pre-release:\n needs: [release-llama-server-binary, release-binary, package-win-cuda]\n runs-on: ubuntu-latest\n permissions:\n contents: write\n steps:\n - name: Download all artifacts\n uses: actions/download-artifact@v4\n\n - name: Display structure of downloaded files\n run: ls -R\n\n - name: Creating distribution bundles\n run: |\n get_file_extension() {\n local filename="$1"\n # Check if the file has an extension\n if [[ "$filename" == *.* && ! "$filename" == .* ]]; then\n echo ".${filename##*.}"\n else\n echo ""\n fi\n }\n\n dist_dir=$(pwd)/dist\n mkdir -p $dist_dir\n\n for llama_server in llama-server_*/llama-server_*; do\n for tabby in tabby_*/tabby_*; do\n llamab=$(basename $llama_server)\n tabbyb=$(basename $tabby)\n extname=$(get_file_extension $tabbyb)\n\n llaman=${llamab%.*}\n tabbyn=${tabbyb%.*}\n\n llamav=${llaman#llama-server_}\n tabbyv=${tabbyn#tabby_}\n\n if [[ $llamav == *"$tabbyv"* ]]; then\n echo "Creating bundle for $llamav"\n\n # the downloaded files may have the same folder name with release_dir\n # put the release files in a new folder\n build_dir=build\n release_name=tabby_${llamav}\n release_dir=$build_dir/$release_name\n mkdir -p $release_dir\n cp $llama_server $release_dir/llama-server${extname}\n cp $tabby $release_dir/tabby${extname}\n\n pushd $build_dir\n # Release zip for Windows, tar.gz for macOS and Linux\n # use `extname` to determine the platform\n if [[ "$extname" == ".exe" ]]; then\n zip -r $release_name.zip $release_name\n mv $release_name.zip $dist_dir/\n else\n chmod +x $release_name/llama-server${extname} $release_name/tabby${extname}\n tar zcvf $release_name.tar.gz $release_name\n mv $release_name.tar.gz $dist_dir/\n fi\n rm -rf "$release_name"\n popd\n fi\n done\n done\n\n - name: Display structure of created files\n run: ls -R dist\n\n - uses: ncipollo/release-action@v1\n if: github.event_name == 'push'\n with:\n allowUpdates: true\n prerelease: true\n makeLatest: false\n artifacts: "dist/tabby_*.zip,dist/tabby_*.tar.gz"\n tag: ${{ github.ref_name }}\n removeArtifacts: true\n
| dataset_sample\yaml\TabbyML_tabby\.github\workflows\release.yml | release.yml | YAML | 12,149 | 0.8 | 0.07337 | 0.063091 | vue-tools | 147 | 2025-05-16T05:25:51.638999 | Apache-2.0 | false | 277f4310e135ea4c3b4dcf48f4e49ad1 |
name: tabby\nroot: ./\n\nwindows:\n - caddy:\n panes:\n - caddy run --watch --config ee/tabby-webserver/development/Caddyfile\n - server:\n layout: even-horizontal\n panes:\n - cargo run serve --port 8081\n - cd ee/tabby-ui && pnpm dev\n
| dataset_sample\yaml\TabbyML_tabby\.tmuxinator\tabby.yml | tabby.yml | YAML | 263 | 0.7 | 0 | 0 | vue-tools | 747 | 2025-06-19T06:38:45.206728 | Apache-2.0 | false | 10a6dc1e3eb2147163290304b5f6ef36 |
id: do-not-use-logkit-crate\nmessage: Don't use logkit crate with use statement to avoid conflicts with the tracing crate. logkit crate is only used for background job logging to enrich the jobs output in admin UI.\nseverity: error\nlanguage: rust\nfiles:\n- ./**\nrule:\n pattern: use logkit::$$$;
| dataset_sample\yaml\TabbyML_tabby\rules\do-not-use-logkit-crate.yml | do-not-use-logkit-crate.yml | YAML | 293 | 0.8 | 0.142857 | 0 | vue-tools | 920 | 2025-04-13T12:34:42.984248 | BSD-3-Clause | false | 960602b3a90608dd1e11dc155cd0ac3c |
id: do-not-use-next-pages\nmessage: Don't use next pages routing as we're fully commited to app router.\nseverity: error\nlanguage: typescript\nfiles:\n- ./ee/tabby-ui/**\nrule:\n pattern: import $$$ from 'next/router'
| dataset_sample\yaml\TabbyML_tabby\rules\do-not-use-next-pages.yml | do-not-use-next-pages.yml | YAML | 212 | 0.95 | 0 | 0 | vue-tools | 782 | 2024-10-07T09:17:22.398733 | BSD-3-Clause | false | 1f90d86c4aa8162e2fa3347067f5deb3 |
id: only-dao-can-depend-tabby-db\nmessage: Only dao can depend on tabby-db\nseverity: error\nlanguage: rust\nfiles:\n- ./ee/tabby-schema/src/**\nignores:\n- ./ee/tabby-schema/src/dao.rs\n- ./ee/tabby-schema/src/policy.rs\nrule:\n pattern: tabby_db\n
| dataset_sample\yaml\TabbyML_tabby\rules\only-dao-and-policy-can-depend-tabby-db.yml | only-dao-and-policy-can-depend-tabby-db.yml | YAML | 239 | 0.8 | 0 | 0 | python-kit | 86 | 2025-04-20T13:57:36.541997 | Apache-2.0 | false | 4092a2a98a7218e64a7d94db545b3c32 |
id: use-basic-job\nmessage: Use BasicJob / CronJob for worker creation.\nseverity: error\nlanguage: rust\nfiles:\n- ./ee/tabby-webserver/src/service/background_job/**\nignores:\n- ./ee/tabby-webserver/src/service/background_job/helper/mod.rs\nrule:\n pattern: WorkerBuilder
| dataset_sample\yaml\TabbyML_tabby\rules\use-basic-job.yml | use-basic-job.yml | YAML | 265 | 0.8 | 0.111111 | 0 | vue-tools | 980 | 2024-06-21T05:01:10.818666 | Apache-2.0 | false | fd7eff8a1a81c0777f4fdbd785345e08 |
id: use-schema-result\nmessage: Use schema::Result as API interface\nseverity: error\nlanguage: rust\nfiles:\n- ./ee/tabby-schema/src/**\nignores:\n- ./ee/tabby-schema/src/lib.rs\n- ./ee/tabby-schema/src/dao.rs\nrule:\n any:\n - pattern: anyhow\n not:\n inside:\n kind: enum_variant\n stopBy: end\n - pattern: FieldResult
| dataset_sample\yaml\TabbyML_tabby\rules\use-schema-result.yml | use-schema-result.yml | YAML | 342 | 0.8 | 0 | 0 | react-lib | 589 | 2024-06-15T02:36:08.747699 | BSD-3-Clause | false | 793ceaf1657eb3a7f7e90c0187cf80c7 |
id: validate-requires-code\nmessage: Validations requires code / message being set for frontend error display\nseverity: error\nlanguage: rust\nfiles:\n - ./ee/tabby-webserver/src/**\n - ./ee/tabby-schema/src/**\nrule:\n all:\n - pattern: "#[validate]"\n - not:\n all:\n - has:\n stopBy: end\n pattern: code\n - has:\n stopBy: end\n pattern: message\n - not:\n any:\n - has:\n stopBy: end\n pattern: custom\n - has:\n stopBy: end\n pattern: nested\n - has:\n stopBy: end\n pattern: schema\n
| dataset_sample\yaml\TabbyML_tabby\rules\validate-requires-code.yml | validate-requires-code.yml | YAML | 662 | 0.95 | 0.034483 | 0 | node-utils | 100 | 2025-04-10T13:21:06.369791 | MIT | false | bebd9319a3819a3d46b3ed0c6d7e83bd |
meng:\n name: Meng Zhang\n url: https://github.com/wsxiaoys\n image_url: https://github.com/wsxiaoys.png\n\ngyxlucy:\n name: Lucy Gao\n url: https://github.com/gyxlucy\n image_url: https://github.com/gyxlucy.png\n\nicycodes:\n name: Zhiming Ma\n url: https://github.com/icycodes\n image_url: https://github.com/icycodes.png\n\nwwayne:\n name: Wayne Wang\n url: https://github.com/wwayne\n image_url: https://github.com/wwayne.png\n\nboxbeam:\n name: June Marcuse\n url: https://github.com/boxbeam\n image_url: https://github.com/boxbeam.png\n
| dataset_sample\yaml\TabbyML_tabby\website\blog\authors.yml | authors.yml | YAML | 533 | 0.8 | 0 | 0 | vue-tools | 270 | 2025-04-28T06:56:15.606126 | Apache-2.0 | false | 0e0b524a36b83160f957c16dd9d3bd1e |
version: '3.5'\n\nservices:\n worker-0:\n restart: always\n image: tabbyml/tabby\n command: serve --model TabbyML/StarCoder-1B --device cuda --no-webserver\n volumes:\n - "$HOME/.tabby:/data"\n deploy:\n resources:\n reservations:\n devices:\n - driver: nvidia\n device_ids: ["0"]\n capabilities: [gpu]\n\n worker-1:\n restart: always\n image: tabbyml/tabby\n command: serve --model TabbyML/StarCoder-1B --device cuda --no-webserver\n volumes:\n - "$HOME/.tabby:/data"\n deploy:\n resources:\n reservations:\n devices:\n - driver: nvidia\n device_ids: ["1"]\n capabilities: [gpu]\n\n web:\n image: caddy\n volumes:\n - "./Caddyfile:/etc/caddy/Caddyfile:ro"\n ports:\n - "8080:8080"\n
| dataset_sample\yaml\TabbyML_tabby\website\blog\2024-03-26-tabby-with-replicas-behind-reverse-proxy\docker-compose.yml | docker-compose.yml | YAML | 824 | 0.7 | 0 | 0 | python-kit | 396 | 2024-09-01T05:23:57.367279 | MIT | false | 2e52731bee9b1fe59e732c3e7e9db3d5 |
version: "2"\n# Configuration for how we run golangci-lint\n# Timeout of 5m was the default in v1.\nrun:\n timeout: 5m\nlinters:\n # Don't enable any linters by default; just the ones that we explicitly\n # enable in the list below.\n default: none\n enable:\n - bidichk\n - govet\n - misspell\n - revive\n settings:\n # Matches what we use in corp as of 2023-12-07\n govet:\n enable:\n - asmdecl\n - assign\n - atomic\n - bools\n - buildtag\n - cgocall\n - copylocks\n - deepequalerrors\n - errorsas\n - framepointer\n - httpresponse\n - ifaceassert\n - loopclosure\n - lostcancel\n - nilfunc\n - nilness\n - printf\n - reflectvaluecompare\n - shift\n - sigchanyzer\n - sortslice\n - stdmethods\n - stringintconv\n - structtag\n - testinggoroutine\n - tests\n - unmarshal\n - unreachable\n - unsafeptr\n - unusedresult\n settings:\n printf:\n # List of print function names to check (in addition to default)\n funcs:\n - github.com/tailscale/tailscale/types/logger.Discard\n # NOTE(andrew-d): this doesn't currently work because the printf\n # analyzer doesn't support type declarations\n #- github.com/tailscale/tailscale/types/logger.Logf\n revive:\n enable-all-rules: false\n rules:\n - name: atomic\n - name: context-keys-type\n - name: defer\n arguments: [[\n # Calling 'recover' at the time a defer is registered (i.e. "defer recover()") has no effect.\n "immediate-recover",\n # Calling 'recover' outside of a deferred function has no effect\n "recover",\n # Returning values from a deferred function has no effect\n "return",\n ]]\n - name: duplicated-imports\n - name: errorf\n - name: string-of-int\n - name: time-equal\n - name: unconditional-recursion\n - name: useless-break\n - name: waitgroup-by-value\n exclusions:\n generated: lax\n presets:\n - comments\n - common-false-positives\n - legacy\n - std-error-handling\n rules:\n # These are forks of an upstream package and thus are exempt from stylistic\n # changes that would make pulling in upstream changes harder.\n - path: tempfork/.*\.go\n text: File is not `gofmt`-ed with `-s` `-r 'interface{} -> any'`\n - path: util/singleflight/.*\.go\n text: File is not `gofmt`-ed with `-s` `-r 'interface{} -> any'`\n paths:\n - third_party$\n - builtin$\n - examples$\nformatters:\n enable:\n - gofmt\n - goimports\n settings:\n gofmt:\n rewrite-rules:\n - pattern: interface{}\n replacement: any\n exclusions:\n generated: lax\n paths:\n - third_party$\n - builtin$\n - examples$\n
| dataset_sample\yaml\tailscale_tailscale\.golangci.yml | .golangci.yml | YAML | 2,965 | 0.95 | 0.036364 | 0.127273 | react-lib | 203 | 2024-03-28T12:16:53.955387 | GPL-3.0 | false | f85df84ae719b162993038cba04af0c7 |
# To get started with Dependabot version updates, you'll need to specify which\n# package ecosystems to update and where the package manifests are located.\n# Please see the documentation for all configuration options:\n# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates\n\nversion: 2\nupdates:\n - package-ecosystem: "github-actions" # See documentation for possible values\n directory: "/" # Location of package manifests\n schedule:\n interval: "daily"\n
| dataset_sample\yaml\talkgo_night\.github\dependabot.yml | dependabot.yml | YAML | 512 | 0.8 | 0.272727 | 0.4 | node-utils | 618 | 2024-02-25T12:37:19.243096 | BSD-3-Clause | false | b03333fc1c40567ae3835206f597ad2e |
# Configuration for weekly-digest - https://github.com/apps/weekly-digest\npublishDay: "thursday"\ncanPublishIssues: true\ncanPublishPullRequests: true\ncanPublishContributors: true\ncanPublishStargazers: true\ncanPublishCommits: true
| dataset_sample\yaml\talkgo_night\.github\weekly-digest.yml | weekly-digest.yml | YAML | 228 | 0.8 | 0.166667 | 0.142857 | vue-tools | 698 | 2024-01-05T20:07:56.300105 | MIT | false | ea183a67251aacb795092741a10badba |
name: Links\n\non:\n repository_dispatch:\n workflow_dispatch:\n schedule:\n - cron: "0 0 1 * *"\n\njobs:\n linkChecker:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/[email protected]\n\n - name: Link Checker\n uses: lycheeverse/[email protected]\n with:\n args: --verbose --no-progress **/*.md **/*.html\n env:\n GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}\n\n - name: Create Issue From File\n uses: peter-evans/create-issue-from-file@v5\n with:\n title: Link Checker Report\n content-filepath: ./lychee/out.md\n labels: report, automated issue
| dataset_sample\yaml\talkgo_night\.github\workflows\links.yml | links.yml | YAML | 631 | 0.8 | 0 | 0 | python-kit | 595 | 2024-12-22T09:30:20.953719 | MIT | false | f89a235c6040b749ff3117ecb0632c68 |
name: auto_monthly_recommend\n\non:\n schedule:\n - cron: "0 12 1 * *"\n\njobs:\n build:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/[email protected]\n - name: build-go\n run: |\n go env -w GO111MODULE=on\n cd actions && go build -o monthly monthly.go\n - name: create-issue\n env:\n GITHUB_TOKEN: ${{ secrets.TOKEN }}\n run: cd actions &&./monthly -c ./issuesinfo.json\n
| dataset_sample\yaml\talkgo_night\.github\workflows\monthly.yml | monthly.yml | YAML | 431 | 0.7 | 0 | 0 | react-lib | 186 | 2025-02-22T10:12:59.655619 | GPL-3.0 | false | 198f89c2a30a3027f733bd6155640d99 |
version: "3"\n\nservices:\n db:\n container_name: postgres\n image: postgres\n restart: always\n ports:\n - 5432:5432\n volumes:\n - ./volumes/postgresql:/var/lib/postgresql/data\n environment:\n - POSTGRES_USER=postgres\n - POSTGRES_DB=ginexamples\n
|
dataset_sample\yaml\talkgo_night\examples\gin_examples\docker-compose-dev.yml
|
docker-compose-dev.yml
|
YAML
| 275 | 0.7 | 0 | 0 |
awesome-app
| 469 |
2023-12-19T22:05:16.917703
|
GPL-3.0
| false |
9136d35641bd28e18aa5a96beeba83c0
|
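The docker-compose-dev.yml above starts the official postgres image with POSTGRES_USER and POSTGRES_DB but no password; current postgres images refuse to initialize unless POSTGRES_PASSWORD (or an explicit POSTGRES_HOST_AUTH_METHOD=trust) is set. A minimal sketch of the same service with that variable added, assuming a throwaway password is acceptable for local development:

    # Sketch only; POSTGRES_PASSWORD is an assumed addition, the original file sets none.
    version: "3"
    services:
      db:
        container_name: postgres
        image: postgres
        restart: always
        ports:
          - 5432:5432
        volumes:
          - ./volumes/postgresql:/var/lib/postgresql/data
        environment:
          - POSTGRES_USER=postgres
          - POSTGRES_PASSWORD=postgres   # required by the official image
          - POSTGRES_DB=ginexamples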
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nversion: 2\nupdates:\n # Crates\n - package-ecosystem: 'cargo'\n directory: '/crates/tauri'\n schedule:\n internal: 'daily'\n labels:\n - 'type: chore'\n # disable version updates\n open-pull-requests-limit: 0\n - package-ecosystem: 'cargo'\n directory: '/crates/tauri-build'\n schedule:\n internal: 'daily'\n labels:\n - 'type: chore'\n # disable version updates\n open-pull-requests-limit: 0\n - package-ecosystem: 'cargo'\n directory: '/crates/tauri-codegen'\n schedule:\n internal: 'daily'\n labels:\n - 'type: chore'\n # disable version updates\n open-pull-requests-limit: 0\n - package-ecosystem: 'cargo'\n directory: '/crates/tauri-macros'\n schedule:\n internal: 'daily'\n labels:\n - 'type: chore'\n # disable version updates\n open-pull-requests-limit: 0\n - package-ecosystem: 'cargo'\n directory: '/crates/tauri-runtime'\n schedule:\n internal: 'daily'\n labels:\n - 'type: chore'\n # disable version updates\n open-pull-requests-limit: 0\n - package-ecosystem: 'cargo'\n directory: '/crates/tauri-runtime-wry'\n schedule:\n internal: 'daily'\n labels:\n - 'type: chore'\n # disable version updates\n open-pull-requests-limit: 0\n - package-ecosystem: 'cargo'\n directory: '/crates/tauri-utils'\n schedule:\n internal: 'daily'\n labels:\n - 'type: chore'\n # disable version updates\n open-pull-requests-limit: 0\n - package-ecosystem: 'cargo'\n directory: '/crates/tauri-cli'\n schedule:\n internal: 'daily'\n labels:\n - 'type: chore'\n # disable version updates\n open-pull-requests-limit: 0\n - package-ecosystem: 'cargo'\n directory: '/crates/tauri-bundler'\n schedule:\n internal: 'daily'\n labels:\n - 'type: chore'\n # disable version updates\n open-pull-requests-limit: 0\n - package-ecosystem: 'cargo'\n directory: '/crates/tauri-macos-sign'\n schedule:\n internal: 'daily'\n labels:\n - 'type: chore'\n # disable version updates\n open-pull-requests-limit: 0\n\n # NPM Packages\n - package-ecosystem: 'npm'\n directory: '/packages/api'\n schedule:\n internal: 'daily'\n labels:\n - 'type: chore'\n # disable version updates\n open-pull-requests-limit: 0\n - package-ecosystem: 'npm'\n directory: '/packages/cli'\n schedule:\n internal: 'daily'\n labels:\n - 'type: chore'\n # disable version updates\n open-pull-requests-limit: 0\n
|
dataset_sample\yaml\tauri-apps_tauri\dependabot.yml
|
dependabot.yml
|
YAML
| 2,606 | 0.8 | 0 | 0.165049 |
python-kit
| 125 |
2024-04-30T16:38:15.161516
|
MIT
| false |
dd52ee2b469d3adf867c97ed9058a960
|
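In the tauri dependabot.yml above, every schedule block uses the key internal; Dependabot's configuration schema expects interval, so the schedule entries as written would be flagged as invalid. The original content is kept unchanged above; a minimal corrected sketch of a single entry:

    # Sketch of one corrected entry; 'interval' replaces the 'internal' key used in the original.
    version: 2
    updates:
      - package-ecosystem: 'cargo'
        directory: '/crates/tauri'
        schedule:
          interval: 'daily'
        labels:
          - 'type: chore'
        # disable version updates
        open-pull-requests-limit: 0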
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\n# These are supported funding model platforms\n\ngithub: tauri-apps\npatreon: #\nopen_collective: tauri\nko_fi: # Replace with a single Ko-fi username\ntidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel\ncustom: # Replace with a single custom sponsorship URL\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\FUNDING.yml
|
FUNDING.yml
|
YAML
| 426 | 0.8 | 0 | 0.4 |
python-kit
| 596 |
2024-01-15T19:47:20.975934
|
GPL-3.0
| false |
f716b4c5aa0ab8fa8f24235b9e2a2378
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: 🐞 Bug Report\ntitle: '[bug] '\ndescription: Report a bug\nlabels: ['type: bug', 'status: needs triage']\n\nbody:\n - type: markdown\n attributes:\n value: |\n ## First of all\n 1. Please search for [existing issues](https://github.com/tauri-apps/tauri/issues?q=is%3Aissue) about this problem first.\n 2. Make sure `rustc` and all relevant Tauri packages are up to date.\n 3. Make sure it's an issue with Tauri and not something else you are using.\n 4. Remember to follow our community guidelines and be friendly.\n\n - type: textarea\n id: description\n attributes:\n label: Describe the bug\n description: A clear description of what the bug is. Include screenshots if applicable.\n placeholder: Bug description\n validations:\n required: true\n\n - type: textarea\n id: reproduction\n attributes:\n label: Reproduction\n description: A link to a reproduction repo or steps to reproduce the behaviour.\n placeholder: |\n Please provide a minimal reproduction or steps to reproduce, see this guide https://stackoverflow.com/help/minimal-reproducible-example\n Why reproduction is required? see this article https://antfu.me/posts/why-reproductions-are-required\n\n - type: textarea\n id: expected-behavior\n attributes:\n label: Expected behavior\n description: A clear description of what you expected to happen.\n\n - type: textarea\n id: info\n attributes:\n label: Full `tauri info` output\n description: 'Output of `npm run tauri info` or `cargo tauri info`'\n render: text\n validations:\n required: true\n\n - type: textarea\n id: logs\n attributes:\n label: Stack trace\n render: text\n\n - type: textarea\n id: context\n attributes:\n label: Additional context\n description: Add any other context about the problem here.\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\ISSUE_TEMPLATE\bug_report.yml
|
bug_report.yml
|
YAML
| 2,013 | 0.95 | 0.031746 | 0.072727 |
vue-tools
| 571 |
2025-03-09T14:18:09.451706
|
Apache-2.0
| false |
580d850bc2bdec4e08012034b888c60d
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\ncontact_links:\n - name: 💬 Discord Chat\n url: https://discord.com/invite/tauri\n about: Ask questions and talk to other Tauri users and the maintainers\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\ISSUE_TEMPLATE\config.yml
|
config.yml
|
YAML
| 299 | 0.8 | 0 | 0.428571 |
node-utils
| 381 |
2024-07-24T08:26:41.115901
|
MIT
| false |
a9fbec8a3018396a00bc1acee0cad7b1
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: 💡 Feature Request\ntitle: '[feat] '\ndescription: Suggest an idea\nlabels: ['type: feature request']\n\nbody:\n - type: textarea\n id: problem\n attributes:\n label: Describe the problem\n description: A clear description of the problem this feature would solve\n placeholder: "I'm always frustrated when..."\n validations:\n required: true\n\n - type: textarea\n id: solution\n attributes:\n label: "Describe the solution you'd like"\n description: A clear description of what change you would like\n placeholder: 'I would like to...'\n validations:\n required: true\n\n - type: textarea\n id: alternatives\n attributes:\n label: Alternatives considered\n description: "Any alternative solutions you've considered"\n\n - type: textarea\n id: context\n attributes:\n label: Additional context\n description: Add any other context about the problem here.\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\ISSUE_TEMPLATE\feature_request.yml
|
feature_request.yml
|
YAML
| 1,062 | 0.95 | 0 | 0.088235 |
react-lib
| 803 |
2024-03-09T05:54:26.955242
|
MIT
| false |
71db5cac3f049dc7b1da978705f79b0c
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: Audit\n\non:\n workflow_dispatch:\n schedule:\n - cron: '0 0 * * *'\n push:\n paths:\n - '.github/workflows/audit.yml'\n - '**/Cargo.lock'\n - '**/Cargo.toml'\n - '**/package.json'\n - '**/pnpm-lock.yaml'\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.ref }}\n cancel-in-progress: true\n\njobs:\n audit-rust:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - name: rust audit\n uses: rustsec/audit-check@v1\n with:\n token: ${{ secrets.GITHUB_TOKEN }}\n\n audit-js:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - run: npm i -g --force corepack\n - uses: actions/setup-node@v4\n with:\n node-version: 'lts/*'\n - run: pnpm audit\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\workflows\audit.yml
|
audit.yml
|
YAML
| 907 | 0.8 | 0 | 0.083333 |
awesome-app
| 483 |
2025-05-22T21:22:57.360595
|
BSD-3-Clause
| false |
73f12bebe12040966c14f92e36603978
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: bench\n\non:\n push:\n branches:\n - dev\n workflow_dispatch:\n pull_request:\n paths:\n - '.github/workflows/bench.yml'\n - 'bench/**'\n\nenv:\n RUST_BACKTRACE: 1\n CARGO_PROFILE_DEV_DEBUG: 0 # This would add unnecessary bloat to the target folder, decreasing cache efficiency.\n LC_ALL: en_US.UTF-8 # This prevents strace from changing its number format to use commas.\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.ref }}\n cancel-in-progress: true\n\njobs:\n bench:\n strategy:\n fail-fast: false\n matrix:\n rust: [nightly]\n platform:\n - { target: x86_64-unknown-linux-gnu, os: ubuntu-latest }\n\n runs-on: ${{ matrix.platform.os }}\n\n steps:\n - uses: actions/checkout@v4\n\n - name: install Rust ${{ matrix.rust }}\n uses: dtolnay/rust-toolchain@master\n with:\n toolchain: ${{ matrix.rust }}\n components: rust-src\n targets: ${{ matrix.platform.target }}\n\n - name: setup python\n uses: actions/setup-python@v5\n with:\n python-version: '3.10'\n architecture: x64\n\n - name: install dependencies\n run: |\n python -m pip install --upgrade pip\n sudo apt-get update\n sudo apt-get install -y --no-install-recommends \\n webkit2gtk-4.1 libayatana-appindicator3-dev \\n xvfb \\n at-spi2-core\n wget https://github.com/sharkdp/hyperfine/releases/download/v1.18.0/hyperfine_1.18.0_amd64.deb\n sudo dpkg -i hyperfine_1.18.0_amd64.deb\n pip install memory_profiler\n\n - uses: Swatinem/rust-cache@v2\n\n - name: run benchmarks\n run: |\n cargo build --manifest-path bench/tests/cpu_intensive/src-tauri/Cargo.toml --release -Z build-std=std,panic_abort -Z build-std-features=panic_immediate_abort --target ${{ matrix.platform.target }}\n cargo build --manifest-path bench/tests/files_transfer/src-tauri/Cargo.toml --release -Z build-std=std,panic_abort -Z build-std-features=panic_immediate_abort --target ${{ matrix.platform.target }}\n cargo build --manifest-path bench/tests/helloworld/src-tauri/Cargo.toml --release -Z build-std=std,panic_abort -Z build-std-features=panic_immediate_abort --target ${{ matrix.platform.target }}\n xvfb-run --auto-servernum cargo run --manifest-path bench/Cargo.toml --bin run_benchmark\n\n - name: clone benchmarks_results\n if: github.repository == 'tauri-apps/tauri' && github.ref == 'refs/heads/dev'\n uses: actions/checkout@v4\n with:\n token: ${{ secrets.BENCH_PAT }}\n path: gh-pages\n repository: tauri-apps/benchmark_results\n\n - name: push new benchmarks\n if: github.repository == 'tauri-apps/tauri' && github.ref == 'refs/heads/dev'\n run: |\n cargo run --manifest-path bench/Cargo.toml --bin build_benchmark_jsons\n cd gh-pages\n git pull\n git config user.name "tauri-bench"\n git config user.email "[email protected]"\n git add .\n git commit --message "Update Tauri benchmarks"\n git push origin gh-pages\n\n - name: Print worker info\n run: |\n cat /proc/cpuinfo\n cat /proc/meminfo\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\workflows\bench.yml
|
bench.yml
|
YAML
| 3,398 | 0.8 | 0.020619 | 0.036585 |
vue-tools
| 852 |
2024-07-23T22:31:28.152370
|
Apache-2.0
| false |
67fd2c4559f84ca38f65bf315011a46c
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: check change tags\n\non:\n pull_request:\n paths:\n - '.changes/*.md'\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.ref }}\n cancel-in-progress: true\n\njobs:\n check:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n\n - name: check change files end with .md\n run: |\n for file in .changes/*\n do\n if [[ ! "$file" =~ \.(md|json)$ ]]; then\n echo ".changes directory should only contain files that end with .md"\n echo "found an invalid file in .changes directory:"\n echo "$file"\n exit 1\n fi\n done\n\n - uses: dorny/paths-filter@v3\n id: filter\n with:\n list-files: shell\n filters: |\n changes:\n - added|modified: '.changes/*.md'\n\n - name: check\n run: node ./.scripts/ci/check-change-tags.js ${{ steps.filter.outputs.changes_files }}\n if: ${{ steps.filter.outputs.changes == 'true' }}\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\workflows\check-change-tags.yml
|
check-change-tags.yml
|
YAML
| 1,149 | 0.8 | 0.068182 | 0.081081 |
react-lib
| 19 |
2024-01-08T09:28:35.647403
|
MIT
| false |
967d7c6d605f8a6f780da886e7697fc6
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: check generated files\n\non:\n pull_request:\n paths:\n - '.github/workflows/check-generated-files.yml'\n - 'packages/api/src/**'\n - 'crates/tauri/scripts/bundle.global.js'\n - 'crates/tauri-utils/src/config.rs'\n - 'crates/tauri-cli/config.schema.json'\n - 'crates/tauri-schema-generator/schemas/*.json'\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.ref }}\n cancel-in-progress: true\n\njobs:\n changes:\n runs-on: ubuntu-latest\n outputs:\n api: ${{ steps.filter.outputs.api }}\n schema: ${{ steps.filter.outputs.schema }}\n steps:\n - uses: actions/checkout@v4\n - uses: dorny/paths-filter@v3\n id: filter\n with:\n filters: |\n api:\n - 'packages/api/src/**'\n - 'crates/tauri/scripts/bundle.global.js'\n schema:\n - 'crates/tauri-utils/src/config.rs'\n - 'crates/tauri-cli/config.schema.json'\n - 'crates/tauri-schema-generator/schemas/*.json'\n\n api:\n runs-on: ubuntu-latest\n needs: changes\n if: needs.changes.outputs.api == 'true'\n steps:\n - uses: actions/checkout@v4\n - run: npm i -g --force corepack\n - uses: actions/setup-node@v4\n with:\n node-version: 'lts/*'\n cache: 'pnpm'\n\n - name: install deps\n run: pnpm i --frozen-lockfile\n - name: build api\n run: pnpm build\n working-directory: packages/api\n - name: check api\n run: ./.scripts/ci/has-diff.sh\n\n schema:\n runs-on: ubuntu-latest\n needs: changes\n if: needs.changes.outputs.schema == 'true'\n steps:\n - uses: actions/checkout@v4\n\n - name: install stable\n uses: dtolnay/rust-toolchain@stable\n\n - name: install Linux dependencies\n run: |\n sudo apt-get update\n sudo apt-get install -y libgtk-3-dev\n\n - uses: Swatinem/rust-cache@v2\n\n - name: generate schemas\n run: cargo build --manifest-path ./crates/tauri-schema-generator/Cargo.toml\n\n - name: check schemas\n run: ./.scripts/ci/has-diff.sh\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\workflows\check-generated-files.yml
|
check-generated-files.yml
|
YAML
| 2,234 | 0.8 | 0.02439 | 0.042857 |
react-lib
| 147 |
2024-05-09T11:20:03.795546
|
MIT
| false |
5213c8a4ffc0720c6237eb632aaf638a
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: check license headers\n\non:\n pull_request:\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.ref }}\n cancel-in-progress: true\n\njobs:\n check:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - uses: dorny/paths-filter@v3\n id: filter\n with:\n list-files: shell\n filters: |\n added:\n - added: '**'\n - name: check header license on new files\n if: ${{ steps.filter.outputs.added == 'true' }}\n run: node ./.scripts/ci/check-license-header.js ${{ steps.filter.outputs.added_files }}\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\workflows\check-license-header.yml
|
check-license-header.yml
|
YAML
| 732 | 0.8 | 0.035714 | 0.125 |
node-utils
| 765 |
2024-07-10T12:10:26.099019
|
GPL-3.0
| false |
b9375634fe61cc1dbb5f8693412a6007
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: covector comment\non:\n workflow_run:\n workflows: [covector status] # the `name` of the workflow run on `pull_request` running `status` with `comment: true`\n types:\n - completed\n\n# note all other permissions are set to none if not specified\n# and these set the permissions for `secrets.GITHUB_TOKEN`\npermissions:\n # to read the action artifacts on `covector status` workflows\n actions: read\n # to write the comment\n pull-requests: write\n\njobs:\n comment:\n runs-on: ubuntu-latest\n if: github.event.workflow_run.conclusion == 'success' &&\n (github.event.workflow_run.head_repository.full_name != github.repository || github.actor == 'dependabot[bot]')\n steps:\n - name: covector status\n uses: jbolda/covector/packages/action@covector-v0\n with:\n token: ${{ secrets.GITHUB_TOKEN }}\n command: 'status'\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\workflows\covector-comment-on-fork.yml
|
covector-comment-on-fork.yml
|
YAML
| 1,007 | 0.8 | 0.1 | 0.259259 |
react-lib
| 876 |
2025-03-11T04:39:08.525260
|
GPL-3.0
| false |
3143293e70f099fd859d3eb56ada3b4c
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: covector status\non: [pull_request]\n\njobs:\n covector:\n runs-on: ubuntu-latest\n\n steps:\n - uses: actions/checkout@v4\n with:\n fetch-depth: 0\n - name: covector status\n uses: jbolda/covector/packages/action@covector-v0\n id: covector\n with:\n command: 'status'\n token: ${{ secrets.GITHUB_TOKEN }}\n comment: true\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\workflows\covector-status.yml
|
covector-status.yml
|
YAML
| 531 | 0.8 | 0 | 0.157895 |
python-kit
| 923 |
2024-01-29T17:55:10.088699
|
Apache-2.0
| false |
9dac3ac2e349f624fd6bd8fce67735e6
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: covector version or publish\n\non:\n push:\n branches:\n - dev\n\njobs:\n run-integration-tests:\n runs-on: ${{ matrix.platform }}\n\n strategy:\n fail-fast: false\n matrix:\n platform: [ubuntu-latest, macos-latest, windows-latest]\n\n steps:\n - uses: actions/checkout@v4\n with:\n fetch-depth: 0\n\n - run: npm i -g --force corepack\n - uses: actions/setup-node@v4\n with:\n node-version: 20\n cache: 'pnpm'\n\n - name: install stable\n uses: dtolnay/rust-toolchain@stable\n\n - name: install Linux dependencies\n if: matrix.platform == 'ubuntu-latest'\n run: |\n sudo apt-get update\n sudo apt-get install -y webkit2gtk-4.1 libayatana-appindicator3-dev libfuse2 librsvg2-dev\n\n - uses: Swatinem/rust-cache@v2\n\n - name: build CLI\n run: cargo build --manifest-path ./crates/tauri-cli/Cargo.toml\n\n - name: run integration tests\n run: cargo test --test '*' -- --ignored\n\n - name: run CLI tests\n timeout-minutes: 30\n run: |\n cd ./packages/cli\n pnpm i --frozen-lockfile\n pnpm build\n pnpm test\n\n version-or-publish:\n runs-on: ubuntu-latest\n timeout-minutes: 65\n permissions:\n actions: write # required for workflow_dispatch\n contents: write # required to create new releases\n pull-requests: write # required to open version update pr\n id-token: write # pnpm provenance\n outputs:\n change: ${{ steps.covector.outputs.change }}\n commandRan: ${{ steps.covector.outputs.commandRan }}\n successfulPublish: ${{ steps.covector.outputs.successfulPublish }}\n needs:\n - run-integration-tests\n\n steps:\n - uses: actions/checkout@v4\n with:\n fetch-depth: 0\n - run: npm i -g --force corepack\n - uses: actions/setup-node@v4\n with:\n node-version: 20\n registry-url: 'https://registry.npmjs.org'\n cache: 'pnpm'\n\n - name: cargo login\n run: cargo login ${{ secrets.ORG_CRATES_IO_TOKEN }}\n - name: git config\n run: |\n git config --global user.name "${{ github.event.pusher.name }}"\n git config --global user.email "${{ github.event.pusher.email }}"\n\n - name: install Linux dependencies\n run: |\n sudo apt-get update\n sudo apt-get install -y webkit2gtk-4.1 libayatana-appindicator3-dev librsvg2-dev\n\n - name: covector version or publish (publish when no change files present)\n uses: jbolda/covector/packages/action@covector-v0\n id: covector\n env:\n NODE_AUTH_TOKEN: ${{ secrets.ORG_NPM_TOKEN }}\n CARGO_AUDIT_OPTIONS: ${{ secrets.CARGO_AUDIT_OPTIONS }}\n NPM_CONFIG_PROVENANCE: true\n with:\n command: 'version-or-publish'\n token: ${{ secrets.GITHUB_TOKEN }}\n createRelease: true\n recognizeContributors: true\n\n - name: Sync Cargo.lock\n if: steps.covector.outputs.commandRan == 'version'\n run: cargo tree --depth 0\n\n - name: Create Pull Request With Versions Bumped\n if: steps.covector.outputs.commandRan == 'version'\n uses: peter-evans/create-pull-request@67ccf781d68cd99b580ae25a5c18a1cc84ffff1f # 7.0.6\n with:\n token: ${{ secrets.GITHUB_TOKEN }}\n branch: release/version-updates\n title: Apply Version Updates From Current Changes\n commit-message: 'apply version updates'\n labels: 'version updates'\n body: ${{ steps.covector.outputs.change }}\n\n - name: Trigger doc update\n if: |\n steps.covector.outputs.successfulPublish == 'true' &&\n steps.covector.outputs.packagesPublished != ''\n uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # 3.0.0\n with:\n token: ${{ secrets.ORG_TAURI_BOT_PAT }}\n repository: 
tauri-apps/tauri-docs\n event-type: update-docs\n\n - name: Trigger `@tauri-apps/cli` publishing workflow\n if: |\n steps.covector.outputs.successfulPublish == 'true' &&\n contains(steps.covector.outputs.packagesPublished, '@tauri-apps/cli')\n run: gh workflow run 31554138 -r dev -f releaseId=${{ steps.covector.outputs['-tauri-apps-cli-releaseId'] }}\n env:\n GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n\n - name: Trigger `tauri-cli` publishing workflow\n if: |\n steps.covector.outputs.successfulPublish == 'true' &&\n contains(steps.covector.outputs.packagesPublished, 'tauri-cli')\n run: gh workflow run 31554139 -r dev\n env:\n GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\workflows\covector-version-or-publish.yml
|
covector-version-or-publish.yml
|
YAML
| 4,851 | 0.95 | 0.047619 | 0.024 |
python-kit
| 819 |
2025-04-04T09:16:12.695044
|
MIT
| false |
850379374a4958499954e40306a4245d
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: deploy schema worker\n\non:\n push:\n branches:\n - dev\n paths:\n - '.github/workflows/deploy-schema-worker.yml'\n - 'crates/tauri-schema-worker/**'\n\njobs:\n deploy:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - uses: cloudflare/wrangler-action@v3\n with:\n command: deploy\n workingDirectory: 'crates/tauri-schema-worker'\n apiToken: ${{ secrets.SCHEMA_WORKER_CLOUDFLARE_API_TOKEN }}\n accountId: ${{ secrets.SCHEMA_WORKER_CLOUDFLARE_ACCOUNT_ID }}\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\workflows\deploy-schema-worker.yml
|
deploy-schema-worker.yml
|
YAML
| 682 | 0.8 | 0 | 0.136364 |
vue-tools
| 874 |
2025-02-09T09:30:09.461271
|
MIT
| false |
951f3eafdccd6339659ec05e0f7c1afa
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: docker\n\non:\n workflow_dispatch:\n #pull_request:\n # paths:\n # - '.docker/**'\n # - '.github/workflows/docker.yml'\n\njobs:\n setup:\n runs-on: ubuntu-latest\n\n steps:\n - name: Checkout\n uses: actions/checkout@v4\n\n - name: install stable\n uses: dtolnay/rust-toolchain@stable\n\n - name: install Linux dependencies\n run: |\n sudo apt-get update\n sudo apt-get install -y libgtk-3-dev\n\n - name: install cross\n run: cargo install cross --git https://github.com/cross-rs/cross\n\n - name: Upload cross\n uses: actions/upload-artifact@v4\n with:\n name: cross\n path: '~/.cargo/bin/cross'\n if-no-files-found: error\n\n - name: build CLI\n run: cargo build --manifest-path ./crates/tauri-cli/Cargo.toml\n\n - name: Upload CLI\n uses: actions/upload-artifact@v4\n with:\n name: cargo-tauri\n path: crates/tauri-cli/target/debug/cargo-tauri\n if-no-files-found: error\n\n docker:\n needs: setup\n runs-on: ubuntu-latest\n\n strategy:\n fail-fast: false\n matrix:\n target:\n - { name: 'aarch64-unknown-linux-gnu', filename: 'aarch64' }\n\n steps:\n - name: Checkout\n uses: actions/checkout@v4\n\n - name: install stable\n uses: dtolnay/rust-toolchain@stable\n with:\n targets: ${{ matrix.target.name }}\n\n - run: npm i -g --force corepack\n - name: Setup node\n uses: actions/setup-node@v4\n with:\n node-version: 'lts/*'\n\n - name: Download cross\n uses: actions/[email protected]\n with:\n name: cross\n path: '~/.cargo/bin'\n\n - name: Download CLI\n uses: actions/[email protected]\n with:\n name: cargo-tauri\n path: 'examples/api'\n\n - name: Set up QEMU\n uses: docker/setup-qemu-action@v2\n\n - name: Set up Docker Buildx\n uses: docker/setup-buildx-action@v2\n\n - name: Login to GitHub Container Registry\n uses: docker/login-action@v2\n with:\n registry: ghcr.io\n username: ${{ github.repository_owner }}\n password: ${{ secrets.GITHUB_TOKEN }}\n\n - name: Build and export to Docker\n uses: docker/build-push-action@v3\n with:\n context: .docker/cross\n file: .docker/cross/${{ matrix.target.filename }}.Dockerfile\n load: true\n tags: ${{ matrix.target.name }}:latest\n\n - name: install dependencies\n run: |\n sudo apt-get update\n sudo apt-get install -y webkit2gtk-4.1 libayatana-appindicator3-dev\n\n - name: Test\n run: |\n cd packages/api\n pnpm i --frozen-lockfile && pnpm build\n cd ../../examples/api\n pnpm i --frozen-lockfile\n . .setup-cross.sh\n chmod +x cargo-tauri\n chmod +x $HOME/.cargo/bin/cross\n ./cargo-tauri build --runner cross --bundles deb --target ${{ matrix.target.name }} --verbose\n\n - name: Build and push\n uses: docker/build-push-action@v3\n with:\n context: .docker/cross\n file: .docker/cross/${{ matrix.target.filename }}.Dockerfile\n push: true\n tags: ghcr.io/${{ github.repository }}/${{ matrix.target.name }}:latest\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\workflows\docker.yml
|
docker.yml
|
YAML
| 3,484 | 0.8 | 0.015385 | 0.066038 |
react-lib
| 987 |
2024-10-26T09:32:01.962732
|
Apache-2.0
| false |
dfcd3f883cf1ad4bf9ab9179770853f0
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: check formatting\n\non:\n pull_request:\n\njobs:\n rustfmt:\n runs-on: ubuntu-latest\n\n steps:\n - uses: actions/checkout@v4\n\n - name: install Rust stable and rustfmt\n uses: dtolnay/rust-toolchain@stable\n with:\n components: rustfmt\n\n - name: run cargo fmt\n run: cargo fmt --all -- --check\n\n prettier:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - run: npm i -g --force corepack\n - uses: actions/setup-node@v4\n with:\n node-version: 'lts/*'\n cache: 'pnpm'\n - run: pnpm i --frozen-lockfile\n - run: pnpm format:check\n\n taplo:\n name: taplo (.toml files)\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n\n - name: install Rust stable\n uses: dtolnay/rust-toolchain@stable\n\n - name: install taplo-cli\n uses: taiki-e/install-action@v2\n with:\n tool: taplo-cli\n\n - run: taplo fmt --check --diff\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\workflows\fmt.yml
|
fmt.yml
|
YAML
| 1,120 | 0.8 | 0 | 0.075 |
node-utils
| 394 |
2025-01-29T09:14:45.397929
|
MIT
| false |
42dbde9ce380f29c98d3669a5cd0de2c
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: lint js\n\non:\n pull_request:\n paths:\n - '.github/workflows/lint-js.yml'\n - 'packages/**'\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.ref }}\n cancel-in-progress: true\n\njobs:\n eslint:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - run: npm i -g --force corepack\n - uses: actions/setup-node@v4\n with:\n node-version: 'lts/*'\n cache: 'pnpm'\n - run: pnpm i --frozen-lockfile\n - run: pnpm eslint:check\n\n typescript:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - run: npm i -g --force corepack\n - uses: actions/setup-node@v4\n with:\n node-version: 'lts/*'\n cache: 'pnpm'\n - run: pnpm i --frozen-lockfile\n - run: pnpm ts:check\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\workflows\lint-js.yml
|
lint-js.yml
|
YAML
| 938 | 0.8 | 0 | 0.085714 |
react-lib
| 866 |
2025-05-15T10:46:49.332120
|
MIT
| false |
8b612ad5b558f5873211f80e666cc391
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: lint rust\n\non:\n push:\n branches:\n - dev\n pull_request:\n paths:\n - '.github/workflows/lint-cli.yml'\n - 'crates/**'\n\nenv:\n RUST_BACKTRACE: 1\n CARGO_PROFILE_DEV_DEBUG: 0 # This would add unnecessary bloat to the target folder, decreasing cache efficiency.\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.ref }}\n cancel-in-progress: true\n\njobs:\n clippy:\n runs-on: ubuntu-latest\n\n steps:\n - uses: actions/checkout@v4\n\n - name: install rust stable and clippy\n uses: dtolnay/rust-toolchain@stable\n with:\n components: clippy\n\n - name: install dependencies\n run: |\n sudo apt-get update\n sudo apt-get install -y libgtk-3-dev webkit2gtk-4.1 libayatana-appindicator3-dev\n\n - uses: Swatinem/rust-cache@v2\n\n - run: cargo clippy --all-targets --all-features -- -D warnings\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\workflows\lint-rust.yml
|
lint-rust.yml
|
YAML
| 1,018 | 0.8 | 0 | 0.090909 |
react-lib
| 807 |
2025-04-13T16:26:05.557983
|
Apache-2.0
| false |
45a9751ccfbedc80177bf0a88ccc3388
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: publish `@tauri-apps/cli`\nenv:\n DEBUG: napi:*\n APP_NAME: cli\n MACOSX_DEPLOYMENT_TARGET: '10.13'\non:\n workflow_dispatch:\n inputs:\n releaseId:\n description: 'ID of the `@tauri-apps/cli` release'\n required: true\n repository_dispatch:\n types: [publish-js-cli]\n\ndefaults:\n run:\n working-directory: packages/cli/\n\njobs:\n build:\n strategy:\n fail-fast: false\n matrix:\n settings:\n - host: macos-latest\n target: x86_64-apple-darwin\n architecture: x64\n build: |\n pnpm build --target=x86_64-apple-darwin\n strip -x *.node\n - host: windows-latest\n build: pnpm build\n target: x86_64-pc-windows-msvc\n architecture: x64\n - host: windows-latest\n build: pnpm build --target i686-pc-windows-msvc\n target: i686-pc-windows-msvc\n architecture: x64\n - host: windows-latest\n architecture: x64\n target: aarch64-pc-windows-msvc\n build: pnpm build --target aarch64-pc-windows-msvc\n - host: ubuntu-22.04\n target: x86_64-unknown-linux-gnu\n docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian\n build: |\n npm i -g --force corepack\n cd packages/cli\n pnpm build --target x86_64-unknown-linux-gnu\n strip *.node\n - host: ubuntu-22.04\n target: x86_64-unknown-linux-musl\n docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-alpine\n build: |\n cd packages/cli\n pnpm build\n strip *.node\n - host: macos-latest\n target: aarch64-apple-darwin\n build: |\n pnpm build --features native-tls-vendored --target=aarch64-apple-darwin\n strip -x *.node\n - host: ubuntu-22.04\n target: aarch64-unknown-linux-gnu\n docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64\n build: |\n npm i -g --force corepack\n cd packages/cli\n pnpm build --target aarch64-unknown-linux-gnu\n aarch64-unknown-linux-gnu-strip *.node\n - host: ubuntu-22.04\n architecture: x64\n target: armv7-unknown-linux-gnueabihf\n setup: |\n sudo apt-get update\n sudo apt-get install gcc-arm-linux-gnueabihf g++-arm-linux-gnueabihf -y\n build: |\n pnpm build --target=armv7-unknown-linux-gnueabihf\n arm-linux-gnueabihf-strip *.node\n - host: ubuntu-22.04\n architecture: x64\n target: aarch64-unknown-linux-musl\n docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-alpine\n build: |\n cd packages/cli\n rustup target add aarch64-unknown-linux-musl\n pnpm build --target aarch64-unknown-linux-musl\n /aarch64-linux-musl-cross/bin/aarch64-linux-musl-strip *.node\n - host: ubuntu-22.04\n architecture: x64\n target: riscv64gc-unknown-linux-gnu\n setup: |\n sudo apt-get update\n sudo apt-get install gcc-riscv64-linux-gnu g++-riscv64-linux-gnu -y\n build: |\n pnpm build --target=riscv64gc-unknown-linux-gnu\n riscv64-linux-gnu-strip *.node\n name: stable - ${{ matrix.settings.target }} - node@20\n runs-on: ${{ matrix.settings.host }}\n steps:\n - uses: actions/checkout@v4\n - run: npm i -g --force corepack\n - name: Setup node\n uses: actions/setup-node@v4\n if: ${{ !matrix.settings.docker }}\n with:\n node-version: 20\n cache: 'pnpm'\n architecture: ${{ matrix.settings.architecture }}\n - name: Install Rust\n uses: dtolnay/rust-toolchain@stable\n if: ${{ !matrix.settings.docker }}\n with:\n targets: ${{ matrix.settings.target }}\n - uses: Swatinem/rust-cache@v1\n with:\n key: ${{ matrix.settings.target }}\n if: ${{ matrix.settings.docker }}\n - name: Setup toolchain\n run: ${{ matrix.settings.setup }}\n if: ${{ matrix.settings.setup }}\n shell: bash\n - name: Install dependencies\n run: pnpm i 
--frozen-lockfile --ignore-scripts\n\n - name: Build in docker\n uses: addnab/docker-run-action@v3\n if: ${{ matrix.settings.docker }}\n with:\n image: ${{ matrix.settings.docker }}\n options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/root/.cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/root/.cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/root/.cargo/registry/index -v ${{ github.workspace }}:/build -w /build\n run: ${{ matrix.settings.build }}\n\n - name: Build\n run: ${{ matrix.settings.build }}\n if: ${{ !matrix.settings.docker }}\n shell: bash\n\n - name: Upload artifact\n uses: actions/upload-artifact@v4\n with:\n name: bindings-${{ matrix.settings.target }}\n path: packages/cli/${{ env.APP_NAME }}.*.node\n if-no-files-found: error\n # build-freebsd:\n # runs-on: macos-10.15\n # name: Build FreeBSD\n # steps:\n # - uses: actions/checkout@v4\n # - name: Build\n # id: build\n # uses: vmactions/[email protected]\n # env:\n # DEBUG: napi:*\n # RUSTUP_HOME: /usr/local/rustup\n # CARGO_HOME: /usr/local/cargo\n # RUSTUP_IO_THREADS: 1\n # with:\n # envs: DEBUG RUSTUP_HOME CARGO_HOME RUSTUP_IO_THREADS\n # usesh: true\n # mem: 3000\n # prepare: |\n # pkg install -y curl node14 python2\n # curl -qL https://www.npmjs.com/install.sh | sh\n # npm install -g pnpm\n # curl https://sh.rustup.rs -sSf --output rustup.sh\n # sh rustup.sh -y --profile minimal --default-toolchain stable\n # export PATH="/usr/local/cargo/bin:$PATH"\n # echo "~~~~ rustc --version ~~~~"\n # rustc --version\n # echo "~~~~ node -v ~~~~"\n # node -v\n # echo "~~~~ pnpm --version ~~~~"\n # pnpm --version\n # run: |\n # export PATH="/usr/local/cargo/bin:$PATH"\n # pwd\n # ls -lah\n # whoami\n # env\n # freebsd-version\n # cd ./packages/cli/\n # pnpm i --frozen-lockfile --ignore-scripts\n # pnpm build\n # strip -x *.node\n # rm -rf node_modules\n # rm -rf ../../target\n # - name: Upload artifact\n # uses: actions/upload-artifact@v4\n # with:\n # name: bindings-freebsd\n # path: packages/cli/${{ env.APP_NAME }}.*.node\n # if-no-files-found: error\n test-macOS-windows-binding:\n name: Test bindings on ${{ matrix.settings.target }} - node@${{ matrix.node }}\n needs:\n - build\n strategy:\n fail-fast: false\n matrix:\n settings:\n - host: macos-latest\n target: aarch64-apple-darwin\n - host: windows-latest\n target: x86_64-pc-windows-msvc\n node:\n - '18'\n - '20'\n runs-on: ${{ matrix.settings.host }}\n steps:\n - uses: actions/checkout@v4\n - run: npm i -g --force corepack\n - name: Setup node\n uses: actions/setup-node@v4\n with:\n node-version: ${{ matrix.node }}\n cache: 'pnpm'\n - name: Install dependencies\n run: pnpm i --frozen-lockfile --ignore-scripts\n - name: Download artifacts\n uses: actions/[email protected]\n with:\n name: bindings-${{ matrix.settings.target }}\n path: 'packages/cli/'\n - name: List packages\n run: ls -R .\n shell: bash\n - name: Test bindings\n run: pnpm test\n test-linux-x64-gnu-binding:\n name: Test bindings on Linux-x64-gnu - node@${{ matrix.node }}\n needs:\n - build\n strategy:\n fail-fast: false\n matrix:\n node:\n - '18'\n - '20'\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - run: npm i -g --force corepack\n - name: Setup node\n uses: actions/setup-node@v4\n with:\n node-version: ${{ matrix.node }}\n cache: 'pnpm'\n - name: Install dependencies\n run: pnpm i --frozen-lockfile --ignore-scripts\n - name: Download artifacts\n uses: actions/[email protected]\n with:\n name: bindings-x86_64-unknown-linux-gnu\n path: 
'packages/cli'\n - name: List packages\n run: ls -R .\n shell: bash\n - name: install system dependencies\n run: |\n sudo apt-get update\n sudo apt-get install -y webkit2gtk-4.1 libayatana-appindicator3-dev librsvg2-dev\n - name: Test bindings\n run: pnpm test\n test-linux-x64-musl-binding:\n name: Test bindings on x86_64-unknown-linux-musl - node@${{ matrix.node }}\n needs:\n - build\n strategy:\n fail-fast: false\n matrix:\n node:\n - '18'\n - '20'\n runs-on: ubuntu-latest\n container:\n image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-alpine\n steps:\n - uses: actions/checkout@v4\n - run: npm i -g --force corepack\n - name: Setup node\n uses: actions/setup-node@v4\n with:\n node-version: ${{ matrix.node }}\n cache: 'pnpm'\n - name: Install dependencies\n run: pnpm i --frozen-lockfile --ignore-scripts\n - name: Download artifacts\n uses: actions/[email protected]\n with:\n name: bindings-x86_64-unknown-linux-musl\n path: 'packages/cli/'\n - name: List packages\n run: ls -R .\n shell: bash\n - name: Setup and run tests\n run: |\n pnpm tauri --help\n ls -la\n #- name: Setup and run tests\n # run: |\n # rustup install stable\n # rustup default stable\n # pnpm test\n # ls -la\n test-linux-arm-bindings:\n name: Test bindings on ${{ matrix.image }} - node@${{ matrix.node }}\n needs:\n - build\n strategy:\n fail-fast: false\n matrix:\n node:\n - '18'\n - '20'\n image:\n - ghcr.io/napi-rs/napi-rs/nodejs:aarch64-16\n runs-on: ubuntu-latest\n steps:\n - run: docker run --rm --privileged multiarch/qemu-user-static:register --reset\n working-directory: ${{ github.workspace }}\n - uses: actions/checkout@v4\n - name: List packages\n run: ls -R .\n shell: bash\n - name: Download aarch64-gnu artifacts\n uses: actions/[email protected]\n with:\n name: bindings-aarch64-unknown-linux-gnu\n path: 'packages/cli'\n - name: Download armv7-gnueabihf artifacts\n uses: actions/[email protected]\n with:\n name: bindings-armv7-unknown-linux-gnueabihf\n path: 'packages/cli/'\n # TODO: actually run test, blocked by https://github.com/rust-lang/cargo/issues/8719\n - uses: addnab/docker-run-action@v3\n with:\n image: ${{ matrix.image }}\n options: '-v ${{ github.workspace }}:/build -w /build -e RUSTUP_HOME=/usr/local/rustup -e CARGO_HOME=/usr/local/cargo'\n shell: bash\n run: |\n set -e\n export PATH=/usr/local/cargo/bin/:/usr/local/fnm:$PATH\n apt-get update\n DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get install --no-install-recommends -y unzip libayatana-appindicator3-dev\n bash\n curl https://sh.rustup.rs -sSf | bash -s -- -y\n curl -fsSL https://fnm.vercel.app/install | bash -s -- --install-dir "/usr/local/fnm" --skip-shell\n eval "$(fnm env --use-on-cd)"\n fnm install ${{ matrix.node }}\n fnm use ${{ matrix.node }}\n cd packages/cli\n node tauri.js --help\n ls -la\n publish:\n name: Publish\n runs-on: ubuntu-latest\n needs:\n #- build-freebsd\n - test-macOS-windows-binding\n - test-linux-x64-gnu-binding\n - test-linux-x64-musl-binding\n #- test-linux-arm-bindings\n permissions:\n contents: write # update release\n id-token: write # npm provenance\n steps:\n - uses: actions/checkout@v4\n - run: npm i -g --force corepack\n - name: Setup node\n uses: actions/setup-node@v4\n with:\n node-version: 20\n cache: 'pnpm'\n - name: Install dependencies\n run: pnpm i --frozen-lockfile --ignore-scripts\n - name: Download all artifacts\n uses: actions/[email protected]\n with:\n path: packages/cli/artifacts\n - name: Move artifacts\n run: pnpm artifacts\n - name: List packages\n run: ls -R ./npm\n shell: bash\n - name: 
Publish\n run: |\n echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc\n npm publish\n env:\n GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n NPM_TOKEN: ${{ secrets.ORG_NPM_TOKEN }}\n RELEASE_ID: ${{ github.event.client_payload.releaseId || inputs.releaseId }}\n NPM_CONFIG_PROVENANCE: true\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\workflows\publish-cli-js.yml
|
publish-cli-js.yml
|
YAML
| 13,567 | 0.95 | 0.02005 | 0.155216 |
react-lib
| 640 |
2023-08-29T13:41:46.751410
|
BSD-3-Clause
| false |
21bfbb1089c82d10f4f698816be6661c
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: publish `tauri-cli`\nenv:\n MACOSX_DEPLOYMENT_TARGET: '10.13'\non:\n workflow_dispatch:\n repository_dispatch:\n types: [publish-clirs]\n\njobs:\n build:\n runs-on: ${{ matrix.config.os }}\n\n strategy:\n fail-fast: false\n matrix:\n config:\n - os: ubuntu-22.04\n rust_target: x86_64-unknown-linux-gnu\n ext: ''\n args: ''\n - os: macos-latest\n rust_target: x86_64-apple-darwin\n ext: ''\n args: ''\n - os: macos-latest\n rust_target: aarch64-apple-darwin\n ext: ''\n args: ''\n - os: windows-latest\n rust_target: x86_64-pc-windows-msvc\n ext: '.exe'\n args: ''\n - os: windows-latest\n rust_target: aarch64-pc-windows-msvc\n ext: '.exe'\n args: ''\n\n steps:\n - uses: actions/checkout@v4\n\n - name: 'Setup Rust'\n uses: dtolnay/rust-toolchain@stable\n with:\n targets: ${{ matrix.config.rust_target }}\n\n - uses: Swatinem/rust-cache@v2\n with:\n key: ${{ matrix.config.rust_target }}\n\n - name: install Linux dependencies\n if: matrix.config.os == 'ubuntu-latest'\n run: |\n sudo apt-get update\n sudo apt-get install -y libgtk-3-dev\n\n - name: Build CLI\n run: cargo build --manifest-path ./crates/tauri-cli/Cargo.toml --profile release-size-optimized ${{ matrix.config.args }}\n\n - name: Upload CLI\n uses: actions/upload-artifact@v4\n with:\n name: cargo-tauri-${{ matrix.config.rust_target }}${{ matrix.config.ext }}\n path: target/release-size-optimized/cargo-tauri${{ matrix.config.ext }}\n if-no-files-found: error\n\n upload:\n needs: build\n runs-on: ubuntu-latest\n\n steps:\n - name: Checkout\n uses: actions/checkout@v4\n\n - name: Download built CLIs\n uses: actions/[email protected]\n with:\n path: outputs\n\n - name: Pack archives\n run: ./.scripts/ci/pack-cli.sh\n\n - name: Get CLI version\n run: echo "CLI_VERSION=$(cat crates/tauri-cli/metadata-v2.json | jq '."cli.js".version' -r)" >> $GITHUB_ENV\n\n - name: Publish release\n uses: softprops/action-gh-release@50195ba7f6f93d1ac97ba8332a178e008ad176aa\n with:\n tag_name: tauri-cli-v${{ env.CLI_VERSION }}\n files: |\n outputs/cargo-tauri-*.zip\n outputs/cargo-tauri-*.tgz\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\workflows\publish-cli-rs.yml
|
publish-cli-rs.yml
|
YAML
| 2,642 | 0.8 | 0.021053 | 0.0375 |
awesome-app
| 280 |
2024-10-10T09:40:15.254833
|
MIT
| false |
4770b33ec9b1e3608ac3cd667efca5e8
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: supply chain health status\non:\n workflow_dispatch:\n schedule:\n - cron: '0 0 * * *'\n push:\n branches:\n - dev\n paths:\n - '.github/workflows/supply-chain.yml'\n - '**/Cargo.lock'\n - '**/Cargo.toml'\njobs:\n cargo-vet:\n name: check rust dependencies with cargo vet\n runs-on: ubuntu-latest\n env:\n CARGO_VET_VERSION: 0.9.1\n steps:\n - uses: actions/checkout@master\n - name: Install Rust\n run: rustup update stable && rustup default stable\n\n - uses: actions/cache@v4\n with:\n path: ${{ runner.tool_cache }}/cargo-vet\n key: cargo-vet-bin-${{ env.CARGO_VET_VERSION }}\n\n - name: Add the tool cache directory to the search path\n run: echo "${{ runner.tool_cache }}/cargo-vet/bin" >> $GITHUB_PATH\n\n - name: Ensure that the tool cache is populated with the cargo-vet binary\n run: cargo install --root ${{ runner.tool_cache }}/cargo-vet --version ${{ env.CARGO_VET_VERSION }} cargo-vet\n\n # Enable this again to break the workflow once we have a reasonable amount of suggestions to get to a clean base line\n # - name: Invoke cargo-vet\n # run: cargo vet --locked\n\n - name: Provide audit suggestions\n run: cargo vet suggest\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\workflows\supply-chain.yml
|
supply-chain.yml
|
YAML
| 1,407 | 0.8 | 0 | 0.157895 |
python-kit
| 947 |
2024-12-13T04:54:12.468961
|
GPL-3.0
| false |
6f7e9aa67404747127b1f7028e20acda
|
# Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n# SPDX-License-Identifier: Apache-2.0\n# SPDX-License-Identifier: MIT\n\nname: Udeps\n\non:\n push:\n branches:\n - dev\n\nenv:\n RUST_BACKTRACE: 1\n CARGO_PROFILE_DEV_DEBUG: 0 # This would add unnecessary bloat to the target folder, decreasing cache efficiency.\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.ref }}\n cancel-in-progress: true\n\njobs:\n changes:\n runs-on: ubuntu-latest\n outputs:\n tauri: ${{ steps.filter.outputs.tauri }}\n build: ${{ steps.filter.outputs.build }}\n codegen: ${{ steps.filter.outputs.codegen }}\n macros: ${{ steps.filter.outputs.macros }}\n runtime: ${{ steps.filter.outputs.runtime }}\n wry: ${{ steps.filter.outputs.wry }}\n utils: ${{ steps.filter.outputs.utils }}\n bundler: ${{ steps.filter.outputs.bundler }}\n cli: ${{ steps.filter.outputs.cli }}\n steps:\n - uses: actions/checkout@v4\n - uses: dorny/paths-filter@v3\n id: filter\n with:\n filters: |\n tauri:\n - 'crates/tauri/**'\n - '!crates/tauri/scripts/**'\n build:\n - 'crates/tauri-build/**'\n codegen:\n - 'crates/tauri-codegen/**'\n macros:\n - 'crates/tauri-macros/**'\n runtime:\n - 'crates/tauri-runtime/**'\n wry:\n - 'crates/tauri-runtime-wry/**'\n utils:\n - 'crates/tauri-utils/**'\n bundler:\n - 'crates/tauri-bundler/**'\n cli:\n - 'crates/tauri-cli/**'\n macossign:\n - 'crates/tauri-macos-sign/**'\n\n setup:\n runs-on: ubuntu-latest\n needs: changes\n if: |\n needs.changes.outputs.tauri == 'true' ||\n needs.changes.outputs.build == 'true' ||\n needs.changes.outputs.codegen == 'true' ||\n needs.changes.outputs.macros == 'true' ||\n needs.changes.outputs.runtime == 'true' ||\n needs.changes.outputs.wry == 'true' ||\n needs.changes.outputs.utils == 'true' ||\n needs.changes.outputs.bundler == 'true' ||\n needs.changes.outputs.cli == 'true' ||\n needs.changes.outputs.macossign == 'true'\n\n steps:\n - uses: actions/checkout@v4\n\n - name: Install Rust nightly\n uses: dtolnay/rust-toolchain@nightly\n\n - name: Install udeps\n run: cargo install cargo-udeps --locked --force\n\n - name: Upload udeps\n uses: actions/upload-artifact@v4\n with:\n name: udeps\n path: '~/.cargo/bin/cargo-udeps'\n if-no-files-found: error\n\n - name: Create udeps matrix\n id: create-matrix\n env:\n TAURI: ${{ needs.changes.outputs.tauri == 'true' }}\n BUILD: ${{ needs.changes.outputs.build == 'true' }}\n CODEGEN: ${{ needs.changes.outputs.codegen == 'true' }}\n MACROS: ${{ needs.changes.outputs.macros == 'true' }}\n RUNTIME: ${{ needs.changes.outputs.runtime == 'true' }}\n WRY: ${{ needs.changes.outputs.wry == 'true' }}\n UTILS: ${{ needs.changes.outputs.utils == 'true' }}\n BUNDLER: ${{ needs.changes.outputs.bundler == 'true' }}\n CLI: ${{ needs.changes.outputs.cli == 'true' }}\n MACOSSIGN: ${{ needs.changes.outputs.macossign == 'true' }}\n run: |\n crates=()\n if [ "${TAURI}" == "true" ]; then crates[${#crates[@]}]="\"./crates/tauri\""; fi\n if [ "${BUILD}" == "true" ]; then crates[${#crates[@]}]="\"./crates/tauri-build\""; fi\n if [ "${CODEGEN}" == "true" ]; then crates[${#crates[@]}]="\"./crates/tauri-codegen\""; fi\n if [ "${MACROS}" == "true" ]; then crates[${#crates[@]}]="\"./crates/tauri-macros\""; fi\n if [ "${RUNTIME}" == "true" ]; then crates[${#crates[@]}]="\"./crates/tauri-runtime\""; fi\n if [ "${WRY}" == "true" ]; then crates[${#crates[@]}]="\"./crates/tauri-runtime-wry\""; fi\n if [ "${UTILS}" == "true" ]; then crates[${#crates[@]}]="\"./crates/tauri-utils\""; fi\n if [ "${BUNDLER}" == "true" ]; then 
crates[${#crates[@]}]="\"./crates/tauri-bundler\""; fi\n if [ "${CLI}" == "true" ]; then crates[${#crates[@]}]="\"./crates/tauri-cli\""; fi\n if [ "${MACOSSIGN}" == "true" ]; then crates[${#crates[@]}]="\"./crates/tauri-macos-sign\""; fi\n echo "matrix=[$crates]" >> "$GITHUB_OUTPUT"\n outputs:\n matrix: ${{ steps.create-matrix.outputs.matrix }}\n\n udeps:\n runs-on: ubuntu-latest\n needs: setup\n strategy:\n matrix:\n path: ${{ fromJson(needs.setup.outputs.matrix) }}\n steps:\n - uses: actions/checkout@v4\n\n - name: Install Rust nightly\n uses: dtolnay/rust-toolchain@nightly\n\n - name: install dependencies\n run: |\n sudo apt-get update\n sudo apt-get install -y libgtk-3-dev\n\n - uses: Swatinem/rust-cache@v2\n\n - name: Download udeps\n uses: actions/[email protected]\n with:\n name: udeps\n path: '~/.cargo/bin'\n\n - run: chmod +x $HOME/.cargo/bin/cargo-udeps\n\n - name: Install required packages\n run: |\n sudo apt-get update\n sudo apt-get install -y webkit2gtk-4.1 libayatana-appindicator3-dev\n\n - name: Run udeps\n run: cargo udeps --manifest-path ${{ matrix.path }}/Cargo.toml --all-targets --all-features\n
|
dataset_sample\yaml\tauri-apps_tauri\.github\workflows\udeps.yml
|
udeps.yml
|
YAML
| 5,399 | 0.95 | 0.077922 | 0.022222 |
node-utils
| 894 |
2024-02-05T19:40:38.637862
|
MIT
| false |
5da2bc292b2fa9a411013cfc4504ec6e
|
name: {{app.name}}\noptions:\n bundleIdPrefix: {{app.identifier}}\n deploymentTarget:\n iOS: {{apple.ios-version}}\nfileGroups: [{{join file-groups}}]\nconfigs:\n debug: debug\n release: release\nsettingGroups:\n app:\n base:\n PRODUCT_NAME: {{app.stylized-name}}\n PRODUCT_BUNDLE_IDENTIFIER: {{app.identifier}}\n {{#if apple.development-team}}\n DEVELOPMENT_TEAM: {{apple.development-team}}\n {{/if}}\ntargetTemplates:\n app:\n type: application\n sources:\n - path: Sources\n scheme:\n environmentVariables:\n RUST_BACKTRACE: full\n RUST_LOG: info\n settings:\n groups: [app]\ntargets:\n {{app.name}}_iOS:\n type: application\n platform: iOS\n sources:\n - path: Sources\n - path: Assets.xcassets\n - path: Externals\n - path: {{app.name}}_iOS\n - path: {{app.asset-dir}}\n buildPhase: resources\n type: folder\n {{~#each asset-catalogs}}\n - {{prefix-path this}}{{/each}}\n {{~#each ios-additional-targets}}\n - path: {{prefix-path this}}{{/each}}\n - path: LaunchScreen.storyboard\n info:\n path: {{app.name}}_iOS/Info.plist\n properties:\n LSRequiresIPhoneOS: true\n UILaunchStoryboardName: LaunchScreen\n UIRequiredDeviceCapabilities: [arm64, metal]\n UISupportedInterfaceOrientations:\n - UIInterfaceOrientationPortrait\n - UIInterfaceOrientationLandscapeLeft\n - UIInterfaceOrientationLandscapeRight\n UISupportedInterfaceOrientations~ipad:\n - UIInterfaceOrientationPortrait\n - UIInterfaceOrientationPortraitUpsideDown\n - UIInterfaceOrientationLandscapeLeft\n - UIInterfaceOrientationLandscapeRight\n CFBundleShortVersionString: {{apple.bundle-version-short}}\n CFBundleVersion: {{apple.bundle-version}}\n {{~#each apple.plist-pairs}}\n {{this.key}}: {{this.value}}{{/each}}\n entitlements:\n path: {{app.name}}_iOS/{{app.name}}_iOS.entitlements\n scheme:\n environmentVariables:\n RUST_BACKTRACE: full\n RUST_LOG: info\n {{~#if ios-command-line-arguments}}\n commandLineArguments:\n {{~#each ios-command-line-arguments}}\n "{{this}}": true\n {{/each}}{{~/if}}\n settings:\n base:\n ENABLE_BITCODE: false\n ARCHS: [{{join ios-valid-archs}}]\n VALID_ARCHS: {{~#each ios-valid-archs}} {{this}} {{/each}}\n LIBRARY_SEARCH_PATHS[arch=x86_64]: $(inherited) $(PROJECT_DIR)/Externals/x86_64/$(CONFIGURATION) $(SDKROOT)/usr/lib/swift $(TOOLCHAIN_DIR)/usr/lib/swift/$(PLATFORM_NAME) $(TOOLCHAIN_DIR)/usr/lib/swift-5.0/$(PLATFORM_NAME)\n LIBRARY_SEARCH_PATHS[arch=arm64]: $(inherited) $(PROJECT_DIR)/Externals/arm64/$(CONFIGURATION) $(SDKROOT)/usr/lib/swift $(TOOLCHAIN_DIR)/usr/lib/swift/$(PLATFORM_NAME) $(TOOLCHAIN_DIR)/usr/lib/swift-5.0/$(PLATFORM_NAME)\n ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES: true\n EXCLUDED_ARCHS[sdk=iphoneos*]: x86_64\n groups: [app]\n dependencies:\n - framework: {{ lib-output-file-name }}\n embed: false\n {{~#each ios-libraries}}\n - framework: {{this}}\n embed: false{{/each}}{{#if ios-vendor-frameworks}}{{~#each ios-vendor-frameworks}}\n - framework: {{this}}{{/each}}{{/if}}{{#if ios-vendor-sdks}}{{~#each ios-vendor-sdks}}\n - sdk: {{prefix-path this}}{{/each}}{{/if}}\n - sdk: CoreGraphics.framework\n - sdk: Metal.framework\n - sdk: MetalKit.framework\n - sdk: QuartzCore.framework\n - sdk: Security.framework\n - sdk: UIKit.framework{{#if this.ios-frameworks}}{{~#each ios-frameworks}}\n - sdk: {{this}}.framework{{/each}}{{/if}}\n - sdk: WebKit.framework\n preBuildScripts:\n {{~#each ios-pre-build-scripts}}{{#if this.path}}\n - path {{this.path}}{{/if}}{{#if this.script}}\n - script: {{this.script}}{{/if}}{{#if this.name}}\n name: {{this.name}}{{/if}}{{#if this.input-files}}\n inputFiles: {{~#each 
this.input-files}}\n - {{this}}{{/each}}{{/if}}{{#if this.output-files}}\n outputFiles: {{~#each this.output-files}}\n - {{this}}{{/each}}{{/if}}{{#if this.input-file-lists}}\n inputFileLists: {{~#each this.output-files}}\n - {{this}}{{/each}}{{/if}}{{#if this.output-file-lists}}\n outputFileLists: {{~#each this.output-files}}\n - {{this}}{{/each}}{{/if}}{{#if this.shell}}\n shell: {{this.shell}}{{/if}}{{#if this.show-env-vars}}\n showEnvVars: {{this.show_env_vars}}{{/if}}{{#if this.run-only-when-installing}}\n runOnlyWhenInstalling: {{this.run-only-when-installing}}{{/if}}{{#if this.based-on-dependency-analysis}}\n basedOnDependencyAnalysis: {{this.based-on-dependency-analysis}}{{/if}}{{#if this.discovered-dependency-file}}\n discoveredDependencyFile: {{this.discovered-dependency-file}}{{/if}}\n {{~/each}}\n\n - script: {{ tauri-binary }} {{ tauri-binary-args-str }} -v --platform ${PLATFORM_DISPLAY_NAME:?} --sdk-root ${SDKROOT:?} --framework-search-paths "${FRAMEWORK_SEARCH_PATHS:?}" --header-search-paths "${HEADER_SEARCH_PATHS:?}" --gcc-preprocessor-definitions "${GCC_PREPROCESSOR_DEFINITIONS:-}" --configuration ${CONFIGURATION:?} ${FORCE_COLOR} ${ARCHS:?}\n name: Build Rust Code\n basedOnDependencyAnalysis: false\n outputFiles:\n - $(SRCROOT)/Externals/x86_64/${CONFIGURATION}/{{ lib-output-file-name }}\n - $(SRCROOT)/Externals/arm64/${CONFIGURATION}/{{ lib-output-file-name }}\n {{~#if ios-post-compile-scripts}}\n postCompileScripts:\n {{~#each ios-post-compile-scripts}}{{#if this.path}}\n - path {{this.path}}{{/if}}{{#if this.script}}\n - script: {{this.script}}{{/if}}{{#if this.name}}\n name: {{this.name}}{{/if}}{{#if this.input-files}}\n inputFiles: {{~#each this.input-files}}\n - {{this}}{{/each}}{{/if}}{{#if this.output-files}}\n outputFiles: {{~#each this.output-files}}\n - {{this}}{{/each}}{{/if}}{{#if this.input-file-lists}}\n inputFileLists: {{~#each this.output-files}}\n - {{this}}{{/each}}{{/if}}{{#if this.output-file-lists}}\n outputFileLists: {{~#each this.output-files}}\n - {{this}}{{/each}}{{/if}}{{#if this.shell}}\n shell: {{this.shell}}{{/if}}{{#if this.show-env-vars}}\n showEnvVars: {{this.show_env_vars}}{{/if}}{{#if this.run-only-when-installing}}\n runOnlyWhenInstalling: {{this.run-only-when-installing}}{{/if}}{{#if this.based-on-dependency-analysis}}\n basedOnDependencyAnalysis: {{this.based-on-dependency-analysis}}{{/if}}{{#if this.discovered-dependency-file}}\n discoveredDependencyFile: {{this.discovered-dependency-file}}{{/if}}\n {{~/each~}}\n {{~/if~}}\n {{~#if ios-post-build-scripts}}\n postBuildScripts:\n {{~#each ios-post-build-scripts}}{{#if this.path}}\n - path {{this.path}}{{/if}}{{#if this.script}}\n - script: {{this.script}}{{/if}}{{#if this.name}}\n name: {{this.name}}{{/if}}{{#if this.input-files}}\n inputFiles: {{~#each this.input-files}}\n - {{this}}{{/each}}{{/if}}{{#if this.output-files}}\n outputFiles: {{~#each this.output-files}}\n - {{this}}{{/each}}{{/if}}{{#if this.input-file-lists}}\n inputFileLists: {{~#each this.output-files}}\n - {{this}}{{/each}}{{/if}}{{#if this.output-file-lists}}\n outputFileLists: {{~#each this.output-files}}\n - {{this}}{{/each}}{{/if}}{{#if this.shell}}\n shell: {{this.shell}}{{/if}}{{#if this.show-env-vars}}\n showEnvVars: {{this.show_env_vars}}{{/if}}{{#if this.run-only-when-installing}}\n runOnlyWhenInstalling: {{this.run-only-when-installing}}{{/if}}{{#if this.based-on-dependency-analysis}}\n basedOnDependencyAnalysis: {{this.based-on-dependency-analysis}}{{/if}}{{#if this.discovered-dependency-file}}\n 
discoveredDependencyFile: {{this.discovered-dependency-file}}{{/if}}\n {{~/each~}}\n {{~/if}}\n
| dataset_sample\yaml\tauri-apps_tauri\crates\tauri-cli\templates\mobile\ios\project.yml | project.yml | YAML | 7,949 | 0.8 | 0.508876 | 0 | python-kit | 421 | 2024-07-11T20:32:06.204894 | BSD-3-Clause | false | a4f18295f73698e32caa8bd9ab30b260 |
{{{{raw}}}}\nname: Audit\n\non:\n schedule:\n - cron: '0 0 * * *'\n push:\n branches:\n - main\n paths:\n - ".github/workflows/audit.yml"\n - "**/Cargo.lock"\n - "**/Cargo.toml"\n pull_request:\n branches:\n - main\n paths:\n - ".github/workflows/audit.yml"\n - "**/Cargo.lock"\n - "**/Cargo.toml"\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.ref }}\n cancel-in-progress: true\n\njobs:\n audit:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - uses: rustsec/audit-check@v1\n with:\n token: ${{ secrets.GITHUB_TOKEN }}\n{{{{/raw}}}}\n
| dataset_sample\yaml\tauri-apps_tauri\crates\tauri-cli\templates\plugin\.github\workflows\audit.yml | audit.yml | YAML | 625 | 0.7 | 0 | 0 | react-lib | 194 | 2023-08-19T10:49:42.609851 | BSD-3-Clause | false | a513182b5329c0b013d8daf1b0c4155c |
{{{{raw}}}}\nname: Check\n\non:\n push:\n branches:\n - main\n paths:\n - ".github/workflows/check.yml"\n - "**/*.rs"\n - "**/Cargo.toml"\n pull_request:\n branches:\n - main\n paths:\n - ".github/workflows/check.yml"\n - "**/*.rs"\n - "**/Cargo.toml"\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.ref }}\n cancel-in-progress: true\n\njobs:\n fmt:\n runs-on: ubuntu-latest\n\n steps:\n - uses: actions/checkout@v4\n - uses: dtolnay/rust-toolchain@stable\n with:\n components: rustfmt\n - run: cargo fmt --all -- --check\n\n clippy:\n strategy:\n fail-fast: false\n matrix:\n platform: [ubuntu-latest, macos-latest, windows-latest]\n\n runs-on: ${{ matrix.platform }}\n\n steps:\n - uses: actions/checkout@v4\n - uses: dtolnay/rust-toolchain@stable\n with:\n components: clippy\n - name: install webkit2gtk\n if: matrix.platform == 'ubuntu-latest'\n run: |\n sudo apt-get update\n sudo apt-get install -y webkit2gtk-4.1\n - uses: Swatinem/rust-cache@v2\n - run: cargo clippy --all-targets --all-features -- -D warnings\n{{{{/raw}}}}\n
| dataset_sample\yaml\tauri-apps_tauri\crates\tauri-cli\templates\plugin\.github\workflows\clippy.yml | clippy.yml | YAML | 1,185 | 0.8 | 0.018182 | 0 | node-utils | 372 | 2025-02-01T00:19:36.587770 | GPL-3.0 | false | cc44c2eca4842bf6c04153ec149b07ea |
before:\n hooks:\n - go mod download\n\narchives:\n - id: default\n builds:\n - temporal-server\n - temporal-cassandra-tool\n - temporal-sql-tool\n name_template: "{{ .ProjectName }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}"\n format_overrides:\n - goos: windows\n format: zip\n files:\n - ./config/*\n\nbuilds:\n - id: temporal-server\n dir: cmd/server\n binary: temporal-server\n env:\n - CGO_ENABLED=0\n goos:\n - linux\n - darwin\n - windows\n goarch:\n - amd64\n - arm64\n - id: temporal-cassandra-tool\n dir: cmd/tools/cassandra\n binary: temporal-cassandra-tool\n env:\n - CGO_ENABLED=0\n goos:\n - linux\n - darwin\n - windows\n goarch:\n - amd64\n - arm64\n - id: temporal-sql-tool\n dir: cmd/tools/sql\n binary: temporal-sql-tool\n env:\n - CGO_ENABLED=0\n goos:\n - linux\n - darwin\n - windows\n goarch:\n - amd64\n - arm64\n - id: tdbg\n dir: cmd/tools/tdbg\n binary: tdbg\n env:\n - CGO_ENABLED=0\n goos:\n - linux\n - darwin\n - windows\n goarch:\n - amd64\n - arm64\n\nchecksum:\n name_template: 'checksums.txt'\n algorithm: sha256\n\nchangelog:\n skip: true\n\nannounce:\n skip: "true"\n
| dataset_sample\yaml\temporalio_temporal\.goreleaser.yml | .goreleaser.yml | YAML | 1,261 | 0.8 | 0 | 0 | vue-tools | 710 | 2024-08-25T21:29:38.299977 | Apache-2.0 | false | d0bb4bbbc584f00f10e5b81f4ca65fcf |
# These are supported funding model platforms\n\ngithub: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]\npatreon: # Replace with a single Patreon username\nopen_collective: # Replace with a single Open Collective username\nko_fi: # Replace with a single Ko-fi username\ntidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel\ncommunity_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry\nliberapay: # Replace with a single Liberapay username\nissuehunt: # Replace with a single IssueHunt username\notechie: # Replace with a single Otechie username\ncustom: # Replace with a single custom sponsorship URL\n
| dataset_sample\yaml\tennc_webshell\.github\FUNDING.yml | FUNDING.yml | YAML | 686 | 0.8 | 0 | 0.090909 | node-utils | 761 | 2024-10-19T21:42:07.099165 | Apache-2.0 | false | 6613176c08a81d749057cee611abce9f |
theme: jekyll-theme-cayman\nshow_downloads: false\ntitle: CloudMoe Windows 10 Toolkit Digital Edition\ndescription: CloudMoe Windows 10 Acivation Toolkit get digital license, the best open source Win 10 activator in GitHub. <br>GitHub 上最棒的开源 Win10 数字权利(数字许可证)激活工具!\n# SEORelate\ngoogle_site_verification :\nbing_site_verification :\nalexa_site_verification :\nyandex_site_verification :\ntags: [Digital License, Windows 10 Acivation Toolkit, Activator]\nnavbar-links:\n Resources:\n - Learn markdown: "http://www.markdowntutorial.com/"\n - GitHub Pages: "https://pages.github.com/"\n \n
| dataset_sample\yaml\TGSAN_CMWTAT_Digital_Edition\_config.yml | _config.yml | YAML | 627 | 0.8 | 0 | 0.071429 | react-lib | 883 | 2024-03-09T04:52:54.886796 | GPL-3.0 | false | fa88f2a88335cc8e7ba5fdebbe17bf6c |
tasks:\n - init: >\n test -f go.mod && go get -v ./...;\n wget https://github.com/prometheus/prometheus/releases/download/v2.22.2/prometheus-2.22.2.linux-amd64.tar.gz --directory-prefix=/tmp;\n cd /tmp && tar xvfz prometheus-*.tar.gz && rm prometheus-*.tar.gz;\n mv prometheus-*/prometheus /workspace/go/bin/;\n wget https://raw.githubusercontent.com/vishnubob/wait-for-it/master/wait-for-it.sh && chmod u+x ./wait-for-it.sh && cd /workspace/thanos/scripts && mv /tmp/wait-for-it.sh .;\n ./quickstart.sh;\n for i in {0..2};do ./wait-for-it.sh -q localhost:909"$i" -- && echo "Prometheus instance no.$((i+1)) localhost:909$i is up"; done;\n ./wait-for-it.sh localhost:10914 -- && echo "Thanos frontend at localhost:10914 is up";\nports:\n - port: 10914\n onOpen: open-preview\n
| dataset_sample\yaml\thanos-io_thanos\.gitpod.yml | .gitpod.yml | YAML | 810 | 0.8 | 0.538462 | 0 | python-kit | 472 | 2024-04-10T08:03:06.892499 | MIT | false | 0104b64b4d0197c49591702d06f85f0d |
# This file contains all available configuration options\n# with their default values.\n\n# options for analysis running\nrun:\n # timeout for analysis, e.g. 30s, 5m, default is 1m\n timeout: 5m\n\n # exit code when at least one issue was found, default is 1\n issues-exit-code: 1\n\n# output configuration options\noutput:\n # The formats used to render issues.\n formats:\n - format: colored-line-number\n path: stdout\n\n # print lines of code with issue, default is true\n print-issued-lines: true\n\n # print linter name in the end of issue text, default is true\n print-linter-name: true\n\nlinters:\n enable:\n # Sorted alphabetically.\n - errcheck\n - goconst\n - godot\n - gofmt\n - goimports\n - gosimple\n - govet\n - ineffassign\n - misspell\n - staticcheck\n - typecheck\n - unparam\n - unused\n - promlinter\n\nlinters-settings:\n errcheck:\n # List of functions to exclude from checking, where each entry is a single function to exclude.\n exclude-functions:\n - (github.com/go-kit/log.Logger).Log\n - fmt.Fprintln\n - fmt.Fprint\n misspell:\n locale: US\n goconst:\n min-occurrences: 5\n\nissues:\n exclude-rules:\n # We don't check metrics naming in the tests.\n - path: _test\.go\n linters:\n - promlinter\n # These are not being checked since these methods exist\n # so that no one else could implement them.\n - linters:\n - unused\n text: "SourceStoreAPI.implementsStoreAPI"\n - linters:\n - unused\n text: "SourceStoreAPI.producesBlocks"\n - linters:\n - unused\n text: "Source.producesBlocks"\n - linters:\n - unused\n text: "newMockAlertmanager"\n - linters:\n - unused\n text: "ruleAndAssert"\n # Which dirs to exclude: issues from them won't be reported.\n exclude-dirs:\n - vendor\n - internal/cortex\n
| dataset_sample\yaml\thanos-io_thanos\.golangci.yml | .golangci.yml | YAML | 1,848 | 0.95 | 0.037037 | 0.205479 | python-kit | 803 | 2024-08-29T04:53:32.603261 | MIT | false | ec965d08c97246028884acaf8651597b |
go:\n version: 1.23\nrepository:\n path: github.com/thanos-io/thanos\nbuild:\n binaries:\n - name: thanos\n path: ./cmd/thanos\n flags: -a -tags netgo\n ldflags: |\n -X github.com/prometheus/common/version.Version={{.Version}}\n -X github.com/prometheus/common/version.Revision={{.Revision}}\n -X github.com/prometheus/common/version.Branch={{.Branch}}\n -X github.com/prometheus/common/version.BuildUser={{user}}@{{host}}\n -X github.com/prometheus/common/version.BuildDate={{date "20060102-15:04:05"}}\ncrossbuild:\n platforms:\n - linux/amd64\n - darwin/amd64\n - linux/arm64\n - windows/amd64\n - freebsd/amd64\n - linux/ppc64le\n
| dataset_sample\yaml\thanos-io_thanos\.promu.yml | .promu.yml | YAML | 658 | 0.7 | 0 | 0 | awesome-app | 131 | 2023-07-29T13:27:55.026192 | BSD-3-Clause | false | 72155f467be59f577898e9400e3c86f7 |
# NOTE: Current plan gives 1500 build minutes per month.\nversion: 2.1\n\norbs:\n go: circleci/[email protected]\n git-shallow-clone: guitarrapc/[email protected]\n\nexecutors:\n golang:\n docker:\n - image: cimg/go:1.24.0-node\n golang-test:\n docker:\n - image: cimg/go:1.24.0-node\n - image: quay.io/thanos/docker-swift-onlyone-authv2-keystone:v0.1\n\njobs:\n # Cross build is needed for publish_release but needs to be done outside of docker.\n cross_build:\n machine: true\n working_directory: /home/circleci/.go_workspace/src/github.com/thanos-io/thanos\n environment:\n GOBIN: "/home/circleci/.go_workspace/go/bin"\n PROMU_VERSION: "0.5.0"\n steps:\n - git-shallow-clone/checkout\n - run: mkdir -p ${GOBIN}\n - run: curl -L "https://github.com/prometheus/promu/releases/download/v${PROMU_VERSION}/promu-${PROMU_VERSION}.$(go env GOOS)-$(go env GOARCH).tar.gz" | tar --strip-components=1 -xzf - -C ${GOBIN}\n - run: mv -f ${GOBIN}/promu "${GOBIN}/promu-v${PROMU_VERSION}"\n - run: make crossbuild -W ${GOBIN}/promu-v${PROMU_VERSION} # Ignore make dependency, it needs to be enforced somehow.\n - persist_to_workspace:\n root: .\n paths:\n - .build\n\n publish_main:\n executor: golang\n steps:\n - git-shallow-clone/checkout\n - go/mod-download-cached\n - setup_remote_docker:\n version: docker24\n - attach_workspace:\n at: .\n # Register qemu to support multi-arch.\n - run: docker run --privileged tonistiigi/binfmt:qemu-v6.1.0 --install all\n - run: make crossbuild\n - run: make docker-build\n - run: make docker-test\n # Upload to both dockerhub and quay.io.\n - run: echo "${DOCKERHUB_PASSWORD}" | docker login -u="${DOCKERHUB_USERNAME}" --password-stdin\n - run: make docker-push DOCKER_IMAGE_REPO=thanosio/thanos\n - run: make docker-manifest DOCKER_IMAGE_REPO=thanosio/thanos\n - run: echo "${QUAY_PASSWORD}" | docker login -u="${QUAY_USERNAME}" quay.io --password-stdin\n - run: make docker-push\n - run: make docker-manifest\n\n publish_release:\n executor: golang\n steps:\n - git-shallow-clone/checkout\n - go/mod-download-cached\n - setup_remote_docker:\n version: docker24\n - attach_workspace:\n at: .\n - run: make tarballs-release\n - store_artifacts:\n path: .tarballs\n destination: releases\n # Register qemu to support multi-arch.\n - run: docker run --privileged tonistiigi/binfmt:qemu-v6.1.0 --install all\n - run: make docker-build\n - run: make docker-test\n # Upload to both dockerhub and quay.io.\n - run: echo "${DOCKERHUB_PASSWORD}" | docker login -u="${DOCKERHUB_USERNAME}" --password-stdin\n - run: make docker-push DOCKER_IMAGE_REPO=thanosio/thanos DOCKER_IMAGE_TAG=$CIRCLE_TAG\n - run: make docker-manifest DOCKER_IMAGE_REPO=thanosio/thanos DOCKER_IMAGE_TAG=$CIRCLE_TAG\n - run: echo "${QUAY_PASSWORD}" | docker login -u="${QUAY_USERNAME}" quay.io --password-stdin\n - run: make docker-push DOCKER_IMAGE_TAG=$CIRCLE_TAG\n - run: make docker-manifest DOCKER_IMAGE_TAG=$CIRCLE_TAG\n\nworkflows:\n version: 2\n thanos:\n jobs:\n - publish_main:\n filters:\n branches:\n only: main\n - cross_build:\n filters:\n tags:\n only: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/\n branches:\n ignore: /.*/\n - publish_release:\n requires:\n - cross_build\n filters:\n tags:\n only: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/\n branches:\n ignore: /.*/\n
| dataset_sample\yaml\thanos-io_thanos\.circleci\config.yml | config.yml | YAML | 3,689 | 0.95 | 0.009615 | 0.061224 | python-kit | 19 | 2024-04-12T00:47:39.116727 | MIT | false | a0c80b9f0e4ff909b3bfc7991c18463b |
---\nversion: 2\nupdates:\n - package-ecosystem: "gomod"\n directory: "/"\n schedule:\n interval: "weekly"\n open-pull-requests-limit: 20\n - package-ecosystem: "docker"\n directory: "/"\n schedule:\n interval: "weekly"\n - package-ecosystem: "github-actions"\n directory: "/"\n schedule: \n interval: weekly\n
| dataset_sample\yaml\thanos-io_thanos\.github\dependabot.yml | dependabot.yml | YAML | 333 | 0.7 | 0 | 0 | node-utils | 113 | 2023-09-28T15:39:32.696431 | Apache-2.0 | false | b9cdebabe7de562a6b64afbacd49ca9a |
# Configuration for probot-stale - https://github.com/probot/stale\n\n# Only issues or pull requests with all of these labels are check if stale. Defaults to `[]` (disabled)\nonlyLabels: []\n\n# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable.\n# We want stale bot to notify us that something is stale so we can revisit it.\n# If one issue is marked as 'reminder' by the reminder bot, we don't mark it as 'stale' again.\nexemptLabels:\n # This label is hardcoded on remind bot (https://probot.github.io/apps/reminders/) and is used by remind bot when\n # issue is being reminded.\n - reminder\n - "state: someone-working-on-it"\n - dont-go-stale\n\n# Set to true to ignore issues in a project (defaults to false)\nexemptProjects: false\n# Set to true to ignore issues in a milestone (defaults to false)\nexemptMilestones: false\n# Set to true to ignore issues with an assignee (defaults to false)\nexemptAssignees: false\n# Label to use when marking as stale\nstaleLabel: stale\n\npulls:\n daysUntilClose: 7\n daysUntilStale: 30\n markComment: >\n Hello 👋 Looks like there was no activity on this amazing PR for the last 30 days.\n\n **Do you mind updating us on the status?** Is there anything we can help with? If you plan to still work on it, just comment on this PR or push a commit. Thanks! 🤗\n\n If there will be no activity in the next week, this issue will be closed (we can always reopen a PR if you get back to this!).\n Alternatively, use [`remind` command](https://probot.github.io/apps/reminders/) if you wish to be reminded at some point in future.\n #unmarkComment: No need for unmark comment.\n closeComment: >\n Closing for now as promised, let us know if you need this to be reopened! 🤗\n\nissues:\n daysUntilClose: 14\n daysUntilStale: 60\n markComment: >\n Hello 👋 Looks like there was no activity on this issue for the last two months.\n\n **Do you mind updating us on the status?** Is this still reproducible or needed? If yes, just comment on this PR or push a commit. Thanks! 🤗\n\n If there will be no activity in the next two weeks, this issue will be closed (we can always reopen an issue if we need!).\n Alternatively, use [`remind` command](https://probot.github.io/apps/reminders/) if you wish to be reminded at some point in future.\n #unmarkComment: No need for unmark comment.\n closeComment: >\n Closing for now as promised, let us know if you need this to be reopened! 🤗\n\n# Limit the number of actions per hour, from 1-30. Default is 30\nlimitPerRun: 30\n
| dataset_sample\yaml\thanos-io_thanos\.github\stale.yml | stale.yml | YAML | 2,549 | 0.8 | 0.259259 | 0.363636 | react-lib | 214 | 2024-03-08T10:30:08.537762 | GPL-3.0 | false | ecd5a51cf5ed1a332d8d592af08dd2ee |
blank_issues_enabled: false # Show or hide the Create a blank issue choice when users select New issue.\ncontact_links:\n - name: "Questions via Thanos Community Support via the CNCF slack - #thanos"\n url: https://cloud-native.slack.com/archives/CK5RSSC10.\n about: "Join us for questions, answers or Thanos related chat. Please do create issues on Github for better collaboration. If you don't have an account, sign up at https://slack.cncf.io/"\n - name: "Question via Thanos Discussions (similar to Stack Overflow)"\n url: https://github.com/thanos-io/thanos/discussions\n about: "Please ask and answer questions here for async response."
| dataset_sample\yaml\thanos-io_thanos\.github\ISSUE_TEMPLATE\config.yml | config.yml | YAML | 649 | 0.8 | 0.428571 | 0 | node-utils | 526 | 2024-03-14T21:26:16.048694 | Apache-2.0 | false | 1c5695710f5c5e582358f7df0f63cc91 |
# For most projects, this workflow file will not need changing; you simply need\n# to commit it to your repository.\n#\n# You may wish to alter this file to override the set of languages analyzed,\n# or to provide custom queries or build logic.\n#\n# ******** NOTE ********\n# We have attempted to detect the languages in your repository. Please check\n# the `language` matrix defined below to confirm you have the correct set of\n# supported CodeQL languages.\n#\nname: "CodeQL"\n\non:\n push:\n branches: [ main ]\n pull_request:\n # The branches below must be a subset of the branches above\n branches: [ main ]\n schedule:\n - cron: '30 12 * * 1'\n\npermissions: \n contents: read\n security-events: write\n\njobs:\n analyze:\n name: Analyze\n runs-on: ubuntu-latest\n\n strategy:\n fail-fast: false\n matrix:\n language: [ 'go', 'javascript' ]\n # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]\n # Learn more:\n # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed\n\n steps:\n - name: Checkout code\n uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7\n\n - name: Set up Go\n uses: actions/setup-go@0a12ed9d6a96ab950c8f026ed9f722fe0da7ef32 # v5.0.2\n with:\n go-version: 1.22.x\n\n # Initializes the CodeQL tools for scanning.\n - name: Initialize CodeQL\n uses: github/codeql-action/init@f779452ac5af1c261dce0346a8f964149f49322b # v3.26.13\n with:\n languages: ${{ matrix.language }}\n config-file: ./.github/codeql/codeql-config.yml\n # If you wish to specify custom queries, you can do so here or in a config file.\n # By default, queries listed here will override any specified in a config file.\n # Prefix the list here with "+" to use these queries and those in the config file.\n # queries: ./path/to/local/query, your-org/your-repo/queries@main\n\n # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).\n # If this step fails, then you should remove it and run the build manually (see below)\n - name: Autobuild\n uses: github/codeql-action/autobuild@f779452ac5af1c261dce0346a8f964149f49322b # v3.26.13\n\n # ℹ️ Command-line programs to run using the OS shell.\n # 📚 https://git.io/JvXDl\n\n # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines\n # and modify them (or add more) to build your code if your project\n # uses a compiled language\n\n #- run: |\n # make bootstrap\n # make release\n\n - name: Perform CodeQL Analysis\n uses: github/codeql-action/analyze@f779452ac5af1c261dce0346a8f964149f49322b # v3.26.13\n
| dataset_sample\yaml\thanos-io_thanos\.github\workflows\codeql-analysis.yml | codeql-analysis.yml | YAML | 2,810 | 0.8 | 0.025974 | 0.461538 | awesome-app | 502 | 2025-01-23T04:30:47.261425 | MIT | false | a82d8124760adb594b134e751b2c21c6 |
name: react\n\non:\n push:\n branches:\n - main\n pull_request:\n\npermissions: \n contents: read\n\njobs:\n build:\n runs-on: ubuntu-latest\n strategy:\n matrix:\n node: [ '14' ]\n name: React UI test on Node ${{ matrix.node }}\n steps:\n - name: Checkout code\n uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7\n\n - name: Install nodejs\n uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3\n with:\n node-version: ${{ matrix.node }}\n\n - uses: actions/cache@0c907a75c2c80ebcb7f088228285e798b750cf8f # v4.2.1\n with:\n path: ~/.npm\n key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}\n restore-keys: |\n ${{ runner.os }}-node-\n\n - run: CI=false make check-react-app\n - run: make react-app-test\n
| dataset_sample\yaml\thanos-io_thanos\.github\workflows\react.yml | react.yml | YAML | 868 | 0.8 | 0 | 0 | vue-tools | 685 | 2024-11-24T13:10:30.241029 | MIT | false | 9987b7af04eafa72a9b8cf0add7e3015 |
---\nadopters:\n- name: Aiven\n url: https://aiven.io\n logo: aivenlogo.png\n- name: Infinite Devices\n url: https://infinimesh.io\n logo: infdevlogo.png\n- name: Monzo\n url: https://www.monzo.com\n logo: monzo.png\n- name: UW\n url: https://uw.co.uk/\n logo: utilitywarehouse.png\n- name: Adform\n url: https://site.adform.com\n logo: Adform_logo_RGB.png\n- name: Seznam.cz\n url: https://www.seznam.cz/\n logo: seznam.png\n- name: tiket.com\n url: https://www.tiket.com\n logo: tiket.png\n- name: Uswitch\n url: https://www.uswitch.com\n logo: uswitch.png\n- name: Qonto\n url: https://qonto.eu\n logo: qonto.png\n- name: Meltwater\n url: https://underthehood.meltwater.com\n logo: meltwater.png\n- name: Jetstack\n url: https://www.jetstack.io\n logo: jetstack.png\n- name: ProSiebenSat.1\n url: https://www.prosiebensat1.com/en/\n logo: p7s1.png\n- name: Adobe\n url: https://www.adobe.com\n logo: Adobe_logo.png\n- name: Ebay\n url: https://www.ebay.co.uk/\n logo: ebay.png\n- name: Sauce Labs\n url: https://saucelabs.com/\n logo: saucelabs.png\n- name: XING\n url: https://www.xing.com/\n logo: xing.png\n- name: Untab\n url: https://www.untab.io/\n logo: untab.png\n- name: Hotstar\n url: https://www.hotstar.com/\n logo: hotstar.jpg\n- name: UBIO\n url: https://ub.io/\n logo: ubio.png\n- name: Softonic\n url: https://www.softonic.com\n logo: softonic.png\n- name: BlaBlaCar\n url: https://www.blablacar.com\n logo: blablacar.png\n- name: FREE NOW\n url: https://free-now.com\n logo: free-now.png\n- name: Amadeus\n url: https://amadeus.com\n logo: amadeus.png\n- name: Blinkit\n url: https://blinkit.com\n logo: blinkit.png\n- name: Tencent\n url: https://github.com/tkestack/tke\n logo: tencent.png\n- name: VEXXHOST, Inc.\n url: https://vexxhost.com\n logo: vexxhost.png\n- name: World Wide Technology\n url: https://www.wwt.com\n logo: wwt.png\n- name: Banzai Cloud\n url: https://banzaicloud.com/\n logo: banzaicloud.png\n- name: Jurumani Solutions\n url: https://www.jurumani.co.za/\n logo: jurumanisolutions.png\n- name: Tangent Solutions\n url: https://www.tangentsolutions.co.za/\n logo: tangentsolutions.png\n- name: Toss\n url: https://toss.im/en\n logo: toss.png\n- name: Ozon\n url: https://www.ozon.ru\n logo: ozon.png\n- name: MALL Group\n url: https://www.mallgroup.com/\n logo: mallgroup.png\n- name: leboncoin\n url: https://www.leboncoin.fr/\n logo: leboncoin.png\n- name: Workfront\n url: https://www.workfront.com/\n logo: workfront.png\n- name: Cloud&Heat\n url: https://cloudandheat.com\n logo: cloudandheat.png\n- name: Hetzner\n url: https://www.hetzner.com\n logo: hetzner.png\n- name: FLAT ZONE s.r.o.\n url: https://www.flatzone.cz\n logo: flatzone.png\n- name: Beat\n url: https://thebeat.co\n logo: thebeat.png\n- name: Mobiuspace\n url: https://www.mobiuspace.com\n logo: mobiuspace.png\n- name: TiDB Cloud\n url: https://tidbcloud.com\n logo: tidbcloud.png\n- name: Talend\n url: https://www.talend.com\n logo: talend.png\n- name: NOS\n url: https://www.nos.pt\n logo: nos.png\n- name: Truphone\n url: https://www.truphone.com\n logo: truphone.png\n- name: de Volksbank\n url: https://www.devolksbank.nl/\n logo: devolksbank.png\n- name: Fullstaq\n url: https://fullstaq.com/\n logo: fullstaq.png\n- name: Wise\n url: https://wise.com\n logo: wise.png\n- name: ByteDance\n url: https://www.bytedance.com/\n logo: bytedance.png\n- name: Joblift\n url: https://joblift.com/\n logo: joblift.png\n- name: Civo\n url: https://civo.com/\n logo: civo.png\n- name: Stackhero\n url: https://www.stackhero.io/\n logo: 
stackhero.png\n- name: PagBank\n url: https://pagseguro.com.br/\n logo: pagbank.png\n- name: Itaú Unibanco\n url: https://www.itau.com.br/\n logo: itau-unibanco.png\n- name: LabyrinthLabs\n url: https://lablabs.io\n logo: lablabs.png\n- name: Darwinbox Digital Solutions\n url: https://darwinbox.com\n logo: darwinbox.png\n- name: Wehkamp\n url: https://www.wehkamp.nl\n logo: wehkamp.png\n- name: SoundCloud\n url: https://www.soundcloud.com\n logo: soundcloud.png\n- name: Hyperia\n url: https://www.hyperia.sk/\n logo: hyperia.png\n- name: Epidemic Sound\n url: https://www.epidemicsound.com/\n logo: epidemicsound.png\n- name: Kollective\n url: https://www.kollective.com/\n logo: kollective.png\n- name: SumUp\n url: https://sumup.com/\n logo: sumup.png\n- name: TIXnGO\n url: https://www.tixngo.io\n logo: tixngo.png\n- name: Vestiaire Collective\n url: https://vestiairecollective.com/\n logo: vestiairecollective.png\n- name: South China Morning Post (SCMP)\n url: https://www.scmp.com/\n logo: scmp.png\n- name: Authzed\n url: https://authzed.com\n logo: authzed.png\n- name: OYO\n url: https://www.oyorooms.com/\n logo: OYO.png\n- name: Udaan\n url: https://udaan.com/\n logo: udaan.png\n- name: Adfinis\n url: https://adfinis.com/en/\n logo: adfinis.png\n- name: Kakaopay\n url: https://kakaopay.com/?locale=en\n logo: kakaopay.png\n- name: Arlo\n url: https://www.arlo.com/en-us/\n logo: arlo.png\n- name: Grupo MasMovil\n url: https://grupomasmovil.com/en/\n logo: gmasmovil.png\n- name: Open Systems\n url: https://www.open-systems.com\n logo: open-systems.png\n- name: Deno\n url: https://deno.com/deploy\n logo: deno.png\n- name: CarTrade Tech\n url: https://www.cartradetech.com\n logo: cartradetech.png\n- name: DE-CIX\n url: https://www.de-cix.net\n logo: de-cix.png\n- name: Zenduty\n url: https://www.zenduty.com\n logo: zenduty.png\n- name: Banco do Brasil\n url: https://www.bb.com.br/\n logo: bb.png\n- name: Grupo OLX\n url: https://www.olx.com.br/\n logo: grupo-olx.png\n- name: TrueLayer\n url: https://truelayer.com/\n logo: truelayer.png\n- name: Platform Engineers\n url: https://platformengineers.io\n logo: platformengineers.png\n- name: Shield\n url: https://shield.com\n logo: shield.png\n- name: Conclusion Xforce\n url: https://conclusionxforce.com\n logo: conclusion-xforce.png\n- name: Nirmata\n url: https://nirmata.com/\n logo: Nirmata.png\n- name: Synology\n url: https://www.synology.com/\n logo: synology.png\n- name: XNET\n url: https://xnet.company/\n logo: XNET.png\n- name: Logz.io\n url: https://logz.io\n logo: logzio.png\n
| dataset_sample\yaml\thanos-io_thanos\website\data\adopters.yml | adopters.yml | YAML | 6,036 | 0.8 | 0 | 0 | python-kit | 864 | 2025-05-15T14:42:37.166139 | MIT | false | de88634d284083d4d900f47a14a41da7 |
plugins:\n - rubocop-rake\n - rubocop-rspec\n\nAllCops:\n NewCops: enable\n TargetRubyVersion: 3.2\n Exclude:\n - Rakefile\n\nStyle/Documentation:\n Enabled: false\n\nNaming/AccessorMethodName:\n Exclude:\n - ruby/rack-routing/app/route_handler.rb\n\nLint/ShadowingOuterLocalVariable:\n Exclude:\n - .tasks/*.rake\n\nMetrics/BlockLength:\n Exclude:\n - .tasks/*.rake\n\nLint/AmbiguousBlockAssociation:\n Exclude:\n - .tasks/*.rake\nLayout/LineLength:\n Exclude:\n - .tasks/*.rake\nMetrics/ParameterLists:\n Exclude:\n - .tasks/*.rake\nStyle/FrozenStringLiteralComment:\n EnforcedStyle: never\n
| dataset_sample\yaml\the-benchmarker_web-frameworks\.rubocop.yml | .rubocop.yml | YAML | 591 | 0.8 | 0 | 0 | awesome-app | 429 | 2025-05-04T12:40:58.598891 | Apache-2.0 | false | 6fdb7ea92b5872ad88d06cddc2452a42 |
coverage:\n precision: 4\n round: down\n range: "65...90"\n\n status:\n project:\n default:\n target: auto\n threshold: 3% # Allow the coverage to drop by threshold %, and posting a success status.\n patch:\n default:\n target: auto\n threshold: 3%\n\ncomment:\n layout: "header, diff, flags"\n behavior: default\n require_changes: false\n\nflag_management:\n default_rules: # the rules that will be followed for any flag added, generally\n carryforward: true\n statuses:\n - type: project\n target: 85%\n - type: patch\n target: 85%\n\n# Ref: https://docs.codecov.com/docs/ignoring-paths\nignore:\n - tests # integration test cases or tools.\n - fuzz # fuzz test cases or tools.\n - components/test_*/**\n - components/*_tests/**\n - components/*/tests\n
| dataset_sample\yaml\tikv_tikv\codecov.yml | codecov.yml | YAML | 806 | 0.95 | 0.027778 | 0.03125 | awesome-app | 163 | 2024-11-16T05:28:04.051605 | Apache-2.0 | false | d763abda7e4d02f455044247f0fcdacf |
groups:\n - name: tikv_accelerate\n rules:\n - record: tikv_grpc_msg_duration_seconds:p99:1m\n expr: histogram_quantile(0.99, sum(rate(tikv_grpc_msg_duration_seconds_bucket{instance=~".*", type!="kv_gc"}[1m])) by (le, type))\n - record: tikv_raftstore_event_duration_bucket:p99:1m\n expr: histogram_quantile(0.99, sum(rate(tikv_raftstore_event_duration_bucket{instance=~".*"}[1m])) by (le, type))\n - record: tikv_thread_cpu_seconds:1m\n expr: sum(rate(tikv_thread_cpu_seconds_total{instance=~".*"}[1m])) by (instance)\n - record: tikv_raftstore_append_log_duration_seconds:p99:1m\n expr: histogram_quantile(0.99, sum(rate(tikv_raftstore_append_log_duration_seconds_bucket{instance=~".*"}[1m])) by (le, instance))\n - record: tikv_raftstore_raft_process_duration_secs:p99:1m\n expr: histogram_quantile(0.99, sum(rate(tikv_raftstore_raft_process_duration_secs_bucket{instance=~".*", type='ready'}[1m])) by (le, instance))\n - record: tikv_raftstore_request_wait_time_duration_secs:byins:p99:1m\n expr: histogram_quantile(0.99, sum(rate(tikv_raftstore_request_wait_time_duration_secs_bucket{instance=~".*"}[1m])) by (le, instance))\n - record: tikv_raftstore_append_log_duration_seconds:p95:1m\n expr: histogram_quantile(0.95, sum(rate(tikv_raftstore_append_log_duration_seconds_bucket{instance=~".*"}[1m])) by (le))\n - record: tikv_raftstore_apply_wait_time_duration_secs:byins:p99:1m\n expr: histogram_quantile(0.99, sum(rate(tikv_raftstore_apply_wait_time_duration_secs_bucket{instance=~".*"}[1m])) by (le, instance))\n - record: tikv_raftstore_apply_log_duration_seconds:p99:1m\n expr: histogram_quantile(0.99, sum(rate(tikv_raftstore_apply_log_duration_seconds_bucket{instance=~".*"}[1m])) by (le, instance))\n - record: tikv_raftstore_request_wait_time_duration_secs:p99:1m\n expr: histogram_quantile(0.99, sum(rate(tikv_raftstore_request_wait_time_duration_secs_bucket{instance=~".*"}[1m])) by (le))\n - record: tikv_raftstore_request_wait_time_duration_secs:p95:1m\n expr: histogram_quantile(0.95, sum(rate(tikv_raftstore_request_wait_time_duration_secs_bucket{instance=~".*"}[1m])) by (le))\n - record: tikv_worker_handled_task:1m\n expr: sum(rate(tikv_worker_handled_task_total{instance=~".*"}[1m])) by (name)\n - record: tikv_engine_num_files_at_level:kv:avg\n expr: avg(tikv_engine_num_files_at_level{instance=~".*", db="kv"}) by (cf, level)\n - record: tikv_engine_num_files_at_level:raft:avg\n expr: avg(tikv_engine_num_files_at_level{instance=~".*", db="raft"}) by (cf, level)\n - record: tikv_pd_request_duration_seconds:avg:1m\n expr: sum(rate(tikv_pd_request_duration_seconds_sum{instance=~".*"}[1m])) by (type) / sum(rate(tikv_pd_request_duration_seconds_count{instance=~".*"}[1m])) by (type)\n - record: tikv_coprocessor_request_wait_seconds:p95:1m\n expr: histogram_quantile(0.95, sum(rate(tikv_coprocessor_request_wait_seconds_bucket{instance=~".*", type="all"}[1m])) by (le, instance,req))\n - record: tikv_grpc_msg_duration_seconds:avg:1m\n expr: sum(rate(tikv_grpc_msg_duration_seconds_sum{instance=~".*"}[1m])) by (type) / sum(rate(tikv_grpc_msg_duration_seconds_count[1m])) by (type)\n - record: tikv_raftstore_apply_wait_time_duration_secs:p99:1m\n expr: histogram_quantile(0.99, sum(rate(tikv_raftstore_apply_wait_time_duration_secs_bucket{instance=~".*"}[1m])) by (le))\n - record: tikv_raftstore_apply_wait_time_duration_secs:p95:1m\n expr: histogram_quantile(0.95, sum(rate(tikv_raftstore_apply_wait_time_duration_secs_bucket{instance=~".*"}[1m])) by (le))\n - record: tikv_grpc_msg_duration_seconds:1m\n expr: 
sum(rate(tikv_grpc_msg_duration_seconds_count{instance=~".*", type!="kv_gc"}[1m])) by (instance,type)\n - record: tikv_raftstore_snapshot_duration_seconds:p99:1m\n expr: histogram_quantile(0.99, sum(rate(tikv_raftstore_snapshot_duration_seconds_bucket{instance=~".*", type="apply"}[1m])) by (le))\n - record: tikv_worker_pending_task:sum\n expr: sum(tikv_worker_pending_task_total{instance=~".*"}) by (name)\n - record: tikv_coprocessor_request_duration_seconds:1m\n expr: sum(rate(tikv_coprocessor_request_duration_seconds_bucket{instance=~".*"}[1m])) by (le)\n - record: tikv_futurepool_pending_task:1m\n expr: sum(avg_over_time(tikv_futurepool_pending_task_total{instance=~".*"}[1m])) by (name)\n - record: tikv_storage_engine_async_request:1m\n expr: sum(rate(tikv_storage_engine_async_request_total{instance=~".*", status!~"all|success"}[1m])) by (status)\n - record: tikv_thread_cpu_seconds_nogrpc:1m\n expr: sum(rate(tikv_thread_cpu_seconds_total{instance=~".*", name=~"grpc.*"}[1m])) by (instance)\n
| dataset_sample\yaml\tikv_tikv\metrics\alertmanager\tikv.accelerate.rules.yml | tikv.accelerate.rules.yml | YAML | 4,706 | 0.7 | 0 | 0 | vue-tools | 183 | 2025-07-08T17:47:26.369590 | Apache-2.0 | false | 095d4a14950022ad75dc0aeb1cfe18a6 |
groups:\n- name: alert.rules\n rules:\n - alert: TiKV_critical_error\n expr: sum(rate(tikv_critical_error_total[1m])) BY (type, instance) > 0\n # without the for clause will become active on the first evaluation.\n labels:\n env: ENV_LABELS_ENV\n level: critical\n expr: sum(rate(tikv_critical_error_total[1m])) BY (type, instance) > 0\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values:{{ $value }}'\n value: '{{ $value }}'\n summary: TiKV encounters critical error\n\n - alert: TiKV_memory_used_too_fast\n expr: process_resident_memory_bytes{job=~"tikv",instance=~".*"} - (process_resident_memory_bytes{job=~"tikv",instance=~".*"} offset 5m) > 5*1024*1024*1024\n for: 5m\n labels:\n env: ENV_LABELS_ENV\n level: emergency\n expr: process_resident_memory_bytes{job=~"tikv",instance=~".*"} - (process_resident_memory_bytes{job=~"tikv",instance=~".*"} offset 5m) > 5*1024*1024*1024\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values: {{ $value }}'\n value: '{{ $value }}'\n summary: TiKV memory used too fast\n\n - alert: TiKV_GC_can_not_work\n expr: sum(increase(tikv_gcworker_gc_tasks_vec{task="gc"}[1d])) < 1 and (sum(increase(tikv_gc_compaction_filter_perform[1d])) < 1 and sum(increase(tikv_engine_event_total{db="kv", cf="write", type="compaction"}[1d])) >= 1)\n for: 5m\n labels:\n env: ENV_LABELS_ENV\n level: emergency\n expr: sum(increase(tikv_gcworker_gc_tasks_vec{task="gc"}[1d])) < 1\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values:{{ $value }}'\n value: '{{ $value }}'\n summary: TiKV GC can not work\n\n - alert: TiKV_server_report_failure_msg_total\n expr: sum(rate(tikv_server_report_failure_msg_total{type="unreachable"}[10m])) BY (store_id) > 10\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: critical\n expr: sum(rate(tikv_server_report_failure_msg_total{type="unreachable"}[10m])) BY (store_id) > 10\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values:{{ $value }}'\n value: '{{ $value }}'\n summary: TiKV server_report_failure_msg_total error\n\n - alert: TiKV_channel_full_total\n expr: sum(rate(tikv_channel_full_total[10m])) BY (type, instance) > 0\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: critical\n expr: sum(rate(tikv_channel_full_total[10m])) BY (type, instance) > 0\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values:{{ $value }}'\n value: '{{ $value }}'\n summary: TiKV channel full\n\n - alert: TiKV_write_stall\n expr: delta( tikv_engine_write_stall[10m]) > 0\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: critical\n expr: delta( tikv_engine_write_stall[10m]) > 0\n annotations:\n description: 'cluster: ENV_LABELS_ENV, type: {{ $labels.type }}, instance: {{ $labels.instance }}, values: {{ $value }}'\n value: '{{ $value }}'\n summary: TiKV write stall\n\n - alert: TiKV_maybe_write_stall\n expr: max(tikv_scheduler_l0_avg) by (instance) > 80\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: critical\n expr: max(tikv_scheduler_l0_avg) by (instance) > 80\n annotations:\n description: 'cluster: ENV_LABELS_ENV, type: {{ $labels.type }}, instance: {{ $labels.instance }}, values: {{ $value }}'\n value: '{{ $value }}'\n summary: TiKV the average number of L0 files exceeds 80\n\n - alert: TiKV_raft_log_lag\n expr: histogram_quantile(0.99, sum(rate(tikv_raftstore_log_lag_bucket[1m])) by (le, instance)) > 5000\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: 
critical\n expr: histogram_quantile(0.99, sum(rate(tikv_raftstore_log_lag_bucket[1m])) by (le, instance)) > 5000\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance {{ $labels.instance }}, values: {{ $value }}'\n value: '{{ $value }}'\n summary: TiKV raftstore log lag more than 5000\n\n - alert: TiKV_async_request_snapshot_duration_seconds\n expr: histogram_quantile(0.99, sum(rate(tikv_storage_engine_async_request_duration_seconds_bucket{type="snapshot"}[1m])) by (le, instance, type)) > 1\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: critical\n expr: histogram_quantile(0.99, sum(rate(tikv_storage_engine_async_request_duration_seconds_bucket{type="snapshot"}[1m])) by (le, instance, type)) > 1\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values:{{ $value }}'\n value: '{{ $value }}'\n summary: TiKV async request snapshot duration seconds more than 1s\n\n - alert: TiKV_async_request_write_duration_seconds\n expr: histogram_quantile(0.99, sum(rate(tikv_storage_engine_async_request_duration_seconds_bucket{type="write"}[1m])) by (le, instance, type)) > 1\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: critical\n expr: histogram_quantile(0.99, sum(rate(tikv_storage_engine_async_request_duration_seconds_bucket{type="write"}[1m])) by (le, instance, type)) > 1\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values:{{ $value }}'\n value: '{{ $value }}'\n summary: TiKV async request write duration seconds more than 1s\n\n - alert: TiKV_coprocessor_request_wait_seconds\n expr: histogram_quantile(0.9999, sum(rate(tikv_coprocessor_request_wait_seconds_bucket{type="all"}[1m])) by (le, instance, req)) > 10\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: critical\n expr: histogram_quantile(0.9999, sum(rate(tikv_coprocessor_request_wait_seconds_bucket{type="all"}[1m])) by (le, instance, req)) > 10\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values:{{ $value }}'\n value: '{{ $value }}'\n summary: TiKV coprocessor request wait seconds more than 10s\n\n - alert: TiKV_raftstore_thread_cpu_seconds_total\n expr: sum(rate(tikv_thread_cpu_seconds_total{name=~"(raftstore|rs)_.*"}[1m])) by (instance) > 1.6\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: critical\n expr: sum(rate(tikv_thread_cpu_seconds_total{name=~"(raftstore|rs)_.*"}[1m])) by (instance) > 1.6\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values:{{ $value }}'\n value: '{{ $value }}'\n summary: TiKV raftstore thread CPU seconds is high\n\n - alert: TiKV_raft_append_log_duration_secs\n expr: histogram_quantile(0.99, sum(rate(tikv_raftstore_append_log_duration_seconds_bucket[1m])) by (le, instance)) > 1\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: critical\n expr: histogram_quantile(0.99, sum(rate(tikv_raftstore_append_log_duration_seconds_bucket[1m])) by (le, instance)) > 1\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values:{{ $value }}'\n value: '{{ $value }}'\n summary: TiKV_raft_append_log_duration_secs\n\n - alert: TiKV_raft_apply_log_duration_secs\n expr: histogram_quantile(0.99, sum(rate(tikv_raftstore_apply_log_duration_seconds_bucket[1m])) by (le, instance)) > 1\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: critical\n expr: histogram_quantile(0.99, sum(rate(tikv_raftstore_apply_log_duration_seconds_bucket[1m])) by (le, instance)) > 1\n annotations:\n description: 'cluster: ENV_LABELS_ENV, 
instance: {{ $labels.instance }}, values:{{ $value }}'\n value: '{{ $value }}'\n summary: TiKV_raft_apply_log_duration_secs\n\n - alert: TiKV_scheduler_latch_wait_duration_seconds\n expr: histogram_quantile(0.99, sum(rate(tikv_scheduler_latch_wait_duration_seconds_bucket[1m])) by (le, instance, type)) > 1\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: critical\n expr: histogram_quantile(0.99, sum(rate(tikv_scheduler_latch_wait_duration_seconds_bucket[1m])) by (le, instance, type)) > 1\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values:{{ $value }}'\n value: '{{ $value }}'\n summary: TiKV scheduler latch wait duration seconds more than 1s\n\n - alert: TiKV_thread_apply_worker_cpu_seconds\n expr: max(rate(tikv_thread_cpu_seconds_total{name="apply_.*"}[1m])) by (instance) > 0.9\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: critical\n expr: max(rate(tikv_thread_cpu_seconds_total{name="apply_.*"}[1m])) by (instance) > 0.9\n annotations:\n description: 'cluster: ENV_LABELS_ENV, type: {{ $labels.type }}, instance: {{ $labels.instance }}, values: {{ $value }}'\n value: '{{ $value }}'\n summary: TiKV thread apply worker cpu seconds is high\n\n - alert: TiDB_tikvclient_gc_action_fail\n expr: sum(increase(tidb_tikvclient_gc_action_result{type="fail"}[1m])) > 10\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: critical\n expr: sum(increase(tidb_tikvclient_gc_action_result{type="fail"}[1m])) > 10\n annotations:\n description: 'cluster: ENV_LABELS_ENV, type: {{ $labels.type }}, instance: {{ $labels.instance }}, values: {{ $value }}'\n value: '{{ $value }}'\n summary: TiDB_tikvclient_gc_action_fail\n\n - alert: TiKV_leader_drops\n expr: delta(tikv_pd_heartbeat_tick_total{type="leader"}[30s]) < -10\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: warning\n expr: delta(tikv_pd_heartbeat_tick_total{type="leader"}[30s]) < -10\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values:{{ $value }}'\n value: '{{ $value }}'\n summary: TiKV leader drops\n\n - alert: TiKV_raft_process_ready_duration_secs\n expr: histogram_quantile(0.999, sum(rate(tikv_raftstore_raft_process_duration_secs_bucket{type='ready'}[1m])) by (le, instance, type)) > 2\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: warning\n expr: histogram_quantile(0.999, sum(rate(tikv_raftstore_raft_process_duration_secs_bucket{type='ready'}[1m])) by (le, instance, type)) > 2\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values: {{ $value }}'\n value: '{{ $value }}'\n summary: TiKV_raft_process_ready_duration_secs\n\n - alert: TiKV_raft_process_tick_duration_secs\n expr: histogram_quantile(0.999, sum(rate(tikv_raftstore_raft_process_duration_secs_bucket{type='tick'}[1m])) by (le, instance, type)) > 2\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: warning\n expr: histogram_quantile(0.999, sum(rate(tikv_raftstore_raft_process_duration_secs_bucket{type='tick'}[1m])) by (le, instance, type)) > 2\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values: {{ $value }}'\n value: '{{ $value }}'\n summary: TiKV_raft_process_tick_duration_secs\n\n - alert: TiKV_scheduler_context_total\n expr: abs(delta( tikv_scheduler_contex_total[5m])) > 1000\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: warning\n expr: abs(delta( tikv_scheduler_contex_total[5m])) > 1000\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values:{{ $value }}'\n value: 
'{{ $value }}'\n summary: TiKV scheduler context total\n\n - alert: TiKV_scheduler_command_duration_seconds\n expr: histogram_quantile(0.99, sum(rate(tikv_scheduler_command_duration_seconds_bucket[1m])) by (le, instance, type) / 1000) > 1\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: warning\n expr: histogram_quantile(0.99, sum(rate(tikv_scheduler_command_duration_seconds_bucket[1m])) by (le, instance, type) / 1000) > 1\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values:{{ $value }}'\n value: '{{ $value }}'\n summary: TiKV scheduler command duration seconds more than 1s\n\n - alert: TiKV_coprocessor_pending_request\n expr: delta( tikv_coprocessor_pending_request[10m]) > 5000\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: warning\n expr: delta( tikv_coprocessor_pending_request[10m]) > 5000\n annotations:\n description: 'cluster: ENV_LABELS_ENV, type: {{ $labels.type }}, instance: {{ $labels.instance }}, values: {{ $value }}'\n value: '{{ $value }}'\n summary: TiKV pending {{ $labels.type }} request is high\n\n - alert: TiKV_coprocessor_cpu_util\n expr: sum(rate(tikv_thread_cpu_seconds_total{name=~"cop_.*"}[1m])) by (instance) / (count(tikv_thread_cpu_seconds_total{name=~"cop_.*"}) by (instance) * 0.9) >= 1\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: warning\n expr: sum(rate(tikv_thread_cpu_seconds_total{name=~"cop_.*"}[1m])) by (instance) / (count(tikv_thread_cpu_seconds_total{name=~"cop_.*"}) by (instance) * 0.9) >= 1\n annotations:\n description: 'cluster: ENV_LABELS_ENV, type: {{ $labels.type }}, instance: {{ $labels.instance }}, values: {{ $value }}'\n value: '{{ $value }}'\n summary: TiKV coprocessor CPU utilization exceeds 90%\n\n - alert: TiKV_pending_task\n expr: sum(tikv_worker_pending_task_total) BY (instance,name) > 1000\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: warning\n expr: sum(tikv_worker_pending_task_total) BY (instance,name) > 1000\n annotations:\n description: 'cluster: ENV_LABELS_ENV, type: {{ $labels.type }}, instance: {{ $labels.instance }}, values: {{ $value }}'\n value: '{{ $value }}'\n summary: TiKV pending task too much\n\n - alert: TiKV_low_space\n expr: sum(tikv_store_size_bytes{type="available"}) by (instance) / sum(tikv_store_size_bytes{type="capacity"}) by (instance) < 0.2\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: warning\n expr: sum(tikv_store_size_bytes{type="available"}) by (instance) / sum(tikv_store_size_bytes{type="capacity"}) by (instance) < 0.2\n annotations:\n description: 'cluster: ENV_LABELS_ENV, type: {{ $labels.type }}, instance: {{ $labels.instance }}, values: {{ $value }}'\n value: '{{ $value }}'\n summary: TiKV available disk space too low\n\n - alert: TiKV_approximate_region_size\n expr: histogram_quantile(0.99, sum(rate(tikv_raftstore_region_size_bucket[1m])) by (le)) > 1073741824\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: warning\n expr: histogram_quantile(0.99, sum(rate(tikv_raftstore_region_size_bucket[1m])) by (le)) > 1073741824\n annotations:\n description: 'cluster: ENV_LABELS_ENV, type: {{ $labels.type }}, instance: {{ $labels.instance }}, values: {{ $value }}'\n value: '{{ $value }}'\n summary: TiKV approximate region size is more than 1GB\n\n - alert: TiKV_node_restart\n expr: changes(process_start_time_seconds{job="tikv"}[5m]) > 0\n for: 1m\n labels:\n env: ENV_LABELS_ENV\n level: warning\n expr: changes(process_start_time_seconds{job="tikv"}[5m]) > 0\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, 
values:{{ $value }}'\n value: '{{ $value }}'\n summary: TiKV server has been restarted\n\n - alert: TiKV_cpu_quota\n expr: irate(process_cpu_seconds_total{job="tikv"}[30s]) / tikv_server_cpu_cores_quota > 0.8\n for: 45s\n labels:\n env: ENV_LABELS_ENV\n level: warning\n expr: irate(process_cpu_seconds_total{job="tikv"}[30s]) / tikv_server_cpu_cores_quota > 0.8\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values:{{ $value }}'\n value: '{{ $value }}'\n summary: TiKV CPU usage is over 80% of CPU quota\n\n - alert: TiKV_memory_quota\n expr: process_resident_memory_bytes{job="tikv"} / tikv_server_memory_quota_bytes > 0.8\n for: 15s\n labels:\n env: ENV_LABELS_ENV\n level: warning\n expr: process_resident_memory_bytes{job="tikv"} / tikv_server_memory_quota_bytes > 0.8\n annotations:\n description: 'cluster: ENV_LABELS_ENV, instance: {{ $labels.instance }}, values:{{ $value }}'\n value: '{{ $value }}'\n summary: TiKV memory usage is over 80% of memory quota\n
| dataset_sample\yaml\tikv_tikv\metrics\alertmanager\tikv.rules.yml | tikv.rules.yml | YAML | 16,175 | 0.8 | 0.082873 | 0.003003 | vue-tools | 518 | 2024-10-03T07:00:36.310088 | GPL-3.0 | false | 78e2892f34234384013fee92b301ea01 |
# If it says that commit YAML is invalid again,\n# validate it with:\n# curl --data-binary @.codecov.yml https://codecov.io/validate\n# More docs: https://docs.codecov.com/docs/codecov-yaml#repository-yaml\nignore:\n - "test/src"\ncoverage:\n status:\n project:\n default:\n flags:\n - pr\n
| dataset_sample\yaml\timescale_timescaledb\.codecov.yml | .codecov.yml | YAML | 304 | 0.8 | 0 | 0.333333 | awesome-app | 648 | 2024-09-18T14:38:15.542913 | BSD-3-Clause | false | 99f7e6199293473e38b0eb8d56c20261 |
# Licensed under the Apache License, Version 2.0 or the MIT License.\n# SPDX-License-Identifier: Apache-2.0 OR MIT\n# Copyright Tock Contributors 2023.\n\n# This is a configuration file for the labeler github action.\n# The labeler action runs uses this configuration to automatically\n# label each PR submitted to the Tock repository, by applying labels\n# that match the corresponding glob paths. More details on the rules\n# that apply to this configuration file can be found in the documentation\n# at https://github.com/actions/labeler\n\nstm32:\n - chips/stm*/**/*\n\nnrf:\n - chips/nrf5*/**/*\n\nsam4l:\n - chips/sam4l/**/*\n\nHIL:\n - kernel/src/hil/*\n\nrisc-v:\n - arch/rv32i/**/*\n - arch/riscv/**/*\n\ntock-libraries:\n - libraries/**/*\n\nWG-OpenTitan:\n - boards/opentitan/**/*\n - chips/earlgrey/**/*\n - chips/lowrisc/**/*\n - doc/wg/opentitan/**/*\n\nWG-Network:\n - capsules/extra/src/ble_advertising_driver.rs\n - capsules/extra/src/can.rs\n - capsules/extra/src/ieee802154/**/*\n - capsules/extra/src/net/**/*\n - capsules/extra/src/rf233.rs\n - capsules/extra/src/rf233_const.rs\n - chips/apollo3/src/ble.rs\n - chips/litex/src/liteeth.rs\n - chips/nrf52/src/ble_radio.rs\n - chips/nrf52840/src/ieee802154_radio.rs\n - chips/stm32f429zi/src/can_registers.rs\n - chips/stm32f4xx/src/can.rs\n - chips/virtio/src/devices/virtio_net.rs\n - doc/wg/network/**/*\n - kernel/src/hil/ble_advertising.rs\n - kernel/src/hil/can.rs\n - kernel/src/hil/radio.rs\n\n# add kernel label unless already covered by hil label\nkernel:\n - any: ['kernel/**/*', '!kernel/src/hil/*']\n\n# add documentation label only if all changes are in doc/\ndocumentation:\n - all: ['doc/**/*']\n\ncomponent:\n - boards/components/**/*\n
| dataset_sample\yaml\tock_tock\.github\labeler.yml | labeler.yml | YAML | 1,689 | 0.8 | 0.030769 | 0.207547 | vue-tools | 493 | 2025-03-06T15:04:33.916616 | BSD-3-Clause | false | fc8c79b414cfbe68722cec29cc7b56d0 |
# Licensed under the Apache License, Version 2.0 or the MIT License.\n# SPDX-License-Identifier: Apache-2.0 OR MIT\n# Copyright Tock Contributors 2023.\n\n# This workflow calculates size diffs for the compiled binary of each supported tock board\n\nname: Benchmarks\n\n# Controls when the action will run. Triggers the workflow on pull request\n# events but only for the master branch\non:\n pull_request:\n branches: master\n\n# A workflow run is made up of one or more jobs that can run sequentially or in parallel\n# If you add additional jobs, remember to add them to bors.toml\npermissions:\n contents: read\n\njobs:\n benchmarks:\n # The type of runner that the job will run on\n runs-on: ubuntu-latest\n\n # Steps represent a sequence of tasks that will be executed as part of the job\n steps:\n - uses: actions/checkout@v4\n - name: Set up Python\n uses: actions/setup-python@v4\n with:\n python-version: '3.x'\n - name: Install dependencies\n run: |\n python -m pip install --upgrade pip setuptools wheel\n pip install --user cxxfilt\n sudo apt install llvm\n - name: size report\n run: |\n ./tools/github_actions_size_changes.sh\n
| dataset_sample\yaml\tock_tock\.github\workflows\benchmarks.yml | benchmarks.yml | YAML | 1,212 | 0.8 | 0.051282 | 0.30303 | awesome-app | 156 | 2025-03-17T22:04:09.960228 | BSD-3-Clause | false | 0779831a72db342daf466c76bf9d10f0 |
# Licensed under the Apache License, Version 2.0 or the MIT License.\n# SPDX-License-Identifier: Apache-2.0 OR MIT\n# Copyright Tock Contributors 2024.\n\nname: tock-nightly-ci\n\non:\n schedule:\n - cron: "0 0 * * *"\n\nenv:\n TERM: xterm # Makes tput work in actions output\n\n# A workflow run is made up of one or more jobs that can run sequentially or in parallel\n# If you add additional jobs, remember to add them to bors.toml\npermissions:\n contents: read\n issues: write\n\njobs:\n ci-build:\n # Do not run job on forks\n if: github.repository == 'tock/tock'\n\n strategy:\n matrix:\n os: [ubuntu-latest, macos-latest]\n runs-on: ${{ matrix.os }}\n\n steps:\n - uses: actions/checkout@v4\n\n - name: ci-job-syntax\n run: make ci-job-syntax\n - name: ci-job-compilation\n run: make ci-job-compilation\n - name: ci-job-debug-support-targets\n run: make ci-job-debug-support-targets\n\n - name: ci-job-collect-artifacts\n run: make ci-job-collect-artifacts\n - name: upload-build-artifacts\n uses: actions/upload-artifact@v4\n with:\n name: build-artifacts\n path: tools/ci-artifacts\n\n ci-tests:\n # Do not run job on forks\n if: github.repository == 'tock/tock'\n\n strategy:\n matrix:\n os: [ubuntu-latest, macos-latest]\n runs-on: ${{ matrix.os }}\n\n steps:\n - name: Update package repositories\n run: |\n sudo apt update\n if: matrix.os == 'ubuntu-latest'\n - name: Install dependencies for ubuntu-latest\n run: |\n sudo apt install libudev-dev libzmq3-dev\n if: matrix.os == 'ubuntu-latest'\n - name: Install dependencies for macos-latest\n run: |\n brew install zeromq\n if: matrix.os == 'macos-latest'\n - uses: actions/checkout@v4\n - name: ci-job-libraries\n run: make ci-job-libraries\n - name: ci-job-archs\n run: make ci-job-archs\n - name: ci-job-kernel\n run: make ci-job-kernel\n - name: ci-job-chips\n run: make ci-job-chips\n - name: ci-job-tools\n run: make ci-job-tools\n - name: Create Issue on Failed workflow\n if: failure()\n uses: dacbd/create-issue-action@main\n with:\n token: ${{ github.token }}\n title: Nightly CI failed\n body: |\n ### Context\n [Failed Run](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})\n [Codebase](https://github.com/${{ github.repository }}/tree/${{ github.sha }})\n Workflow name - `${{ github.workflow }}`\n Job - `${{ github.job }}`\n status - `${{ job.status }}`\n assignees: tock/core-wg\n
| dataset_sample\yaml\tock_tock\.github\workflows\ci-nightly.yml | ci-nightly.yml | YAML | 2,739 | 0.8 | 0.085106 | 0.097561 | awesome-app | 231 | 2024-12-05T01:48:50.102859 | BSD-3-Clause | false | 0b511c0a76af7ee3c744a3286ce1af16 |
# Licensed under the Apache License, Version 2.0 or the MIT License.\n# SPDX-License-Identifier: Apache-2.0 OR MIT\n# Copyright Tock Contributors 2023.\n\n# This workflow contains all tock-ci, separated into jobs\n\nname: tock-ci\nenv:\n TERM: xterm # Makes tput work in actions output\n\n# Controls when the action will run. Triggers the workflow on push or pull request\n# events but only for the master branch\non:\n push: # Run CI for all branches except GitHub merge queue tmp branches\n branches-ignore:\n - "gh-readonly-queue/**"\n pull_request: # Run CI for PRs on any branch\n merge_group: # Run CI for the GitHub merge queue\n\n# A workflow run is made up of one or more jobs that can run sequentially or in parallel\n# If you add additional jobs, remember to add them to bors.toml\npermissions:\n contents: read\n\njobs:\n ci-format:\n strategy:\n matrix:\n os: [ubuntu-latest]\n # The type of runner that the job will run on\n runs-on: ${{ matrix.os }}\n\n # Steps represent a sequence of tasks that will be executed as part of the job\n steps:\n - uses: actions/checkout@v4\n - uses: actions/setup-node@v3\n - name: ci-job-format\n run: make ci-job-format\n - name: ci-job-markdown-toc\n run: make ci-job-markdown-toc\n - name: ci-job-readme-check\n run: make ci-job-readme-check\n\n ci-clippy:\n strategy:\n matrix:\n os: [ubuntu-latest]\n # The type of runner that the job will run on\n runs-on: ${{ matrix.os }}\n\n # Steps represent a sequence of tasks that will be executed as part of the job\n steps:\n - uses: actions/checkout@v4\n - name: ci-job-clippy\n run: make ci-job-clippy\n\n ci-build:\n strategy:\n matrix:\n os: [ubuntu-latest]\n runs-on: ${{ matrix.os }}\n\n steps:\n - uses: actions/checkout@v4\n - name: ci-job-syntax\n run: make ci-job-syntax\n - name: ci-job-compilation\n run: make ci-job-compilation\n - name: ci-job-msrv\n run: make ci-job-msrv\n - name: ci-job-debug-support-targets\n run: make ci-job-debug-support-targets\n - name: ci-job-collect-artifacts\n run: make ci-job-collect-artifacts\n - name: upload-build-artifacts\n uses: actions/upload-artifact@v4\n with:\n name: build-artifacts\n path: tools/ci-artifacts\n\n ci-tests:\n strategy:\n matrix:\n os: [ubuntu-latest]\n runs-on: ${{ matrix.os }}\n\n steps:\n - name: Update package repositories\n run: |\n sudo apt update\n if: matrix.os == 'ubuntu-latest'\n - name: Install dependencies for ubuntu-latest\n run: |\n sudo apt install libudev-dev libzmq3-dev\n if: matrix.os == 'ubuntu-latest'\n - uses: actions/checkout@v4\n - name: ci-job-libraries\n run: make ci-job-libraries\n - name: ci-job-archs\n run: make ci-job-archs\n - name: ci-job-kernel\n run: make ci-job-kernel\n - name: ci-job-capsules\n run: make ci-job-capsules\n - name: ci-job-chips\n run: make ci-job-chips\n - name: ci-job-tools\n run: make ci-job-tools\n - name: ci-job-cargo-test-build\n run: make ci-job-cargo-test-build\n\n ci-qemu:\n strategy:\n matrix:\n os: [ubuntu-latest]\n runs-on: ${{ matrix.os }}\n\n steps:\n - name: Update package repositories\n run: |\n sudo apt update\n - name: Install dependencies\n continue-on-error: true\n run: |\n sudo apt install meson libglib2.0-dev\n - uses: actions/checkout@v4\n - name: ci-job-qemu\n run: make ci-job-qemu\n
|
dataset_sample\yaml\tock_tock\.github\workflows\ci.yml
|
ci.yml
|
YAML
| 3,726 | 0.8 | 0.054688 | 0.105263 |
react-lib
| 314 |
2025-01-28T21:29:07.238037
|
BSD-3-Clause
| false |
7c4746258a496c78006895c42a7063ac
|
# Licensed under the Apache License, Version 2.0 or the MIT License.\n# SPDX-License-Identifier: Apache-2.0 OR MIT\n# Copyright Tock Contributors 2023.\n\nname: "Pull Request Labeler"\non:\n- pull_request_target\n\npermissions:\n contents: read\n\njobs:\n triage:\n permissions:\n contents: read # for actions/labeler to determine modified files\n pull-requests: write # for actions/labeler to add labels to PRs\n runs-on: ubuntu-latest\n steps:\n - uses: actions/[email protected]\n with:\n repo-token: "${{ secrets.GITHUB_TOKEN }}"\n
|
dataset_sample\yaml\tock_tock\.github\workflows\labeler.yml
|
labeler.yml
|
YAML
| 549 | 0.8 | 0.095238 | 0.166667 |
vue-tools
| 202 |
2024-03-24T00:43:47.672121
|
GPL-3.0
| false |
6c6709371410aaae51695b275f4da99e
|
# Licensed under the Apache License, Version 2.0 or the MIT License.\n# SPDX-License-Identifier: Apache-2.0 OR MIT\n# Copyright Tock Contributors 2023.\n\n# This workflow contains the litex-ci-runner job, which uses the LiteX Verilated\n# simulation to run a Tock kernel and perform various tests using libtock-c\n# example applications.\n\nname: litex-sim-ci\nenv:\n TERM: xterm # Makes tput work in actions output\n\n# Controls when the action will run. Triggers the workflow on push or pull\n# request events but only for the master branch\non:\n push: # Run CI for all branches except GitHub merge queue tmp branches\n branches-ignore:\n - "gh-readonly-queue/**"\n pull_request: # Run CI for PRs on any branch\n merge_group: # Run CI for the GitHub merge queue\n\n# A workflow run is made up of one or more jobs that can run sequentially or in parallel\n# If you add additional jobs, remember to add them to bors.toml\npermissions:\n contents: read\n\njobs:\n litex-sim-ci:\n strategy:\n matrix:\n os: [ubuntu-22.04]\n\n # The type of runner that the job will run on\n runs-on: ${{ matrix.os }}\n\n # Steps represent a sequence of tasks that will be executed as part of the job\n steps:\n # Checkout the Tock repo, needs to happen at the beginning given\n # that other steps (such as the Rust toolchain) depend on files\n # in this repo.\n - name: Checkout the current repository\n uses: actions/checkout@v4\n\n # Install basic packages required for the GitHub actions workflow\n - name: Update packages and install dependencies\n run: |\n sudo apt update\n sudo apt install python3-pip python3-venv gcc-riscv64-unknown-elf \\n verilator libevent-dev libjson-c-dev libz-dev libzmq3-dev\n\n # Install elf2tab to be able to build userspace apps\n - name: Install elf2tab\n run: |\n cargo install [email protected]\n\n # Install tockloader, which is used to prepare binaries with userspace\n # applications.\n - name: Install tockloader\n run: |\n pip3 install tockloader==1.13.0\n\n # Clone tock-litex support repository under ./tock-litex, check out the\n # targeted release.\n - name: Checkout the tock-litex repository\n uses: actions/checkout@v4\n with:\n repository: lschuermann/tock-litex\n # The pinned revision is different from the targeted release as\n # documented in the LiteX boards, as the CI requires special patches\n # to LiteX for interacting with the simulation:\n ref: 2024011101-tock-ci-1\n path: tock-litex\n\n # Install all of the required Python packages from the tock-litex'\n # requirements.txt file\n - name: Install Python packages pinned by the tock-litex revision\n run: |\n pushd tock-litex\n # Migen is the DSL which the LiteX ecosystem uses as its\n # hardware-description language. It effectively provides a set of\n # Python classes and constructs which can be translated into Verilog.\n # It is not a package of the LiteX ecosystem, and thus not in the\n # requirements.txt, but it is required to be present on the system.\n # It should not require any specific or patched version.\n pip3 install migen==0.9.2\n pip3 install -r requirements.txt\n popd\n\n # Build the LiteX simulator Tock kernel. 
This kernel is never touched, the\n # litex-ci-runner will use its own temporary flash files.\n - name: Build the LiteX simulator Tock kernel\n run: |\n pushd boards/litex/sim\n make\n popd\n\n # Revision to checkout defined in the main tock repository in\n # .libtock_c_ci_rev\n - name: Checkout libtock-c CI revision\n uses: actions/checkout@v4\n with:\n repository: tock/libtock-c\n # Pins a libtock-c revision for LiteX CI tests. In case of\n # bugs fixed in libtock-c, backwards-incompatible changes in\n # Tock or new tests this might need to be updated.\n #\n # libtock-c of Mon Dec 9 21:35:38 2024 +0000\n ref: 820579455300762558718510d89960fd693131c1\n path: libtock-c\n\n - name: Build libtock-c apps\n run: |\n # We only need to build for a single target, but at multiple flash and\n # memory addresses such that tockloader can place the non-PIC apps\n # into the kernel binary properly.\n export TOCK_TARGETS="\\n rv32imc|rv32imc.0x00080080.0x40008000|0x00080080|0x40008000\n rv32imc|rv32imc.0x00088080.0x40010000|0x00088080|0x40010000"\n export LIBTOCK_C_APPS="\\n c_hello \\n tests/console/console_timeout \\n tests/mpu/mpu_walk_region \\n tests/printf_long \\n rot13_service \\n rot13_client \\n tests/console/console_recv_short \\n tests/console/console_recv_long"\n pushd libtock-c/examples\n for APP in $LIBTOCK_C_APPS; do\n make -C "$APP"\n done\n popd\n\n # Run the LiteX simulation with required options for Tock\n - name: Run various tests in the LiteX simulation using the litex-ci-runner\n run: |\n pushd tools/litex-ci-runner\n cargo run\n
|
dataset_sample\yaml\tock_tock\.github\workflows\litex_sim.yml
|
litex_sim.yml
|
YAML
| 5,321 | 0.95 | 0.072464 | 0.368852 |
vue-tools
| 489 |
2024-09-13T05:40:43.667489
|
Apache-2.0
| false |
d35fe1a8df6c974a3f918755910b5b0d
|
# Licensed under the Apache License, Version 2.0 or the MIT License.\n# SPDX-License-Identifier: Apache-2.0 OR MIT\n# Copyright Tock Contributors 2023.\n\n# Netlify's own CI builds both deploy previews for PRs, as well as the\n# production deploy for the master branch. We use this workflow purely as we\n# can't have Netlify build the wildcard gh-readonly-queue/* branches. This\n# workflow thus ensures that docs build successfully (albeit not in the exact\n# same environment as Netlify's).\n#\n# See issue #3428 for more information.\n\nname: docs-ci\nenv:\n TERM: dumb # Identical to Netlify build environment\n\non:\n merge_group:\n\npermissions:\n contents: read\n\njobs:\n ci-docs:\n strategy:\n matrix:\n os: [ubuntu-latest]\n runs-on: ${{ matrix.os }}\n\n steps:\n - uses: actions/checkout@v4\n - # This also sets up the rustup environment\n name: ci-netlify-build\n run: tools/netlify-build.sh\n
|
dataset_sample\yaml\tock_tock\.github\workflows\mergequeue_docs.yml
|
mergequeue_docs.yml
|
YAML
| 921 | 0.8 | 0.088235 | 0.357143 |
python-kit
| 869 |
2025-05-06T00:45:34.068389
|
Apache-2.0
| false |
525dd1884af530791535a0bd5e35eabe
|
# Licensed under the Apache License, Version 2.0 or the MIT License.\n# SPDX-License-Identifier: Apache-2.0 OR MIT\n# Copyright Tock Contributors 2024.\n\nname: "Tockbot"\n\non:\n schedule:\n - cron: "0 0 * * *"\n workflow_dispatch:\n inputs:\n dispatch-job:\n description: 'Which job to execute (choose between "all", "maint-nightly")'\n required: true\n default: 'all'\n dry-run:\n description: 'Whether to execute the jobs as dry-run'\n required: true\n default: true\n\njobs:\n dispatcher:\n runs-on: ubuntu-latest\n\n # Do not run job on forks\n if: github.repository == 'tock/tock'\n\n # This job determines which other jobs should be run:\n outputs:\n run-maint-nightly: ${{ steps.dispatch-logic.outputs.run-maint-nightly }}\n dry-run: ${{ steps.dispatch-logic.outputs.dry-run }}\n\n steps:\n # On pushes we want to check whether any changes have been made\n # to the Tockbot code base. Disabled for now:\n - uses: actions/checkout@v4\n\n # Dispatcher business logic:\n - name: Dispatch Tockbot Jobs\n id: dispatch-logic\n env:\n DISPATCH_JOB: ${{ github.event.inputs.dispatch-job }}\n DISPATCH_DRY_RUN: ${{ github.event.inputs.dry-run }}\n run: |\n if [ "$GITHUB_EVENT_NAME" == "workflow_dispatch" ]; then\n if [ "$DISPATCH_DRY_RUN" == "true" ]; then\n echo "dry-run=true" >> $GITHUB_OUTPUT\n elif [ "$DISPATCH_DRY_RUN" == "false" ]; then\n echo "dry-run=false" >> $GITHUB_OUTPUT\n else\n echo "Error: dry-run not a boolean: \"$DISPATCH_DRY_RUN\"" >&2\n exit 1\n fi\n\n if [ "$DISPATCH_JOB" == "all" ]; then\n echo "run-maint-nightly=true" >> $GITHUB_OUTPUT\n elif [ "$DISPATCH_JOB" == "maint-nightly" ]; then\n echo "run-maint-nightly=true" >> $GITHUB_OUTPUT\n else\n echo "Error: unknown job \"$DISPATCH_JOB\"" >&2\n exit 1\n fi\n elif [ "$GITHUB_EVENT_NAME" == "pull_request" ]; then\n echo "dry-run=true" >> $GITHUB_OUTPUT\n echo "run-maint-nightly=true" >> $GITHUB_OUTPUT\n elif [ "$GITHUB_EVENT_NAME" == "schedule" ]; then\n echo "dry-run=false" >> $GITHUB_OUTPUT\n echo "run-maint-nightly=true" >> $GITHUB_OUTPUT\n else\n echo "Error: unknown event name \"$GITHUB_EVENT_NAME\"" >&2\n exit 1\n fi\n\n maint-nightly:\n runs-on: ubuntu-latest\n\n # Only run this job if the dispatcher determined to schedule the\n # "maint-nightly" or "dry-run" jobs:\n needs: dispatcher\n if: ${{ needs.dispatcher.outputs.run-maint-nightly == 'true' && needs.dispatcher.outputs.dry-run != 'true' }}\n\n permissions:\n # Give GITHUB_TOKEN write permissions to modify PRs and issues:\n pull-requests: write\n issues: write\n\n steps:\n # Requires a tock checkout to run from:\n - uses: actions/checkout@v4\n\n # Setup Python and install dependencies:\n - uses: actions/setup-python@v5\n - name: Install Python Dependencies\n run: pip install -r tools/tockbot/requirements.txt\n\n # Run nightly tockbot maintenance:\n - name: Nightly Tockbot Maintenance\n env:\n GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n DRY_RUN: ${{ needs.dispatcher.outputs.dry-run == 'true' && '-n' || '' }}\n run: |\n cd tools/tockbot/\n ./tockbot.py -v $DRY_RUN maint-nightly -c ./maint_nightly.yaml\n\n # We'd like to avoid duplicating this, either by using conditionals in the\n # permissions key, or by using YAML anchors, neither of which are supported by\n # GH Actions...\n maint-nightly-dry-run:\n runs-on: ubuntu-latest\n\n # Only run this job if the dispatcher determined to schedule the\n # "maint-nightly" or "dry-run" jobs:\n needs: dispatcher\n if: ${{ needs.dispatcher.outputs.run-maint-nightly == 'true' && needs.dispatcher.outputs.dry-run == 'true' }}\n\n permissions:\n 
# Dry-run, read-only access:\n pull-requests: read\n issues: read\n\n steps:\n # Requires a tock checkout to run from:\n - uses: actions/checkout@v4\n\n # Setup Python and install dependencies:\n - uses: actions/setup-python@v5\n - name: Install Python Dependencies\n run: pip install -r tools/tockbot/requirements.txt\n\n # Run nightly tockbot maintenance:\n - name: Nightly Tockbot Maintenance\n env:\n GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n DRY_RUN: ${{ needs.dispatcher.outputs.dry-run == 'true' && '-n' || '' }}\n run: |\n cd tools/tockbot/\n ./tockbot.py -v $DRY_RUN maint-nightly -c ./maint_nightly.yaml\n
|
dataset_sample\yaml\tock_tock\.github\workflows\tockbot-nightly.yml
|
tockbot-nightly.yml
|
YAML
| 4,749 | 0.95 | 0.065693 | 0.196581 |
python-kit
| 119 |
2024-07-31T23:25:59.612914
|
BSD-3-Clause
| false |
d5594e597e3d974cfb75f28e80882997
|
# Licensed under the Apache License, Version 2.0 or the MIT License.\n# SPDX-License-Identifier: Apache-2.0 OR MIT\n# Copyright Tock Contributors 2024.\n\n# This workflow contains all Treadmill-based hardware CI jobs.\n#\n# Treadmill is a distributed hardware testbed developed within the Tock OS\n# project. For more information on Treadmill, have a look at its documentation\n# [1] or repository [2].\n#\n# This workflow is based on the Treadmill GitHub Actions integration guide [3].\n# In addition, it features the ability to run multiple Treadmill jobs and\n# test-execute stages through GitHub Action's job matrices, and uses a GitHub\n# environment to allow deployments with access to secrets for select PRs.\n#\n# [1]: https://book.treadmill.ci/\n# [2]: https://github.com/treadmill-tb/treadmill\n# [3]: https://book.treadmill.ci/user-guide/github-actions-integration.html\n\nname: treadmill-ci\n\nenv:\n TERM: xterm # Makes tput work in actions output\n\n# Controls when the action will run. Triggers the workflow on pull request and\n# merge group checks:\n#\n# KEEP IN SYNC WITH `environment:` ATTRIBUTE BELOW:\non:\n push:\n branches:\n - master\n # Add any additional branches you want to include\n # - dev/test_ci_branch\n\n # Pull requests from forks will not have access to the required GitHub API\n # secrets below, even if they are using an appropriate deployment environment\n # and the workflow runs have been approved according to this environment's\n # rules. We don't know whether this is a bug on GitHub's end or deliberate.\n #\n # Either way, for now we disable this workflow to run on PRs until we have\n # an API proxy that securely performs these GitHub API calls (adding runners\n # and starting Treadmill jobs with those runner registration tokens), which\n # allows this workflow to run without access to repository secrets.\n #\n # However, because GitHub's merge queues don't allow to differentiate required\n # checks for *entering* the merge queue from those that are required to *pass*\n # it, we also can't disable this trigger entirely. 
Instead, we use a selector\n # to avoid running any actual checks on this trigger, while still technically\n # succeeding for PRs.\n pull_request:\n\n merge_group: # Run CI for the GitHub merge queue\n\n # Manually dispatch for a specific branch (will require approval\n # through the treadmill-ci-merged environment:\n workflow_dispatch:\n inputs:\n tock-kernel-ref:\n description: 'Ref (revision/branch/tag) of the upstream Tock repo to test'\n required: true\n default: 'master'\n libtock-c-ref:\n description: 'Ref (revision/branch/tag) of the upstream libtock-c repo to test'\n required: true\n default: 'master'\n tests-json:\n description: 'tests-json value passed to HWCI workflow (if empty, output from hwci-determine-tests step is used)'\n required: false\n\npermissions:\n contents: read\n\njobs:\n hwci-determine-tests:\n runs-on: ubuntu-latest\n\n # Don't run on a pull request, as explained above.\n if: github.event_name != 'pull_request'\n\n outputs:\n hwci-tests-json: ${{ steps.determine-tests.outputs.hwci-tests-json }}\n\n steps:\n - name: Checkout the tock/tock repository\n uses: actions/checkout@v4\n with:\n # Checkout the repository at the commit that triggered the workflow\n repository: tock/tock\n ref: ${{ github.sha }}\n path: tock-tock\n\n - name: Checkout the tock-hardware-ci repository\n uses: actions/checkout@v4\n with:\n repository: tock/tock-hardware-ci\n # Change this in accordance with the two other `tock-hardware-ci` refs\n # referenced below in the reusable workflow's parameters:\n ref: 'main'\n path: tock-hardware-ci\n\n - name: Analyze changes to determine relevant tests\n id: determine-tests\n run: |\n # Ensure Python dependencies are installed\n python3 -m pip install --user --upgrade pip\n\n # Run the select_tests.py script\n python3 tock-hardware-ci/hwci/select_tests.py \\n --repo-path tock-tock \\n --hwci-path tock-hardware-ci/hwci \\n --output selected_tests.json\n\n echo "Selected HWCI tests:"\n cat selected_tests.json\n\n # Output the tests JSON\n hwci_tests_json=$(cat selected_tests.json | jq -c '.')\n echo "hwci-tests-json=${hwci_tests_json}" >> "$GITHUB_OUTPUT"\n\n hwci-treadmill-dispatch:\n needs: [hwci-determine-tests]\n\n # This checks whether there is at least one test to run, see\n # https://github.com/orgs/community/discussions/27125#discussioncomment-3254720\n #\n # Don't run on a pull request, as explained above.\n if: github.event_name != 'pull_request' && (fromJSON(needs.hwci-determine-tests.outputs.hwci-tests-json)[0] != null || github.event_name == 'workflow_dispatch')\n\n # The main tock-hardware-ci workflow is imported from another repository. It\n # can be reused across multiple Tock repositories such as the kernel,\n # libtock-c, and libtock-rs.\n uses: tock/tock-hardware-ci/.github/workflows/treadmill-ci.yml@main\n\n with:\n # Only run on a specific repository, as others will not have the right\n # environments set up and secrets configured. Forks may want to change\n # this parameter.\n repository-filter: 'tock/tock'\n\n # Provide access to the required Treadmill secrets by running in the\n # appropriate environment (depending on the `on:` triggers above)\n job-environment: ${{ (github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch') && 'treadmill-ci' || 'treadmill-ci-merged' }}\n\n # Reference for tock-hardware-ci repo, change if you want a specific test\n # suite. 
In this case, you should also update the branch reference in the\n # "uses" line above.\n tock-hardware-ci-ref: 'main'\n\n # Test the tock kernel revision that triggered this workflow:\n tock-kernel-ref: ${{ github.event_name == 'workflow_dispatch' && inputs.tock-kernel-ref || github.sha }}\n\n # Use the latest upstream libtock-c library:\n libtock-c-ref: ${{ github.event_name == 'workflow_dispatch' && inputs.libtock-c-ref || 'master' }}\n\n # Pass the selected tests:\n tests-json: ${{ (github.event_name == 'workflow_dispatch' && inputs.tests-json != '') && inputs.tests-json || needs.hwci-determine-tests.outputs.hwci-tests-json }}\n\n secrets: inherit\n\n # We cannot depend on *all* test-execute jobs of hwci-treadmill-dispatch as\n # required checks for pull requests and merge queues. Thus, we run another\n # single dummy step here that waits for all the hwci-treadmill-dispatch jobs\n # to complete and report success.\n #\n # We also use this to report a "dummy" success value for the "pull_request"\n # trigger, as explained in the comment of the "on:" parameters above.\n hwci-report-success:\n needs: [hwci-determine-tests, hwci-treadmill-dispatch]\n\n if: always()\n\n runs-on: ubuntu-latest\n\n steps:\n - name: Fail if any of the 'hwci-treadmill-dispatch' jobs failed\n if: github.event_name != 'pull_request' && contains(needs.*.result, 'failure')\n run: exit 1\n
|
dataset_sample\yaml\tock_tock\.github\workflows\treadmill-ci.yml
|
treadmill-ci.yml
|
YAML
| 7,234 | 0.95 | 0.106742 | 0.489796 |
python-kit
| 722 |
2024-04-14T06:29:56.103776
|
MIT
| false |
5898e897338c9aba41461605943f4d1b
|
# Licensed under the Apache License, Version 2.0 or the MIT License.\n# SPDX-License-Identifier: Apache-2.0 OR MIT\n# Copyright Tock Contributors 2022.\n\nname: tock-netlify-cache\n
|
dataset_sample\yaml\tock_tock\tools\netlify-cache\manifest.yml
|
manifest.yml
|
YAML
| 176 | 0.8 | 0 | 0.75 |
react-lib
| 913 |
2024-09-14T15:14:13.192478
|
BSD-3-Clause
| false |
5a0454923d6e1c668a1c02a348b19a45
|
coverage:\n status:\n project:\n default:\n # https://docs.codecov.com/docs/commit-status#informational\n informational: true\n target: 80%\n patch:\n default:\n informational: true\n target: 80%\ngithub_checks:\n annotations: false\n
|
dataset_sample\yaml\tracel-ai_burn\codecov.yml
|
codecov.yml
|
YAML
| 276 | 0.8 | 0 | 0.076923 |
python-kit
| 490 |
2023-10-02T05:25:44.843559
|
BSD-3-Clause
| false |
740c51724859f1b7b32ee42838e2a61a
|
version: 2\n\nupdates:\n - package-ecosystem: "github-actions"\n directory: "/"\n schedule:\n interval: "daily"\n\n - package-ecosystem: "cargo"\n directory: "/"\n schedule:\n interval: "weekly"\n
|
dataset_sample\yaml\tracel-ai_burn\.github\dependabot.yml
|
dependabot.yml
|
YAML
| 208 | 0.7 | 0 | 0 |
vue-tools
| 6 |
2023-11-12T00:06:42.972544
|
BSD-3-Clause
| false |
8f76436388f73a157096d57b7ba54dd3
|
name: Combine Dependabot PRs\n\non:\n schedule:\n - cron: '0 6 * * MON' # Monday at 6:00am UTC\n workflow_dispatch: # allows to manually trigger the workflow as well\n\n# The minimum permissions required to run this Action\npermissions:\n contents: write\n pull-requests: write\n checks: read\n\njobs:\n combine-prs:\n runs-on: ubuntu-latest\n\n steps:\n - name: combine-prs\n id: combine-prs\n uses: github/[email protected] # where X.X.X is the latest version\n with:\n labels: dependencies,automated\n
|
dataset_sample\yaml\tracel-ai_burn\.github\workflows\combine-dependabot-prs.yml
|
combine-dependabot-prs.yml
|
YAML
| 532 | 0.95 | 0 | 0.052632 |
vue-tools
| 34 |
2023-08-03T06:21:38.768271
|
GPL-3.0
| false |
abe8d4871a857967cf849527bcfc4abe
|
name: dependencies\n\non:\n schedule:\n - cron: '0 21 * * TUE' # Run every Tuesday at 21:00 (UTC)\n push:\n tags:\n - 'v*.*.*' # Run when a new version is being published\n\nenv:\n #\n # Dependency versioning\n #\n\n # Udeps version\n UDEPS_VERSION: "0.1.143"\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.ref }}\n cancel-in-progress: true\n\njobs:\n dependencies:\n runs-on: ubuntu-latest\n\n steps:\n\n - name: checkout\n uses: actions/checkout@v4\n\n - name: Audit Rust dependencies\n # If a vulnerability is found, a new issue will automatically be opened\n # since this action runs on main branch\n uses: actions-rust-lang/audit@v1\n\n - name: Detect multiple versions of the same crate\n uses: EmbarkStudios/cargo-deny-action@v2\n with:\n command: check bans licenses sources\n\n - name: Install Rust nightly\n uses: dtolnay/rust-toolchain@nightly\n with:\n toolchain: nightly\n components: rustfmt\n\n - name: Install cargo-udeps\n env:\n UDEPS_LINK: https://github.com/est31/cargo-udeps/releases/download\n run: |\n curl -L "$UDEPS_LINK/v$UDEPS_VERSION/cargo-udeps-v$UDEPS_VERSION-x86_64-unknown-linux-gnu.tar.gz" |\n tar xz -C $HOME/.cargo/bin --strip-components 2\n\n - name: Run cargo-udeps\n run: |\n cargo +nightly udeps --all-targets\n
|
dataset_sample\yaml\tracel-ai_burn\.github\workflows\dependencies.yml
|
dependencies.yml
|
YAML
| 1,403 | 0.8 | 0 | 0.136364 |
node-utils
| 624 |
2023-09-19T14:04:56.722796
|
BSD-3-Clause
| false |
502ca6d7cc44ea909f5d4fd8f8462c9b
|
name: publish\n\non:\n push:\n tags:\n - "v*"\n\njobs:\n publish-burn-vision:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n with:\n crate: burn-vision\n needs:\n - publish-burn-autodiff\n - publish-burn-candle\n - publish-burn-fusion\n - publish-burn-cubecl-fusion\n - publish-burn-cubecl\n - publish-burn-ndarray\n - publish-burn-tch\n - publish-burn-tensor\n - publish-burn-ir\n - publish-burn-tensor-testgen\n # dev dependencies\n - publish-burn-wgpu\n - publish-burn-cuda\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-router:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n with:\n crate: burn-router\n needs:\n - publish-burn-ir\n - publish-burn-common\n - publish-burn-tensor\n # dev dependencies\n - publish-burn-autodiff\n - publish-burn-ndarray\n - publish-burn-wgpu\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-remote:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n with:\n crate: burn-remote\n needs:\n - publish-burn-ir\n - publish-burn-common\n - publish-burn-tensor\n - publish-burn-router\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-derive:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n with:\n crate: burn-derive\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-dataset:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n with:\n crate: burn-dataset\n needs:\n - publish-burn-common\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-common:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n with:\n crate: burn-common\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-tensor-testgen:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n with:\n crate: burn-tensor-testgen\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-tensor:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n needs:\n - publish-burn-tensor-testgen\n - publish-burn-common\n with:\n crate: burn-tensor\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-ir:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n needs:\n - publish-burn-tensor\n with:\n crate: burn-ir\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-fusion:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n needs:\n - publish-burn-ir\n - publish-burn-tensor\n - publish-burn-common\n with:\n crate: burn-fusion\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-cubecl-fusion:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n needs:\n - publish-burn-ir\n - publish-burn-common\n - publish-burn-fusion\n - publish-burn-tensor\n with:\n crate: burn-cubecl-fusion\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-cubecl:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n needs:\n - publish-burn-ir\n - publish-burn-common\n - publish-burn-fusion\n - publish-burn-cubecl-fusion\n - publish-burn-tensor\n - publish-burn-ndarray\n with:\n crate: burn-cubecl\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-autodiff:\n uses: 
tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n needs:\n - publish-burn-tensor\n - publish-burn-tensor-testgen\n - publish-burn-common\n with:\n crate: burn-autodiff\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-tch:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n needs:\n - publish-burn-tensor\n - publish-burn-autodiff\n with:\n crate: burn-tch\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-ndarray:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n needs:\n - publish-burn-ir\n - publish-burn-tensor\n - publish-burn-autodiff\n - publish-burn-common\n with:\n crate: burn-ndarray\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-wgpu:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n needs:\n - publish-burn-tensor\n - publish-burn-autodiff\n - publish-burn-ndarray\n - publish-burn-common\n - publish-burn-cubecl\n with:\n crate: burn-wgpu\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-cuda:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n needs:\n - publish-burn-tensor\n - publish-burn-autodiff\n - publish-burn-ndarray\n - publish-burn-common\n - publish-burn-cubecl\n with:\n crate: burn-cuda\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-rocm:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n needs:\n - publish-burn-tensor\n - publish-burn-autodiff\n - publish-burn-ndarray\n - publish-burn-common\n - publish-burn-cubecl\n with:\n crate: burn-rocm\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-candle:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n needs:\n - publish-burn-tensor\n - publish-burn-autodiff\n - publish-burn-tch\n with:\n crate: burn-candle\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-core:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n needs:\n - publish-burn-dataset\n - publish-burn-common\n - publish-burn-derive\n - publish-burn-tensor\n - publish-burn-autodiff\n - publish-burn-wgpu\n - publish-burn-tch\n - publish-burn-ndarray\n - publish-burn-candle\n - publish-burn-remote\n with:\n crate: burn-core\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-train:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n needs:\n - publish-burn-core\n with:\n crate: burn-train\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n needs:\n - publish-burn-core\n - publish-burn-train\n with:\n crate: burn\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-burn-import:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n needs:\n - publish-burn\n with:\n crate: burn-import\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n\n publish-onnx-ir:\n uses: tracel-ai/github-actions/.github/workflows/publish-crate.yml@v1\n with:\n crate: onnx-ir\n secrets:\n CRATES_IO_API_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}\n
|
dataset_sample\yaml\tracel-ai_burn\.github\workflows\publish.yml
|
publish.yml
|
YAML
| 7,549 | 0.95 | 0 | 0.007937 |
node-utils
| 93 |
2023-12-21T15:34:42.346081
|
GPL-3.0
| false |
313a84b4c57cb91013cf6feb42a6c3c0
|
name: semver-checks\n\non:\n schedule:\n - cron: '0 21 * * THU' # Run every Thursday at 21:00 (UTC)\n push:\n tags:\n - 'v*.*.*' # Run when a new version is being published\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.ref }}\n cancel-in-progress: true\n\njobs:\n semver-checks:\n runs-on: ubuntu-latest\n\n steps:\n - name: checkout\n uses: actions/checkout@v4\n\n - name: Check semantic versioning violations\n uses: obi1kenobi/cargo-semver-checks-action@v2\n with:\n # cargo-semver-checks uses `all-features` by default, but `burn`\n # publishes on crates.io with `default-features`\n feature-group: default-features\n # Exclude crates which are not published on crates.io\n exclude: burn-no-std-tests,onnx-tests,pytorch-tests\n
|
dataset_sample\yaml\tracel-ai_burn\.github\workflows\semver-checks.yml
|
semver-checks.yml
|
YAML
| 813 | 0.8 | 0 | 0.125 |
python-kit
| 741 |
2024-04-04T07:53:16.214218
|
BSD-3-Clause
| false |
a028c37a1a76cedf87540abfee627a4e
|
name: Stale Pull Requests\n\non:\n schedule:\n - cron: '0 12 * * *' # Run every day at 12:00 (UTC)\n\n# The minimum permissions required to run this Action\npermissions:\n contents: write # only for delete-branch option\n issues: write\n pull-requests: write\n\njobs:\n stale-pr:\n\n runs-on: ubuntu-latest\n\n steps:\n - uses: actions/checkout@v4\n\n - name: Stale pull requests\n uses: actions/stale@v9\n with:\n # The idle number of days before marking issues stale.\n #\n # With a negative number like -1, no issues\n # will be marked as stale automatically.\n days-before-issue-stale: -1\n # The idle number of days before marking pull requests stale\n days-before-pr-stale: 30\n # The idle number of days before closing\n # the stale pull requests (due to the stale label).\n #\n # With a negative number like -1, the pull requests\n # will never be closed automatically.\n days-before-pr-close: -1\n # Label to apply on staled pull requests\n stale-pr-label: 'stale'\n # The message that will be added as a comment to the pull request\n stale-pr-message: 'This PR has been marked as stale because it has not been updated for over a month'\n # Remove `stale` label from pull requests on updates/comments\n remove-pr-stale-when-updated: true\n
|
dataset_sample\yaml\tracel-ai_burn\.github\workflows\stale-pr.yml
|
stale-pr.yml
|
YAML
| 1,371 | 0.95 | 0.047619 | 0.388889 |
node-utils
| 950 |
2024-04-01T14:41:41.800433
|
GPL-3.0
| false |
64098c78feedaf371d5e82058cb16a57
|