mirror of https://github.com/actions/cache.git
synced 2025-10-29 00:38:36 +08:00

Merge commit 'b2a0330cdf8f73b245abd87c6e608a524d1798fb'
This commit is contained in: commit 083b9c47d0
.github/dependabot.yml (vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file

version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
    groups:
      minor-actions-dependencies:
        update-types: [minor, patch]

  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "daily"
    allow:
      - dependency-type: direct
      - dependency-type: production
.github/workflows/check-dist.yml (vendored, deleted, 19 lines)
@@ -1,19 +0,0 @@
name: Check dist/

on:
  push:
    branches:
      - main
    paths-ignore:
      - '**.md'
  pull_request:
    paths-ignore:
      - '**.md'
  workflow_dispatch:

jobs:
  call-check-dist:
    name: Check dist/
    uses: actions/reusable-workflows/.github/workflows/check-dist.yml@main
    with:
      node-version: "20.x"
.github/workflows/close-inactive-issues.yml (vendored, deleted, 22 lines)
@@ -1,22 +0,0 @@
name: Close inactive issues
on:
  schedule:
    - cron: "30 8 * * *"

jobs:
  close-issues:
    runs-on: ubuntu-latest
    permissions:
      issues: write
      pull-requests: write
    steps:
      - uses: actions/stale@v3
        with:
          days-before-issue-stale: 200
          days-before-issue-close: 5
          stale-issue-label: "stale"
          stale-issue-message: "This issue is stale because it has been open for 200 days with no activity. Leave a comment to avoid closing this issue in 5 days."
          close-issue-message: "This issue was closed because it has been inactive for 5 days since being marked as stale."
          days-before-pr-stale: -1
          days-before-pr-close: -1
          repo-token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/codeql.yml (vendored, deleted, 46 lines)
@@ -1,46 +0,0 @@
name: "Code scanning - action"

on:
  push:
  pull_request:
  schedule:
    - cron: '0 19 * * 0'

jobs:
  CodeQL-Build:
    # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
    runs-on: ubuntu-latest

    permissions:
      # required for all workflows
      security-events: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        # Override language selection by uncommenting this and choosing your languages
        # with:
        #   languages: go, javascript, csharp, python, cpp, java, ruby

      # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
      # If this step fails, then you should remove it and run the build manually (see below).
      - name: Autobuild
        uses: github/codeql-action/autobuild@v2

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun

      # ✏️ If the Autobuild fails above, remove it and uncomment the following
      # three lines and modify them (or add more) to build your code if your
      # project uses a compiled language

      #- run: |
      #    make bootstrap
      #    make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
.github/workflows/issue-opened-workflow.yml (vendored, deleted, 16 lines)
@@ -1,16 +0,0 @@
name: Assign issue
on:
  issues:
    types: [opened]
jobs:
  run-action:
    runs-on: ubuntu-latest
    steps:
      - name: Get current oncall
        id: oncall
        run: |
          echo "CURRENT=$(curl --request GET 'https://api.pagerduty.com/oncalls?include[]=users&schedule_ids[]=P5VG2BX&earliest=true' --header 'Authorization: Token token=${{ secrets.PAGERDUTY_TOKEN }}' --header 'Accept: application/vnd.pagerduty+json;version=2' --header 'Content-Type: application/json' | jq -r '.oncalls[].user.name')" >> $GITHUB_OUTPUT

      - name: add_assignees
        run: |
          curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.issue.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'
.github/workflows/licensed.yml (vendored, deleted, 15 lines)
@@ -1,15 +0,0 @@
name: Licensed

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
  workflow_dispatch:

jobs:
  call-licensed:
    name: Licensed
    uses: actions/reusable-workflows/.github/workflows/licensed.yml@main
.github/workflows/pr-opened-workflow.yml (vendored, deleted, 20 lines)
@@ -1,20 +0,0 @@
name: Add Reviewer PR
on:
  pull_request_target:
    types: [opened]
jobs:
  run-action:
    runs-on: ubuntu-latest
    steps:
      - name: Get current oncall
        id: oncall
        run: |
          echo "CURRENT=$(curl --request GET 'https://api.pagerduty.com/oncalls?include[]=users&schedule_ids[]=P5VG2BX&earliest=true' --header 'Authorization: Token token=${{ secrets.PAGERDUTY_TOKEN }}' --header 'Accept: application/vnd.pagerduty+json;version=2' --header 'Content-Type: application/json' | jq -r '.oncalls[].user.name')" >> $GITHUB_OUTPUT

      - name: Request Review
        run: |
          curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/pulls/${{ github.event.pull_request.number}}/requested_reviewers -d '{"reviewers":["${{steps.oncall.outputs.CURRENT}}"]}'

      - name: Add Assignee
        run: |
          curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.pull_request.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'
.github/workflows/publish-immutable-actions.yml (vendored, new file, 20 lines)
@@ -0,0 +1,20 @@
name: 'Publish Immutable Action Version'

on:
  release:
    types: [released]

jobs:
  publish:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write
      packages: write

    steps:
      - name: Checking out
        uses: actions/checkout@v4
      - name: Publish
        id: publish
        uses: actions/publish-immutable-action@0.0.3
.github/workflows/tests.yml (vendored, new file, 78 lines)
@@ -0,0 +1,78 @@
name: Tests

on:
  workflow_dispatch:
  pull_request:
    branches:
      - main
      - releases/**
  push:
    branches:
      - main
      - releases/**
      - fix/**
      - v4*

jobs:
  # End to end save and restore
  test-save:
    runs-on: runs-on,runner=2cpu-linux-arm64
    strategy:
      matrix:
        part_size: [32]
        queue_size: [4, 8]
      fail-fast: false
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Generate files in working directory
        shell: bash
        run: |
          __tests__/create-cache-files.sh ${{ runner.os }} test-cache
          # 5GB
          # curl -o test-cache/ubuntu.iso https://releases.ubuntu.com/22.04.3/ubuntu-22.04.3-desktop-amd64.iso
          # 2GB
          curl -o test-cache/ubuntu.iso https://releases.ubuntu.com/jammy/ubuntu-22.04.3-live-server-amd64.iso
      - name: Generate files outside working directory
        shell: bash
        run: __tests__/create-cache-files.sh ${{ runner.os }} ~/test-cache
      - name: Save cache
        uses: ./
        env:
          UPLOAD_PART_SIZE: ${{ matrix.part_size }}
          UPLOAD_QUEUE_SIZE: ${{ matrix.queue_size }}
        with:
          key: test-${{ runner.os }}-${{ github.run_id }}-${{ matrix.part_size }}-${{ matrix.queue_size }}
          path: |
            test-cache
            ~/test-cache
  test-restore:
    needs: test-save
    strategy:
      matrix:
        part_size: [8, 16]
        queue_size: [8, 12]
      fail-fast: false
    runs-on: runs-on,runner=2cpu-linux-arm64
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Restore cache
        uses: ./
        env:
          DOWNLOAD_PART_SIZE: ${{ matrix.part_size }}
          DOWNLOAD_QUEUE_SIZE: ${{ matrix.queue_size }}
        with:
          key: test-${{ runner.os }}-${{ github.run_id }}-${{ matrix.part_size }}-${{ matrix.queue_size }}
          restore-keys: |
            test-${{ runner.os }}-${{ github.run_id }}-${{ matrix.part_size }}-
            test-${{ runner.os }}-${{ github.run_id }}-
          path: |
            test-cache
            ~/test-cache
      - name: Verify cache files in working directory
        shell: bash
        run: __tests__/verify-cache-files.sh ${{ runner.os }} test-cache
      - name: Verify cache files outside working directory
        shell: bash
        run: __tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache
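The `UPLOAD_PART_SIZE`/`UPLOAD_QUEUE_SIZE` and `DOWNLOAD_PART_SIZE`/`DOWNLOAD_QUEUE_SIZE` knobs exercised by this matrix are plain step environment variables, so they can also be tuned on a single step in a user workflow. A minimal sketch, assuming the part sizes are in megabytes and the queue sizes are concurrent transfer counts, and assuming `runs-on/cache@v4` as the published name of this action:

```yaml
- name: Save cache with tuned S3 transfer settings
  uses: runs-on/cache@v4       # assumption: published name of this fork
  env:
    UPLOAD_PART_SIZE: 32       # assumed unit: megabytes per multipart part
    UPLOAD_QUEUE_SIZE: 8       # assumed meaning: concurrent part uploads
  with:
    path: test-cache
    key: test-${{ runner.os }}-${{ github.run_id }}
```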
.github/workflows/workflow.yml (vendored, deleted, 129 lines)
@@ -1,129 +0,0 @@
name: Tests

on:
  pull_request:
    branches:
      - main
      - releases/**
  push:
    branches:
      - main
      - releases/**

jobs:
  # Build and unit test
  build:
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest, macOS-latest]
      fail-fast: false
    runs-on: ${{ matrix.os }}
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Setup Node.js 20.x
        uses: actions/setup-node@v3
        with:
          node-version: 20.x
          cache: npm
      - run: npm ci
      - name: Prettier Format Check
        run: npm run format-check
      - name: ESLint Check
        run: npm run lint
      - name: Build & Test
        run: npm run test

  # End to end save and restore
  test-save:
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest, macOS-latest]
      fail-fast: false
    runs-on: ${{ matrix.os }}
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Generate files in working directory
        shell: bash
        run: __tests__/create-cache-files.sh ${{ runner.os }} test-cache
      - name: Generate files outside working directory
        shell: bash
        run: __tests__/create-cache-files.sh ${{ runner.os }} ~/test-cache
      - name: Save cache
        uses: ./
        with:
          key: test-${{ runner.os }}-${{ github.run_id }}
          path: |
            test-cache
            ~/test-cache
  test-restore:
    needs: test-save
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest, macOS-latest]
      fail-fast: false
    runs-on: ${{ matrix.os }}
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Restore cache
        uses: ./
        with:
          key: test-${{ runner.os }}-${{ github.run_id }}
          path: |
            test-cache
            ~/test-cache
      - name: Verify cache files in working directory
        shell: bash
        run: __tests__/verify-cache-files.sh ${{ runner.os }} test-cache
      - name: Verify cache files outside working directory
        shell: bash
        run: __tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache

  # End to end with proxy
  test-proxy-save:
    runs-on: ubuntu-latest
    container:
      image: ubuntu:latest
      options: --dns 127.0.0.1
    services:
      squid-proxy:
        image: ubuntu/squid:latest
        ports:
          - 3128:3128
    env:
      https_proxy: http://squid-proxy:3128
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Generate files
        run: __tests__/create-cache-files.sh proxy test-cache
      - name: Save cache
        uses: ./
        with:
          key: test-proxy-${{ github.run_id }}
          path: test-cache
  test-proxy-restore:
    needs: test-proxy-save
    runs-on: ubuntu-latest
    container:
      image: ubuntu:latest
      options: --dns 127.0.0.1
    services:
      squid-proxy:
        image: ubuntu/squid:latest
        ports:
          - 3128:3128
    env:
      https_proxy: http://squid-proxy:3128
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Restore cache
        uses: ./
        with:
          key: test-proxy-${{ github.run_id }}
          path: test-cache
      - name: Verify cache
        run: __tests__/verify-cache-files.sh proxy test-cache
@@ -13,4 +13,10 @@ allowed:

 reviewed:
   npm:
     - sax
+    - "@protobuf-ts/plugin-framework" # Apache-2.0
+    - "@protobuf-ts/runtime" # Apache-2.0
+    - fs.realpath # ISC
+    - glob # ISC
+    - prettier # MIT
+    - lodash # MIT
Binary license files (generated; contents not shown):
BIN .licenses/NOTICE (new file)
BIN .licenses/npm/@actions/cache.dep.yml
BIN .licenses/npm/@actions/core.dep.yml
BIN .licenses/npm/@actions/io.dep.yml
BIN .licenses/npm/@protobuf-ts/plugin-framework.dep.yml (new file)
BIN .licenses/npm/@protobuf-ts/plugin.dep.yml (new file)
BIN .licenses/npm/@protobuf-ts/protoc.dep.yml (new file)
BIN .licenses/npm/@protobuf-ts/runtime-rpc.dep.yml (new file)
BIN .licenses/npm/@protobuf-ts/runtime.dep.yml (new file)
BIN .licenses/npm/camel-case.dep.yml (new file)
BIN .licenses/npm/commander.dep.yml (new file)
BIN .licenses/npm/dot-object.dep.yml (new file)
BIN .licenses/npm/fs.realpath.dep.yml (new file)
BIN .licenses/npm/glob.dep.yml (new file)
BIN .licenses/npm/inflight.dep.yml (new file)
BIN .licenses/npm/inherits.dep.yml (new file)
BIN .licenses/npm/lodash.dep.yml (new file)
BIN .licenses/npm/lower-case.dep.yml (new file)
BIN .licenses/npm/no-case.dep.yml (new file)
BIN .licenses/npm/once.dep.yml (new file)
BIN .licenses/npm/pascal-case.dep.yml (new file)
BIN .licenses/npm/path-to-regexp.dep.yml (new file)
BIN .licenses/npm/prettier.dep.yml (new file)
BIN .licenses/npm/ts-poet.dep.yml (new file)
BIN .licenses/npm/tslib-2.8.1.dep.yml (new file)
BIN .licenses/npm/twirp-ts.dep.yml (new file)
BIN .licenses/npm/typescript.dep.yml (new file)
BIN .licenses/npm/wrappy.dep.yml (new file)
BIN .licenses/npm/yaml.dep.yml (new file)
@@ -1,6 +1,6 @@
 # Shockingly faster cache action

-This action is a drop-in replacement for the official `actions/cache@v4` action, for use with the [RunsOn](https://runs-on.com) self-hosted GitHub Action runner provider, or with your own self-hosted runner solution.
+This action is a drop-in replacement for the official `actions/cache@v4` action, for use with the [RunsOn](https://runs-on.com/?ref=cache) self-hosted GitHub Action runner provider, or with your own self-hosted runner solution.

 ![perf](https://github.com/runs-on/cache/assets/57699/e0be4a86-7362-4c30-908e-8ab3e4c61ab5)
@@ -38,3 +38,10 @@ If you want to use this in your own infrastructure, setup your AWS credentials w
 ```

 Be aware of S3 transfer costs if your runners are not in the same AWS region as your bucket.

+## Special environment variables
+
+* `RUNS_ON_S3_BUCKET_CACHE`: if set, the action will use this bucket to store the cache.
+* `RUNS_ON_RUNNER_NAME`: when running on RunsOn, this environment variable is non-empty and any existing AWS credentials in the environment will be discarded. To preserve existing AWS environment variables, set it to the empty string `""`.
+* `RUNS_ON_S3_FORCE_PATH_STYLE` or `AWS_S3_FORCE_PATH_STYLE`: if either of these environment variables equals the string `"true"`, the S3 client will be configured to force path-style addressing.
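A minimal sketch of a job wiring these variables up; the bucket name and the `runs-on/cache@v4` reference are illustrative assumptions, not values taken from this commit:

```yaml
jobs:
  build:
    runs-on: ubuntu-latest
    env:
      RUNS_ON_S3_BUCKET_CACHE: my-cache-bucket   # hypothetical bucket name
      RUNS_ON_S3_FORCE_PATH_STYLE: "true"        # force path-style S3 addressing
    steps:
      - uses: actions/checkout@v4
      - uses: runs-on/cache@v4                   # assumed published name of this action
        with:
          path: node_modules
          key: deps-${{ runner.os }}-${{ hashFiles('**/package-lock.json') }}
```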
RELEASES.md (174 lines changed)
@@ -1,77 +1,98 @@
 # Releases

-### 3.0.0
+### 4.2.3

-- Updated minimum runner version support from node 12 -> node 16
+- Bump `@actions/cache` to v4.0.3 (obfuscates SAS token in debug logs for cache entries)

-### 3.0.1
+### 4.2.2

-- Added support for caching from GHES 3.5.
-- Fixed download issue for files > 2GB during restore.
+- Bump `@actions/cache` to v4.0.2

-### 3.0.2
+### 4.2.1

-- Added support for dynamic cache size cap on GHES.
+- Bump `@actions/cache` to v4.0.1

-### 3.0.3
+### 4.2.0

-- Fixed avoiding empty cache save when no files are available for caching. ([issue](https://github.com/actions/cache/issues/624))
+TL;DR: the cache backend service has been rewritten from the ground up for improved performance and reliability. [actions/cache](https://github.com/actions/cache) now integrates with the new cache service (v2) APIs.

-### 3.0.4
+The new service will gradually roll out as of **February 1st, 2025**. The legacy service will also be sunset on the same date. The changes in this release are **fully backward compatible**.

-- Fixed tar creation error while trying to create tar with path as `~/` home folder on `ubuntu-latest`. ([issue](https://github.com/actions/cache/issues/689))
+**We are deprecating some versions of this action**. We recommend upgrading to version `v4` or `v3` as soon as possible before **February 1st, 2025** (upgrade instructions below).

-### 3.0.5
+If you are using pinned SHAs, please use the SHAs of versions `v4.2.0` or `v3.4.0`.

-- Removed error handling by consuming the actions/cache 3.0 toolkit; cache server error handling will now be done by the toolkit. ([PR](https://github.com/actions/cache/pull/834))
+If you do not upgrade, all workflow runs using any of the deprecated [actions/cache](https://github.com/actions/cache) versions will fail.

-### 3.0.6
+Upgrading to the recommended versions will not break your workflows.

-- Fixed [#809](https://github.com/actions/cache/issues/809) - zstd -d: no such file or directory error
-- Fixed [#833](https://github.com/actions/cache/issues/833) - cache doesn't work with github workspace directory
+### 4.1.2

-### 3.0.7
+- Add GitHub Enterprise Cloud instances hostname filters to inform API endpoint choices - [#1474](https://github.com/actions/cache/pull/1474)
+- Security fix: Bump braces from 3.0.2 to 3.0.3 - [#1475](https://github.com/actions/cache/pull/1475)

-- Fixed [#810](https://github.com/actions/cache/issues/810) - download stuck issue. A new timeout is introduced in the download process to abort the download if it gets stuck and doesn't finish within an hour.
+### 4.1.1

-### 3.0.8
+- Restore original behavior of `cache-hit` output - [#1467](https://github.com/actions/cache/pull/1467)

-- Fix zstd not working for windows on gnu tar in issues [#888](https://github.com/actions/cache/issues/888) and [#891](https://github.com/actions/cache/issues/891).
-- Allowing users to provide a custom timeout as input for aborting download of a cache segment using an environment variable `SEGMENT_DOWNLOAD_TIMEOUT_MINS`. Default is 60 minutes.
+### 4.1.0

-### 3.0.9
+- Ensure `cache-hit` output is set when a cache is missed - [#1404](https://github.com/actions/cache/pull/1404)
+- Deprecate `save-always` input - [#1452](https://github.com/actions/cache/pull/1452)

-- Enhanced the warning message for cache unavailability in case of GHES.
+### 4.0.2

-### 3.0.10
+- Fixed restore `fail-on-cache-miss` not working.

-- Fix a bug with sorting inputs.
-- Update definition for restore-keys in README.md
+### 4.0.1

-### 3.0.11
+- Updated `isGhes` check

-- Update toolkit version to 3.0.5 to include `@actions/core@^1.10.0`
-- Update `@actions/cache` to use updated `saveState` and `setOutput` functions from `@actions/core@^1.10.0`
+### 4.0.0

-### 3.1.0-beta.1
+- Updated minimum runner version support from node 12 -> node 20

-- Update `@actions/cache` on windows to use gnu tar and zstd by default and fallback to bsdtar and zstd if gnu tar is not available. ([issue](https://github.com/actions/cache/issues/984))
+### 3.4.0

-### 3.1.0-beta.2
+- Integrated with the new cache service (v2) APIs

-- Added support for fallback to gzip to restore old caches on windows.
+### 3.3.3

-### 3.1.0-beta.3
+- Updates @actions/cache to v3.2.3 to fix accidental mutated path arguments to `getCacheVersion` [actions/toolkit#1378](https://github.com/actions/toolkit/pull/1378)
+- Additional audit fixes of npm package(s)

-- Bug fixes for bsdtar fallback if gnutar not available and gzip fallback if cache saved using old cache action on windows.
+### 3.3.2

-### 3.2.0-beta.1
+- Fixes bug with Azure SDK causing blob downloads to get stuck.

-- Added two new actions - [restore](restore/action.yml) and [save](save/action.yml) for granular control on cache.
+### 3.3.1

-### 3.2.0
+- Reduced segment size to 128MB and segment timeout to 10 minutes to fail fast in case the cache download is stuck.

-- Released the two new actions - [restore](restore/action.yml) and [save](save/action.yml) for granular control on cache
+### 3.3.0

+- Added option to lookup cache without downloading it.

+### 3.2.6

+- Fix zstd not being used after zstd version upgrade to 1.5.4 on hosted runners.

+### 3.2.5

+- Added fix to prevent from setting MYSYS environment variable globally.

+### 3.2.4

+- Added option to fail job on cache miss.

+### 3.2.3

+- Support cross os caching on Windows as an opt-in feature.
+- Fix issue with symlink restoration on Windows for cross-os caches.

+### 3.2.2

+- Reverted the changes made in 3.2.1 to use gnu tar and zstd by default on windows.

+### 3.2.1

@@ -79,44 +100,75 @@
 - Added support for fallback to gzip to restore old caches on windows.
+- Added logs for cache version in case of a cache miss.

-### 3.2.2
+### 3.2.0

-- Reverted the changes made in 3.2.1 to use gnu tar and zstd by default on windows.
+- Released the two new actions - [restore](restore/action.yml) and [save](save/action.yml) for granular control on cache

-### 3.2.3
+### 3.2.0-beta.1

-- Support cross os caching on Windows as an opt-in feature.
-- Fix issue with symlink restoration on Windows for cross-os caches.
+- Added two new actions - [restore](restore/action.yml) and [save](save/action.yml) for granular control on cache.

-### 3.2.4
+### 3.1.0-beta.3

-- Added option to fail job on cache miss.
+- Bug fixes for bsdtar fallback if gnutar not available and gzip fallback if cache saved using old cache action on windows.

-### 3.2.5
+### 3.1.0-beta.2

-- Added fix to prevent from setting MYSYS environment variable globally.
+- Added support for fallback to gzip to restore old caches on windows.

-### 3.2.6
+### 3.1.0-beta.1

-- Fix zstd not being used after zstd version upgrade to 1.5.4 on hosted runners.
+- Update `@actions/cache` on windows to use gnu tar and zstd by default and fallback to bsdtar and zstd if gnu tar is not available. ([issue](https://github.com/actions/cache/issues/984))

-### 3.3.0
+### 3.0.11

-- Added option to lookup cache without downloading it.
+- Update toolkit version to 3.0.5 to include `@actions/core@^1.10.0`
+- Update `@actions/cache` to use updated `saveState` and `setOutput` functions from `@actions/core@^1.10.0`

-### 3.3.1
+### 3.0.10

-- Reduced segment size to 128MB and segment timeout to 10 minutes to fail fast in case the cache download is stuck.
+- Fix a bug with sorting inputs.
+- Update definition for restore-keys in README.md

-### 3.3.2
+### 3.0.9

-- Fixes bug with Azure SDK causing blob downloads to get stuck.
+- Enhanced the warning message for cache unavailability in case of GHES.

-### 3.3.3
+### 3.0.8

-- Updates @actions/cache to v3.2.3 to fix accidental mutated path arguments to `getCacheVersion` [actions/toolkit#1378](https://github.com/actions/toolkit/pull/1378)
-- Additional audit fixes of npm package(s)
+- Fix zstd not working for windows on gnu tar in issues [#888](https://github.com/actions/cache/issues/888) and [#891](https://github.com/actions/cache/issues/891).
+- Allowing users to provide a custom timeout as input for aborting download of a cache segment using an environment variable `SEGMENT_DOWNLOAD_TIMEOUT_MINS`. Default is 60 minutes.

-### 4.0.0
+### 3.0.7

-- Updated minimum runner version support from node 12 -> node 20
+- Fixed [#810](https://github.com/actions/cache/issues/810) - download stuck issue. A new timeout is introduced in the download process to abort the download if it gets stuck and doesn't finish within an hour.

+### 3.0.6

+- Fixed [#809](https://github.com/actions/cache/issues/809) - zstd -d: no such file or directory error
+- Fixed [#833](https://github.com/actions/cache/issues/833) - cache doesn't work with github workspace directory

+### 3.0.5

+- Removed error handling by consuming the actions/cache 3.0 toolkit; cache server error handling will now be done by the toolkit. ([PR](https://github.com/actions/cache/pull/834))

+### 3.0.4

+- Fixed tar creation error while trying to create tar with path as `~/` home folder on `ubuntu-latest`. ([issue](https://github.com/actions/cache/issues/689))

+### 3.0.3

+- Fixed avoiding empty cache save when no files are available for caching. ([issue](https://github.com/actions/cache/issues/624))

+### 3.0.2

+- Added support for dynamic cache size cap on GHES.

+### 3.0.1

+- Added support for caching from GHES 3.5.
+- Fixed download issue for files > 2GB during restore.

+### 3.0.0

+- Updated minimum runner version support from node 12 -> node 16
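The `SEGMENT_DOWNLOAD_TIMEOUT_MINS` variable mentioned in the 3.0.8 notes above is read from the step's environment. A minimal sketch, with an illustrative path and key:

```yaml
- uses: actions/cache@v4
  env:
    SEGMENT_DOWNLOAD_TIMEOUT_MINS: 10   # abort a stuck segment download after 10 minutes
  with:
    path: ~/.npm
    key: npm-${{ hashFiles('**/package-lock.json') }}
```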
@@ -8,17 +8,26 @@ import * as testUtils from "../src/utils/testUtils";
 jest.mock("@actions/core");
 jest.mock("@actions/cache");

+let pristineEnv: NodeJS.ProcessEnv;
+
 beforeAll(() => {
+    pristineEnv = process.env;
     jest.spyOn(core, "getInput").mockImplementation((name, options) => {
         return jest.requireActual("@actions/core").getInput(name, options);
     });
 });

-afterEach(() => {
+beforeEach(() => {
+    jest.resetModules();
+    process.env = pristineEnv;
     delete process.env[Events.Key];
     delete process.env[RefKey];
 });

+afterAll(() => {
+    process.env = pristineEnv;
+});
+
 test("isGhes returns true if server url is not github.com", () => {
     try {
         process.env["GITHUB_SERVER_URL"] = "http://example.com";

@@ -231,3 +240,28 @@ test("isCacheFeatureAvailable for ac disabled on dotcom", () => {
         delete process.env["GITHUB_SERVER_URL"];
     }
 });
+
+test("isGhes returns false when the GITHUB_SERVER_URL environment variable is not defined", async () => {
+    delete process.env["GITHUB_SERVER_URL"];
+    expect(actionUtils.isGhes()).toBeFalsy();
+});
+
+test("isGhes returns false when the GITHUB_SERVER_URL environment variable is set to github.com", async () => {
+    process.env["GITHUB_SERVER_URL"] = "https://github.com";
+    expect(actionUtils.isGhes()).toBeFalsy();
+});
+
+test("isGhes returns false when the GITHUB_SERVER_URL environment variable is set to a GitHub Enterprise Cloud-style URL", async () => {
+    process.env["GITHUB_SERVER_URL"] = "https://contoso.ghe.com";
+    expect(actionUtils.isGhes()).toBeFalsy();
+});
+
+test("isGhes returns false when the GITHUB_SERVER_URL environment variable has a .localhost suffix", async () => {
+    process.env["GITHUB_SERVER_URL"] = "https://mock-github.localhost";
+    expect(actionUtils.isGhes()).toBeFalsy();
+});
+
+test("isGhes returns true when the GITHUB_SERVER_URL environment variable is set to some other URL", async () => {
+    process.env["GITHUB_SERVER_URL"] = "https://src.onpremise.fabrikam.com";
+    expect(actionUtils.isGhes()).toBeTruthy();
+});
@@ -449,3 +449,19 @@ test("restore with lookup-only set", async () => {
     );
     expect(failedMock).toHaveBeenCalledTimes(0);
 });
+
+test("restore failure with earlyExit should call process exit", async () => {
+    testUtils.setInput(Inputs.Path, "node_modules");
+    const failedMock = jest.spyOn(core, "setFailed");
+    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
+    const processExitMock = jest.spyOn(process, "exit").mockImplementation();
+
+    // call restoreImpl with `earlyExit` set to true
+    await restoreImpl(new StateProvider(), true);
+
+    expect(restoreCacheMock).toHaveBeenCalledTimes(0);
+    expect(failedMock).toHaveBeenCalledWith(
+        "Input required and not supplied: key"
+    );
+    expect(processExitMock).toHaveBeenCalledWith(1);
+});
action.yml (10 lines changed)
@@ -9,7 +9,7 @@ inputs:
     description: 'An explicit key for restoring and saving the cache'
     required: true
   restore-keys:
-    description: 'An ordered list of keys to use for restoring stale cache if no cache hit occurred for key. Note `cache-hit` returns false in this case.'
+    description: 'An ordered multiline string listing the prefix-matched keys, that are used for restoring stale cache if no cache hit occurred for key. Note `cache-hit` returns false in this case.'
     required: false
   upload-chunk-size:
     description: 'The chunk size used to split up large files during upload, in bytes'

@@ -29,7 +29,11 @@ inputs:
   save-always:
     description: 'Run the post step to save the cache even if another step before fails'
     default: 'false'
     required: false
+    deprecationMessage: |
+      save-always does not work as intended and will be removed in a future release.
+      A separate `actions/cache/restore` step should be used instead.
+      See https://github.com/actions/cache/tree/main/save#always-save-cache for more details.
 outputs:
   cache-hit:
     description: 'A boolean value to indicate an exact match was found for the primary key'

@@ -37,7 +41,7 @@ runs:
   using: 'node20'
   main: 'dist/restore/index.js'
   post: 'dist/save/index.js'
-  post-if: "success() || github.event.inputs.save-always"
+  post-if: "success()"
 branding:
   icon: 'archive'
   color: 'gray-dark'
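The deprecation message above points to the granular restore/save actions as the replacement for `save-always`. A minimal sketch of that documented pattern, with an illustrative path, key, and build command (`if: always()` is what keeps the save step running when an earlier step fails):

```yaml
steps:
  - uses: actions/cache/restore@v4
    id: cache
    with:
      path: path/to/dependencies
      key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}

  - name: Build
    run: ./build.sh   # illustrative build step that may fail

  - uses: actions/cache/save@v4
    if: always()      # save the cache even if an earlier step failed
    with:
      path: path/to/dependencies
      key: ${{ steps.cache.outputs.cache-primary-key }}
```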
@@ -12,7 +12,7 @@ This document lists some of the strategies (and example workflows if possible) w
 jobs:
   build:
     runs-on: ubuntu-latest
-    - uses: actions/cache@v3
+    - uses: actions/cache@v4
       with:
         key: ${{ some-metadata }}-cache
 ```

@@ -24,7 +24,7 @@ In your workflows, you can use different strategies to name your key depending o
 One of the most common use cases is to use the hash of a lockfile as the key. This way, the same cache will be restored for a lockfile until there's a change in the dependencies listed in the lockfile.

 ```yaml
-- uses: actions/cache@v3
+- uses: actions/cache@v4
   with:
     path: |
       path/to/dependencies

@@ -37,7 +37,7 @@ One of the most common use cases is to use the hash of a lockfile as the key. Th
 If no cache is found matching the primary key, restore keys can be used to download the closest matching cache that was recently created. This ensures that the build/install step needs to fetch just a handful of newer dependencies, saving build time.

 ```yaml
-- uses: actions/cache@v3
+- uses: actions/cache@v4
   with:
     path: |
       path/to/dependencies

@@ -54,7 +54,7 @@ The restore keys can be provided as a complete name, or a prefix, read more [her
 In case of workflows with a matrix running for multiple Operating Systems, the caches can be stored separately for each of them. This can be used in combination with hashFiles in case multiple caches are being generated per OS.

 ```yaml
-- uses: actions/cache@v3
+- uses: actions/cache@v4
   with:
     path: |
       path/to/dependencies

@@ -73,7 +73,7 @@ Caches scoped to the particular workflow run id or run attempt can be stored and
 On similar lines, the commit sha can be used to create a very specialized and short-lived cache.

 ```yaml
-- uses: actions/cache@v3
+- uses: actions/cache@v4
   with:
     path: |
       path/to/dependencies

@@ -81,12 +81,12 @@ On similar lines, commit sha can be used to create a very specialized and short
     key: cache-${{ github.sha }}
 ```

-### Using multiple factors while forming a key depening on the need
+### Using multiple factors while forming a key depending on the need

 A cache key can be formed from a combination of more than one piece of metadata or evaluated info.

 ```yaml
-- uses: actions/cache@v3
+- uses: actions/cache@v4
   with:
     path: |
       path/to/dependencies

@@ -102,7 +102,7 @@ The [GitHub Context](https://docs.github.com/en/actions/learn-github-actions/con

 While setting paths for caching dependencies, it is important to give the correct path depending on the hosted runner you are using or whether the action is running in a container job. Assigning a different `path` for save and restore will result in a cache miss.

-Below are GiHub hosted runner specific paths one should take care of when writing a workflow which saves/restores caches across OS.
+Below are GitHub hosted runner specific paths one should take care of when writing a workflow which saves/restores caches across OS.

 #### Ubuntu Paths

@@ -146,9 +146,9 @@ In case you are using a centralized job to create and save your cache that can b
 ```yaml
 steps:
-  - uses: actions/checkout@v3
+  - uses: actions/checkout@v4

-  - uses: actions/cache/restore@v3
+  - uses: actions/cache/restore@v4
     id: cache
     with:
       path: path/to/dependencies

@@ -171,9 +171,9 @@ You can use the output of this action to exit the workflow on cache miss. This w
 ```yaml
 steps:
-  - uses: actions/checkout@v3
+  - uses: actions/checkout@v4

-  - uses: actions/cache/restore@v3
+  - uses: actions/cache/restore@v4
     id: cache
     with:
       path: path/to/dependencies

@@ -194,7 +194,7 @@ steps:
 If you want to avoid re-computing the cache key again in the `save` action, the outputs from the `restore` action can be used as input to the `save` action.

 ```yaml
-- uses: actions/cache/restore@v3
+- uses: actions/cache/restore@v4
   id: restore-cache
   with:
     path: |

@@ -204,7 +204,7 @@ If you want to avoid re-computing the cache key again in `save` action, the outp
 .
 .
 .
-- uses: actions/cache/save@v3
+- uses: actions/cache/save@v4
   with:
     path: |
       path/to/dependencies

@@ -219,7 +219,7 @@ On the other hand, the key can also be explicitly re-computed while executing th
 Let's say we have a restore step that computes a key at runtime:

 ```yaml
-uses: actions/cache/restore@v3
+uses: actions/cache/restore@v4
 id: restore-cache
 with:
   key: cache-${{ hashFiles('**/lockfiles') }}

@@ -228,7 +28,7 @@ with:
 Case 1: Where a user would want to reuse the key as it is

 ```yaml
-uses: actions/cache/save@v3
+uses: actions/cache/save@v4
 with:
   key: ${{ steps.restore-cache.outputs.cache-primary-key }}
 ```

@@ -236,31 +236,14 @@ with:
 Case 2: Where the user would want to re-evaluate the key

 ```yaml
-uses: actions/cache/save@v3
+uses: actions/cache/save@v4
 with:
   key: npm-cache-${{hashfiles(package-lock.json)}}
 ```

 ### Saving cache even if the build fails

-There can be cases where a cache should be saved even if the build job fails. For example, a job can fail due to flaky tests but the caches can still be re-used. You can use the `actions/cache/save` action to save the cache by using an `if: always()` condition.
-
-Similarly, the `actions/cache/save` action can be used conditionally based on the output of previous steps. This way you get more control over when to save the cache.
-
-```yaml
-steps:
-  - uses: actions/checkout@v3
-  .
-  . // restore if need be
-  .
-  - name: Build
-    run: /build.sh
-  - uses: actions/cache/save@v3
-    if: always() // or any other condition to invoke the save action
-    with:
-      path: path/to/dependencies
-      key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
-```
+See [Always save cache](./save/README.md#always-save-cache).

 ### Saving cache once and reusing in multiple workflows

@@ -270,12 +253,12 @@ In case of multi-module projects, where the built artifact of one project needs
 ```yaml
 steps:
-  - uses: actions/checkout@v3
+  - uses: actions/checkout@v4

   - name: Build
     run: ./build-parent-module.sh

-  - uses: actions/cache/save@v3
+  - uses: actions/cache/save@v4
     id: cache
     with:
       path: path/to/dependencies

@@ -286,9 +269,9 @@ steps:
 ```yaml
 steps:
-  - uses: actions/checkout@v3
+  - uses: actions/checkout@v4

-  - uses: actions/cache/restore@v3
+  - uses: actions/cache/restore@v4
     id: cache
     with:
       path: path/to/dependencies
dist/restore-only/index.js (vendored, 44082 lines; diff suppressed: lines too long)
dist/restore/index.js (vendored, 44082 lines; diff suppressed: lines too long)
dist/save-only/index.js (vendored, 44068 lines; diff suppressed: lines too long)
dist/save/index.js (vendored, 44068 lines; diff suppressed: lines too long)
108
examples.md
108
examples.md
|
|
@ -1,5 +1,6 @@
|
|||
# Examples
|
||||
|
||||
- [Bun](#bun)
|
||||
- [C# - NuGet](#c---nuget)
|
||||
- [Clojure - Lein Deps](#clojure---lein-deps)
|
||||
- [D - DUB](#d---dub)
|
||||
|
|
@ -41,12 +42,32 @@
|
|||
- [Swift - Mint](#swift---mint)
|
||||
- [* - Bazel](#---bazel)
|
||||
|
||||
## Bun
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.bun/install/cache
|
||||
key: ${{ runner.os }}-bun-${{ hashFiles('**/bun.lockb') }}
|
||||
```
|
||||
|
||||
### Windows
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~\.bun
|
||||
key: ${{ runner.os }}-bun-${{ hashFiles('**/bun.lockb') }}
|
||||
```
|
||||
|
||||
## C# - NuGet
|
||||
|
||||
Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies):
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.nuget/packages
|
||||
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
|
||||
|
|
@ -55,10 +76,10 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa
|
|||
```
|
||||
|
||||
Depending on the environment, huge packages might be pre-installed in the global cache folder.
|
||||
With `actions/cache@v3` you can now exclude unwanted packages with [exclude pattern](https://github.com/actions/toolkit/tree/main/packages/glob#exclude-patterns)
|
||||
With `actions/cache@v4` you can now exclude unwanted packages with [exclude pattern](https://github.com/actions/toolkit/tree/main/packages/glob#exclude-patterns)
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.nuget/packages
|
||||
|
|
@ -75,7 +96,7 @@ Or you could move the cache folder like below.
|
|||
env:
|
||||
NUGET_PACKAGES: ${{ github.workspace }}/.nuget/packages
|
||||
steps:
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ github.workspace }}/.nuget/packages
|
||||
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
|
||||
|
|
@ -87,7 +108,7 @@ steps:
|
|||
|
||||
```yaml
|
||||
- name: Cache lein project dependencies
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.m2/repository
|
||||
key: ${{ runner.os }}-clojure-${{ hashFiles('**/project.clj') }}
|
||||
|
|
@ -101,7 +122,7 @@ steps:
|
|||
### POSIX
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.dub
|
||||
key: ${{ runner.os }}-dub-${{ hashFiles('**/dub.selections.json') }}
|
||||
|
|
@ -112,7 +133,7 @@ steps:
|
|||
### Windows
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: ~\AppData\Local\dub
|
||||
key: ${{ runner.os }}-dub-${{ hashFiles('**/dub.selections.json') }}
|
||||
|
|
@ -125,7 +146,7 @@ steps:
|
|||
### Linux
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.deno
|
||||
|
|
@ -136,7 +157,7 @@ steps:
|
|||
### macOS
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.deno
|
||||
|
|
@ -147,7 +168,7 @@ steps:
|
|||
### Windows
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~\.deno
|
||||
|
|
@ -158,7 +179,7 @@ steps:
|
|||
## Elixir - Mix
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
deps
|
||||
|
|
@ -185,7 +206,7 @@ steps:
|
|||
### Linux
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cache/go-build
|
||||
|
|
@ -198,7 +219,7 @@ steps:
|
|||
### macOS
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/Library/Caches/go-build
|
||||
|
|
@ -211,7 +232,7 @@ steps:
|
|||
### Windows
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~\AppData\Local\go-build
|
||||
|
|
@ -227,7 +248,7 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
|
|||
|
||||
```yaml
|
||||
- name: Cache ~/.cabal/packages, ~/.cabal/store and dist-newstyle
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cabal/packages
|
||||
|
|
@ -242,14 +263,14 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
|
|||
### Linux or macOS
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
name: Cache ~/.stack
|
||||
with:
|
||||
path: ~/.stack
|
||||
key: ${{ runner.os }}-stack-global-${{ hashFiles('stack.yaml') }}-${{ hashFiles('package.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-stack-global-
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
name: Cache .stack-work
|
||||
with:
|
||||
path: .stack-work
|
||||
|
|
@ -261,7 +282,7 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
|
|||
### Windows
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
name: Cache %APPDATA%\stack %LOCALAPPDATA%\Programs\stack
|
||||
with:
|
||||
path: |
|
||||
|
|
@ -270,7 +291,7 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
|
|||
key: ${{ runner.os }}-stack-global-${{ hashFiles('stack.yaml') }}-${{ hashFiles('package.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-stack-global-
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
name: Cache .stack-work
|
||||
with:
|
||||
path: .stack-work
|
||||
|
|
@ -284,7 +305,7 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
|
|||
> **Note** Ensure no Gradle daemons are running anymore when your workflow completes. Creating the cache package might fail due to locks being held by Gradle. Refer to the [Gradle Daemon documentation](https://docs.gradle.org/current/userguide/gradle_daemon.html) on how to disable or stop the Gradle Daemons.
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.gradle/caches
|
||||
|
|
@ -298,7 +319,7 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
|
|||
|
||||
```yaml
|
||||
- name: Cache local Maven repository
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.m2/repository
|
||||
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
|
||||
|
|
@ -334,7 +355,7 @@ After [deprecation](https://github.blog/changelog/2022-10-11-github-actions-depr
|
|||
`Get npm cache directory` step can then be used with `actions/cache` as shown below
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
id: npm-cache # use this to check for `cache-hit` ==> if: steps.npm-cache.outputs.cache-hit != 'true'
|
||||
with:
|
||||
path: ${{ steps.npm-cache-dir.outputs.dir }}
|
||||
|
|
@ -347,7 +368,7 @@ After [deprecation](https://github.blog/changelog/2022-10-11-github-actions-depr
|
|||
|
||||
```yaml
|
||||
- name: restore lerna
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: '**/node_modules'
|
||||
key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock') }}
|
||||
|
|
@ -361,7 +382,7 @@ The yarn cache directory will depend on your operating system and version of `ya
|
|||
id: yarn-cache-dir-path
|
||||
run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
|
||||
with:
|
||||
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
|
||||
|
|
@ -379,7 +400,7 @@ The yarn 2 cache directory will depend on your config. See https://yarnpkg.com/c
|
|||
id: yarn-cache-dir-path
|
||||
run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
|
||||
with:
|
||||
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
|
||||
|
|
@ -394,7 +415,7 @@ Esy allows you to export built dependencies and import pre-built dependencies.
|
|||
```yaml
|
||||
- name: Restore Cache
|
||||
id: restore-cache
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: _export
|
||||
key: ${{ runner.os }}-esy-${{ hashFiles('esy.lock/index.json') }}
|
||||
|
|
@ -423,7 +444,7 @@ Esy allows you to export built dependencies and import pre-built dependencies.
|
|||
id: composer-cache
|
||||
run: |
|
||||
echo "dir=$(composer config cache-files-dir)" >> $GITHUB_OUTPUT
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ steps.composer-cache.outputs.dir }}
|
||||
key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }}
|
||||
|
|
@ -444,7 +465,7 @@ Locations:
|
|||
### Simple example
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.cache/pip
|
||||
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
|
||||
|
|
@ -457,7 +478,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
|
|||
### Multiple OS's in a workflow
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
if: startsWith(runner.os, 'Linux')
|
||||
with:
|
||||
path: ~/.cache/pip
|
||||
|
|
@ -465,7 +486,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
|
|||
restore-keys: |
|
||||
${{ runner.os }}-pip-
|
||||
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
if: startsWith(runner.os, 'macOS')
|
||||
with:
|
||||
path: ~/Library/Caches/pip
|
||||
|
|
@ -473,7 +494,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
|
|||
restore-keys: |
|
||||
${{ runner.os }}-pip-
|
||||
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
if: startsWith(runner.os, 'Windows')
|
||||
with:
|
||||
path: ~\AppData\Local\pip\Cache
|
||||
|
|
@ -499,7 +520,7 @@ jobs:
|
|||
- os: windows-latest
|
||||
path: ~\AppData\Local\pip\Cache
|
||||
steps:
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ matrix.path }}
|
||||
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
|
||||
|
|
@ -513,11 +534,12 @@ jobs:
|
|||
```yaml
|
||||
- name: Get pip cache dir
|
||||
id: pip-cache
|
||||
shell: bash
|
||||
run: |
|
||||
echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: pip cache
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ steps.pip-cache.outputs.dir }}
|
||||
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
|
||||
|
|
@ -535,7 +557,7 @@ jobs:
|
|||
|
||||
⋮
|
||||
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-python-${{ steps.setup-python.outputs.python-version }}-pipenv-${{ hashFiles('Pipfile.lock') }}
|
||||
|
|
@ -562,7 +584,7 @@ For renv, the cache directory will vary by OS. The `RENV_PATHS_ROOT` environment
|
|||
cat("##[set-output name=r-version;]", R.Version()$version.string, sep = "")
|
||||
shell: Rscript {0}
|
||||
- name: Restore Renv package cache
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ env.RENV_PATHS_ROOT }}
|
||||
key: ${{ steps.get-version.outputs.os-version }}-${{ steps.get-version.outputs.r-version }}-${{ inputs.cache-version }}-${{ hashFiles('renv.lock') }}
|
||||
|
|
@ -588,7 +610,7 @@ whenever possible:
|
|||
## Rust - Cargo
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
|
|
@ -603,7 +625,7 @@ whenever possible:
|
|||
|
||||
```yaml
|
||||
- name: Cache SBT
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.ivy2/cache
|
||||
|
|
@ -614,7 +636,7 @@ whenever possible:
|
|||
## Swift, Objective-C - Carthage
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: Carthage
|
||||
key: ${{ runner.os }}-carthage-${{ hashFiles('**/Cartfile.resolved') }}
|
||||
|
|
@ -625,7 +647,7 @@ whenever possible:
|
|||
## Swift, Objective-C - CocoaPods
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: Pods
|
||||
key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }}
|
||||
|
|
@ -636,7 +658,7 @@ whenever possible:
|
|||
## Swift - Swift Package Manager
|
||||
|
||||
```yaml
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: .build
|
||||
key: ${{ runner.os }}-spm-${{ hashFiles('**/Package.resolved') }}
|
||||
|
|
@ -651,7 +673,7 @@ env:
|
|||
MINT_PATH: .mint/lib
|
||||
MINT_LINK_PATH: .mint/bin
|
||||
steps:
|
||||
- uses: actions/cache@v3
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: .mint
|
||||
key: ${{ runner.os }}-mint-${{ hashFiles('**/Mintfile') }}
|
||||
|
|
@@ -667,7 +689,7 @@ steps:

```yaml
- name: Cache Bazel
  uses: actions/cache@v3
  uses: actions/cache@v4
  with:
    path: |
      ~/.cache/bazel
@@ -681,7 +703,7 @@ steps:

```yaml
- name: Cache Bazel
  uses: actions/cache@v3
  uses: actions/cache@v4
  with:
    path: |
      /private/var/tmp/_bazel_runner/
4241 package-lock.json generated
File diff suppressed because it is too large
17 package.json

@@ -1,11 +1,11 @@
{
    "name": "cache",
    "version": "4.0.0",
    "version": "4.2.3",
    "private": true,
    "description": "Cache dependencies and build outputs",
    "main": "dist/restore/index.js",
    "scripts": {
        "build": "tsc && ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts && ncc build -o dist/restore-only src/restoreOnly.ts && ncc build -o dist/save-only src/saveOnly.ts",
        "build": "ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts && ncc build -o dist/restore-only src/restoreOnly.ts && ncc build -o dist/save-only src/saveOnly.ts",
        "test": "tsc --noEmit && jest --coverage",
        "lint": "eslint **/*.ts --cache",
        "format": "prettier --write **/*.ts",
@@ -23,10 +23,13 @@
    "author": "GitHub",
    "license": "MIT",
    "dependencies": {
        "@actions/cache": "^3.2.3",
        "@actions/core": "^1.10.0",
        "@actions/cache": "^4.0.3",
        "@actions/core": "^1.11.1",
        "@actions/exec": "^1.1.1",
        "@actions/io": "^1.1.2"
        "@actions/io": "^1.1.3",
        "@aws-sdk/client-s3": "^3.511.0",
        "@aws-sdk/lib-storage": "^3.513.0",
        "@aws-sdk/s3-request-presigner": "^3.513.0"
    },
    "devDependencies": {
        "@types/jest": "^27.5.2",
@@ -34,7 +37,7 @@
    "@types/node": "^16.18.3",
    "@typescript-eslint/eslint-plugin": "^5.45.0",
    "@typescript-eslint/parser": "^5.45.0",
    "@vercel/ncc": "^0.38.1",
    "@vercel/ncc": "^0.38.3",
    "eslint": "^8.28.0",
    "eslint-config-prettier": "^8.5.0",
    "eslint-plugin-import": "^2.26.0",
@@ -44,7 +47,7 @@
    "jest": "^28.1.3",
    "jest-circus": "^27.5.1",
    "nock": "^13.2.9",
    "prettier": "^2.8.0",
    "prettier": "^2.8.8",
    "ts-jest": "^28.0.8",
    "typescript": "^4.9.3"
}
@@ -35,9 +35,9 @@ If you are using separate jobs to create and save your cache(s) to be reused by

```yaml
steps:
  - uses: actions/checkout@v3
  - uses: actions/checkout@v4

  - uses: actions/cache/restore@v3
  - uses: actions/cache/restore@v4
    id: cache
    with:
      path: path/to/dependencies
@@ -64,12 +64,12 @@ In case of multi-module projects, where the built artifact of one project needs

```yaml
steps:
  - uses: actions/checkout@v3
  - uses: actions/checkout@v4

  - name: Build
    run: /build-parent-module.sh

  - uses: actions/cache/save@v3
  - uses: actions/cache/save@v4
    id: cache
    with:
      path: path/to/dependencies
@@ -80,9 +80,9 @@ steps:

```yaml
steps:
  - uses: actions/checkout@v3
  - uses: actions/checkout@v4

  - uses: actions/cache/restore@v3
  - uses: actions/cache/restore@v4
    id: cache
    with:
      path: path/to/dependencies
@@ -107,9 +107,9 @@ To fail if there is no cache hit for the primary key, leave `restore-keys` empty

```yaml
steps:
  - uses: actions/checkout@v3
  - uses: actions/checkout@v4

  - uses: actions/cache/restore@v3
  - uses: actions/cache/restore@v4
    id: cache
    with:
      path: path/to/dependencies
@@ -9,7 +9,7 @@ inputs:
    description: 'An explicit key for restoring the cache'
    required: true
  restore-keys:
    description: 'An ordered list of keys to use for restoring stale cache if no cache hit occurred for key. Note `cache-hit` returns false in this case.'
    description: 'An ordered multiline string listing the prefix-matched keys, that are used for restoring stale cache if no cache hit occurred for key. Note `cache-hit` returns false in this case.'
    required: false
  enableCrossOsArchive:
    description: 'An optional boolean when enabled, allows windows runners to restore caches that were saved on other platforms'
@@ -23,7 +23,7 @@ If you are using separate jobs for generating common artifacts and sharing them

```yaml
steps:
  - uses: actions/checkout@v3
  - uses: actions/checkout@v4

  - name: Install Dependencies
    run: /install.sh
@@ -31,7 +31,7 @@ steps:
  - name: Build artifacts
    run: /build.sh

  - uses: actions/cache/save@v3
  - uses: actions/cache/save@v4
    id: cache
    with:
      path: path/to/dependencies
@@ -47,7 +47,7 @@ Let's say we have a restore step that computes a key at runtime.
#### Restore a cache

```yaml
uses: actions/cache/restore@v3
uses: actions/cache/restore@v4
id: restore-cache
with:
  key: cache-${{ hashFiles('**/lockfiles') }}
@@ -55,7 +55,7 @@ with:

#### Case 1 - Where a user would want to reuse the key as it is
```yaml
uses: actions/cache/save@v3
uses: actions/cache/save@v4
with:
  key: ${{ steps.restore-cache.outputs.cache-primary-key }}
```
@@ -63,26 +63,54 @@ with:

#### Case 2 - Where the user would want to re-evaluate the key

```yaml
uses: actions/cache/save@v3
uses: actions/cache/save@v4
with:
  key: npm-cache-${{ hashFiles('package-lock.json') }}
```

### Always save cache

There are instances where some flaky test cases would fail the entire workflow and users would get frustrated because the builds would run for hours and the cache couldn't be saved as the workflow failed in between. For such use-cases, users now have the ability to use the `actions/cache/save` action to save the cache by using an `if: always()` condition. This way the cache will always be saved if generated, or a warning will be generated that nothing is found on the cache path. Users can also use the `if` condition to only execute the `actions/cache/save` action depending on the output of previous steps. This way they get more control of when to save the cache.
There are instances where some flaky test cases would fail the entire workflow and users would get frustrated because the builds would run for hours and the cache couldn't be saved as the workflow failed in between.
For such use-cases, users now have the ability to use the `actions/cache/save` action to save the cache by using an [`always()`](https://docs.github.com/actions/writing-workflows/choosing-what-your-workflow-does/expressions#always) condition.
This way the cache will always be saved if generated, or a warning will be generated that nothing is found on the cache path. Users can also use the `if` condition to only execute the `actions/cache/save` action depending on the output of previous steps. This way they get more control of when to save the cache.

To avoid saving a cache that already exists, the `cache-hit` output from a restore step should be checked.

The `cache-primary-key` output from the restore step should also be used to ensure
the cache key does not change during the build if it's calculated based on file contents.

Here's an example where we imagine we're calculating a lot of prime numbers and want to cache them:

```yaml
steps:
  - uses: actions/checkout@v3
  .
  . // restore if need be
  .
  - name: Build
    run: /build.sh
  - uses: actions/cache/save@v3
    if: always() // or any other condition to invoke the save action
    with:
      path: path/to/dependencies
      key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
name: Always Caching Prime Numbers

on: push

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4

      - name: Restore cached Prime Numbers
        id: cache-prime-numbers-restore
        uses: actions/cache/restore@v4
        with:
          key: ${{ runner.os }}-prime-numbers
          path: |
            path/to/dependencies
            some/other/dependencies

      # Intermediate workflow steps

      - name: Always Save Prime Numbers
        id: cache-prime-numbers-save
        if: always() && steps.cache-prime-numbers-restore.outputs.cache-hit != 'true'
        uses: actions/cache/save@v4
        with:
          key: ${{ steps.cache-prime-numbers-restore.outputs.cache-primary-key }}
          path: |
            path/to/dependencies
            some/other/dependencies
```
220 src/custom/backend.ts Normal file

@@ -0,0 +1,220 @@
import {
    S3Client,
    GetObjectCommand,
    ListObjectsV2Command
} from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
import { createReadStream } from "fs";
import * as crypto from "crypto";
import {
    DownloadOptions,
    getDownloadOptions
} from "@actions/cache/lib/options";
import { CompressionMethod } from "@actions/cache/lib/internal/constants";
import * as core from "@actions/core";
import * as utils from "@actions/cache/lib/internal/cacheUtils";
import { Upload } from "@aws-sdk/lib-storage";
import { downloadCacheHttpClientConcurrent } from "./downloadUtils";

export interface ArtifactCacheEntry {
    cacheKey?: string;
    scope?: string;
    cacheVersion?: string;
    creationTime?: string;
    archiveLocation?: string;
}

// if executing from RunsOn, unset any existing AWS env variables so that we can use the IAM instance profile for credentials
// see unsetCredentials() in https://github.com/aws-actions/configure-aws-credentials/blob/v4.0.2/src/helpers.ts#L44
if (process.env.RUNS_ON_RUNNER_NAME && process.env.RUNS_ON_RUNNER_NAME !== "") {
    delete process.env.AWS_ACCESS_KEY_ID;
    delete process.env.AWS_SECRET_ACCESS_KEY;
    delete process.env.AWS_SESSION_TOKEN;
    delete process.env.AWS_REGION;
    delete process.env.AWS_DEFAULT_REGION;
}

const versionSalt = "1.0";
const bucketName = process.env.RUNS_ON_S3_BUCKET_CACHE;
const region =
    process.env.RUNS_ON_AWS_REGION ||
    process.env.AWS_REGION ||
    process.env.AWS_DEFAULT_REGION;
const forcePathStyle =
    process.env.RUNS_ON_S3_FORCE_PATH_STYLE === "true" ||
    process.env.AWS_S3_FORCE_PATH_STYLE === "true";

const uploadQueueSize = Number(process.env.UPLOAD_QUEUE_SIZE || "4");
const uploadPartSize =
    Number(process.env.UPLOAD_PART_SIZE || "32") * 1024 * 1024;
const downloadQueueSize = Number(process.env.DOWNLOAD_QUEUE_SIZE || "8");
const downloadPartSize =
    Number(process.env.DOWNLOAD_PART_SIZE || "16") * 1024 * 1024;

const s3Client = new S3Client({ region, forcePathStyle });

export function getCacheVersion(
    paths: string[],
    compressionMethod?: CompressionMethod,
    enableCrossOsArchive = false
): string {
    // don't pass changes upstream
    const components = paths.slice();

    // Add compression method to cache version to restore
    // compressed cache as per compression method
    if (compressionMethod) {
        components.push(compressionMethod);
    }

    // Only check for windows platforms if enableCrossOsArchive is false
    if (process.platform === "win32" && !enableCrossOsArchive) {
        components.push("windows-only");
    }

    // Add salt to cache version to support breaking changes in cache entry
    components.push(versionSalt);

    return crypto
        .createHash("sha256")
        .update(components.join("|"))
        .digest("hex");
}

function getS3Prefix(
    paths: string[],
    { compressionMethod, enableCrossOsArchive }
): string {
    const repository = process.env.GITHUB_REPOSITORY;
    const version = getCacheVersion(
        paths,
        compressionMethod,
        enableCrossOsArchive
    );

    return ["cache", repository, version].join("/");
}

export async function getCacheEntry(
    keys,
    paths,
    { compressionMethod, enableCrossOsArchive }
) {
    const cacheEntry: ArtifactCacheEntry = {};

    // Find the most recent key matching one of the restoreKeys prefixes
    for (const restoreKey of keys) {
        const s3Prefix = getS3Prefix(paths, {
            compressionMethod,
            enableCrossOsArchive
        });
        const listObjectsParams = {
            Bucket: bucketName,
            Prefix: [s3Prefix, restoreKey].join("/")
        };

        try {
            const { Contents = [] } = await s3Client.send(
                new ListObjectsV2Command(listObjectsParams)
            );
            if (Contents.length > 0) {
                // Sort keys by LastModified time in descending order
                const sortedKeys = Contents.sort(
                    (a, b) => Number(b.LastModified) - Number(a.LastModified)
                );
                const s3Path = sortedKeys[0].Key; // Return the most recent key
                cacheEntry.cacheKey = s3Path?.replace(`${s3Prefix}/`, "");
                cacheEntry.archiveLocation = `s3://${bucketName}/${s3Path}`;
                return cacheEntry;
            }
        } catch (error) {
            console.error(
                `Error listing objects with prefix ${restoreKey} in bucket ${bucketName}:`,
                error
            );
        }
    }

    return cacheEntry; // No keys found
}

export async function downloadCache(
    archiveLocation: string,
    archivePath: string,
    options?: DownloadOptions
): Promise<void> {
    if (!bucketName) {
        throw new Error("Environment variable RUNS_ON_S3_BUCKET_CACHE not set");
    }

    if (!region) {
        throw new Error("Environment variable RUNS_ON_AWS_REGION not set");
    }

    const archiveUrl = new URL(archiveLocation);
    const objectKey = archiveUrl.pathname.slice(1);
    const command = new GetObjectCommand({
        Bucket: bucketName,
        Key: objectKey
    });
    const url = await getSignedUrl(s3Client, command, {
        expiresIn: 3600
    });
    await downloadCacheHttpClientConcurrent(url, archivePath, {
        ...options,
        downloadConcurrency: downloadQueueSize,
        concurrentBlobDownloads: true,
        partSize: downloadPartSize
    });
}

export async function saveCache(
    key: string,
    paths: string[],
    archivePath: string,
    { compressionMethod, enableCrossOsArchive, cacheSize: archiveFileSize }
): Promise<void> {
    if (!bucketName) {
        throw new Error("Environment variable RUNS_ON_S3_BUCKET_CACHE not set");
    }

    if (!region) {
        throw new Error("Environment variable RUNS_ON_AWS_REGION not set");
    }

    const s3Prefix = getS3Prefix(paths, {
        compressionMethod,
        enableCrossOsArchive
    });
    const s3Key = `${s3Prefix}/${key}`;

    const multipartUpload = new Upload({
        client: s3Client,
        params: {
            Bucket: bucketName,
            Key: s3Key,
            Body: createReadStream(archivePath)
        },
        // Part size in bytes
        partSize: uploadPartSize,
        // Max concurrency
        queueSize: uploadQueueSize
    });

    // Commit Cache
    const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
    core.info(
        `Cache Size: ~${Math.round(
            cacheSize / (1024 * 1024)
        )} MB (${cacheSize} B)`
    );

    const totalParts = Math.ceil(cacheSize / uploadPartSize);
    core.info(`Uploading cache from ${archivePath} to ${bucketName}/${s3Key}`);
    multipartUpload.on("httpUploadProgress", progress => {
        core.info(`Uploaded part ${progress.part}/${totalParts}.`);
    });

    await multipartUpload.done();
    core.info(`Cache saved successfully.`);
}
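To make the S3 key layout above concrete, here is a minimal standalone sketch of how `getCacheVersion` and `getS3Prefix` address an entry. It is not part of the diff; the repository, path, compression method, and resolved key below are illustrative assumptions:

```ts
import * as crypto from "crypto";

// Same recipe as getCacheVersion(): SHA-256 over paths + compression method + salt.
const paths = ["~/.npm"]; // assumed cache path
const components = [...paths, "zstd", "1.0"]; // assumed "zstd" compression; versionSalt is "1.0"
const version = crypto
    .createHash("sha256")
    .update(components.join("|"))
    .digest("hex");

// Same layout as getS3Prefix(): cache/<repository>/<version>, with the key appended.
const repository = "my-org/my-repo"; // normally read from GITHUB_REPOSITORY
const key = "npm-abc123"; // assumed resolved cache key
console.log(["cache", repository, version, key].join("/"));
// => cache/my-org/my-repo/<64-char hex digest>/npm-abc123
```

On restore, `getCacheEntry` lists objects under `cache/<repository>/<version>/<restoreKey>` and picks the most recently modified match, which is what gives prefix-matched `restore-keys` their semantics in this backend.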
236 src/custom/cache.ts Normal file

@@ -0,0 +1,236 @@
// https://github.com/actions/toolkit/blob/%40actions/cache%403.2.2/packages/cache/src/cache.ts

import * as core from "@actions/core";
import * as path from "path";
import * as utils from "@actions/cache/lib/internal/cacheUtils";
import * as cacheHttpClient from "./backend";
import {
    createTar,
    extractTar,
    listTar
} from "@actions/cache/lib/internal/tar";
import { DownloadOptions, UploadOptions } from "@actions/cache/lib/options";

export class ValidationError extends Error {
    constructor(message: string) {
        super(message);
        this.name = "ValidationError";
        Object.setPrototypeOf(this, ValidationError.prototype);
    }
}

export class ReserveCacheError extends Error {
    constructor(message: string) {
        super(message);
        this.name = "ReserveCacheError";
        Object.setPrototypeOf(this, ReserveCacheError.prototype);
    }
}

function checkPaths(paths: string[]): void {
    if (!paths || paths.length === 0) {
        throw new ValidationError(
            `Path Validation Error: At least one directory or file path is required`
        );
    }
}

function checkKey(key: string): void {
    if (key.length > 512) {
        throw new ValidationError(
            `Key Validation Error: ${key} cannot be larger than 512 characters.`
        );
    }
    const regex = /^[^,]*$/;
    if (!regex.test(key)) {
        throw new ValidationError(
            `Key Validation Error: ${key} cannot contain commas.`
        );
    }
}

/**
 * isFeatureAvailable to check the presence of Actions cache service
 *
 * @returns boolean return true if Actions cache service feature is available, otherwise false
 */

export function isFeatureAvailable(): boolean {
    return !!process.env["ACTIONS_CACHE_URL"];
}

/**
 * Restores cache from keys
 *
 * @param paths a list of file paths to restore from the cache
 * @param primaryKey an explicit key for restoring the cache
 * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
 * @param downloadOptions cache download options
 * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
 * @returns string returns the key for the cache hit, otherwise returns undefined
 */
export async function restoreCache(
    paths: string[],
    primaryKey: string,
    restoreKeys?: string[],
    options?: DownloadOptions,
    enableCrossOsArchive = false
): Promise<string | undefined> {
    checkPaths(paths);

    restoreKeys = restoreKeys || [];
    const keys = [primaryKey, ...restoreKeys];

    core.debug("Resolved Keys:");
    core.debug(JSON.stringify(keys));

    if (keys.length > 10) {
        throw new ValidationError(
            `Key Validation Error: Keys are limited to a maximum of 10.`
        );
    }
    for (const key of keys) {
        checkKey(key);
    }

    const compressionMethod = await utils.getCompressionMethod();
    let archivePath = "";
    try {
        // paths are needed to compute the version
        const cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, {
            compressionMethod,
            enableCrossOsArchive
        });
        if (!cacheEntry?.archiveLocation) {
            // Cache not found
            return undefined;
        }

        if (options?.lookupOnly) {
            core.info("Lookup only - skipping download");
            return cacheEntry.cacheKey;
        }

        archivePath = path.join(
            await utils.createTempDirectory(),
            utils.getCacheFileName(compressionMethod)
        );
        core.debug(`Archive Path: ${archivePath}`);

        // Download the cache from the cache entry
        await cacheHttpClient.downloadCache(
            cacheEntry.archiveLocation,
            archivePath,
            options
        );

        if (core.isDebug()) {
            await listTar(archivePath, compressionMethod);
        }

        const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
        core.info(
            `Cache Size: ~${Math.round(
                archiveFileSize / (1024 * 1024)
            )} MB (${archiveFileSize} B)`
        );

        await extractTar(archivePath, compressionMethod);
        core.info("Cache restored successfully");

        return cacheEntry.cacheKey;
    } catch (error) {
        const typedError = error as Error;
        if (typedError.name === ValidationError.name) {
            throw error;
        } else {
            // Suppress all non-validation cache related errors because caching should be optional
            core.warning(`Failed to restore: ${(error as Error).message}`);
        }
    } finally {
        // Try to delete the archive to save space
        try {
            await utils.unlinkFile(archivePath);
        } catch (error) {
            core.debug(`Failed to delete archive: ${error}`);
        }
    }

    return undefined;
}

/**
 * Saves a list of files with the specified key
 *
 * @param paths a list of file paths to be cached
 * @param key an explicit key for restoring the cache
 * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
 * @param options cache upload options
 * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
 */
export async function saveCache(
    paths: string[],
    key: string,
    options?: UploadOptions,
    enableCrossOsArchive = false
): Promise<number> {
    checkPaths(paths);
    checkKey(key);

    const compressionMethod = await utils.getCompressionMethod();
    let cacheId = -1;

    const cachePaths = await utils.resolvePaths(paths);
    core.debug("Cache Paths:");
    core.debug(`${JSON.stringify(cachePaths)}`);

    if (cachePaths.length === 0) {
        throw new Error(
            `Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`
        );
    }

    const archiveFolder = await utils.createTempDirectory();
    const archivePath = path.join(
        archiveFolder,
        utils.getCacheFileName(compressionMethod)
    );

    core.debug(`Archive Path: ${archivePath}`);

    try {
        await createTar(archiveFolder, cachePaths, compressionMethod);
        if (core.isDebug()) {
            await listTar(archivePath, compressionMethod);
        }
        const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
        core.debug(`File Size: ${archiveFileSize}`);

        await cacheHttpClient.saveCache(key, paths, archivePath, {
            compressionMethod,
            enableCrossOsArchive,
            cacheSize: archiveFileSize
        });

        // Dummy cacheId: if we get here without throwing, the cache has been saved
        cacheId = 1;
    } catch (error) {
        const typedError = error as Error;
        if (typedError.name === ValidationError.name) {
            throw error;
        } else if (typedError.name === ReserveCacheError.name) {
            core.info(`Failed to save: ${typedError.message}`);
        } else {
            core.warning(`Failed to save: ${typedError.message}`);
        }
    } finally {
        // Try to delete the archive to save space
        try {
            await utils.unlinkFile(archivePath);
        } catch (error) {
            core.debug(`Failed to delete archive: ${error}`);
        }
    }

    return cacheId;
}
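Since the file mirrors the `@actions/cache` surface, call sites can switch backends without changing shape. A hedged usage sketch, assuming `RUNS_ON_S3_BUCKET_CACHE` and a region are configured as described in `backend.ts`; the paths and keys are made up:

```ts
import * as custom from "./custom/cache";

async function example(): Promise<void> {
    const paths = ["node_modules"]; // illustrative path
    const primaryKey = "npm-demo-abc123"; // illustrative key

    // Returns the matched key on a hit, or undefined on a miss.
    const hitKey = await custom.restoreCache(paths, primaryKey, ["npm-demo-"]);

    if (hitKey === undefined) {
        // ...install dependencies here, then save.
        // cacheId is 1 on success and -1 if the save failed non-fatally.
        const cacheId = await custom.saveCache(paths, primaryKey);
        console.log(`saveCache returned ${cacheId}`);
    }
}
```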
332 src/custom/downloadUtils.ts Normal file

@@ -0,0 +1,332 @@
// Just a copy of the original file from the toolkit/actions/cache repository, with a change for byte range used in the downloadCacheHttpClientConcurrent function.
import * as core from "@actions/core";
import { HttpClient } from "@actions/http-client";
import { TransferProgressEvent } from "@azure/ms-rest-js";
import * as fs from "fs";
import { DownloadOptions } from "@actions/cache/lib/options";
import { retryHttpClientResponse } from "@actions/cache/lib/internal/requestUtils";

export interface RunsOnDownloadOptions extends DownloadOptions {
    partSize: number;
}

/**
 * Class for tracking the download state and displaying stats.
 */
export class DownloadProgress {
    contentLength: number;
    segmentIndex: number;
    segmentSize: number;
    segmentOffset: number;
    receivedBytes: number;
    startTime: number;
    displayedComplete: boolean;
    timeoutHandle?: ReturnType<typeof setTimeout>;

    constructor(contentLength: number) {
        this.contentLength = contentLength;
        this.segmentIndex = 0;
        this.segmentSize = 0;
        this.segmentOffset = 0;
        this.receivedBytes = 0;
        this.displayedComplete = false;
        this.startTime = Date.now();
    }

    /**
     * Progress to the next segment. Only call this method when the previous segment
     * is complete.
     *
     * @param segmentSize the length of the next segment
     */
    nextSegment(segmentSize: number): void {
        this.segmentOffset = this.segmentOffset + this.segmentSize;
        this.segmentIndex = this.segmentIndex + 1;
        this.segmentSize = segmentSize;
        this.receivedBytes = 0;

        core.debug(
            `Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`
        );
    }

    /**
     * Sets the number of bytes received for the current segment.
     *
     * @param receivedBytes the number of bytes received
     */
    setReceivedBytes(receivedBytes: number): void {
        this.receivedBytes = receivedBytes;
    }

    /**
     * Returns the total number of bytes transferred.
     */
    getTransferredBytes(): number {
        return this.segmentOffset + this.receivedBytes;
    }

    /**
     * Returns true if the download is complete.
     */
    isDone(): boolean {
        return this.getTransferredBytes() === this.contentLength;
    }

    /**
     * Prints the current download stats. Once the download completes, this will print one
     * last line and then stop.
     */
    display(): void {
        if (this.displayedComplete) {
            return;
        }

        const transferredBytes = this.segmentOffset + this.receivedBytes;
        const percentage = (
            100 *
            (transferredBytes / this.contentLength)
        ).toFixed(1);
        const elapsedTime = Date.now() - this.startTime;
        const downloadSpeed = (
            transferredBytes /
            (1024 * 1024) /
            (elapsedTime / 1000)
        ).toFixed(1);

        core.info(
            `Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`
        );

        if (this.isDone()) {
            this.displayedComplete = true;
        }
    }

    /**
     * Returns a function used to handle TransferProgressEvents.
     */
    onProgress(): (progress: TransferProgressEvent) => void {
        return (progress: TransferProgressEvent) => {
            this.setReceivedBytes(progress.loadedBytes);
        };
    }

    /**
     * Starts the timer that displays the stats.
     *
     * @param delayInMs the delay between each write
     */
    startDisplayTimer(delayInMs = 1000): void {
        const displayCallback = (): void => {
            this.display();

            if (!this.isDone()) {
                this.timeoutHandle = setTimeout(displayCallback, delayInMs);
            }
        };

        this.timeoutHandle = setTimeout(displayCallback, delayInMs);
    }

    /**
     * Stops the timer that displays the stats. As this typically indicates the download
     * is complete, this will display one last line, unless the last line has already
     * been written.
     */
    stopDisplayTimer(): void {
        if (this.timeoutHandle) {
            clearTimeout(this.timeoutHandle);
            this.timeoutHandle = undefined;
        }

        this.display();
    }
}

/**
 * Download the cache using the Actions toolkit http-client concurrently
 *
 * @param archiveLocation the URL for the cache
 * @param archivePath the local path where the cache is saved
 */
export async function downloadCacheHttpClientConcurrent(
    archiveLocation: string,
    archivePath: fs.PathLike,
    options: RunsOnDownloadOptions
): Promise<void> {
    const archiveDescriptor = await fs.promises.open(archivePath, "w");
    const httpClient = new HttpClient("actions/cache", undefined, {
        socketTimeout: options.timeoutInMs,
        keepAlive: true
    });
    try {
        const res = await retryHttpClientResponse(
            "downloadCacheMetadata",
            async () =>
                await httpClient.request("GET", archiveLocation, null, {
                    Range: "bytes=0-1"
                })
        );

        const contentRange = res.message.headers["content-range"];
        if (!contentRange) {
            throw new Error("Range request not supported by server");
        }
        const match = contentRange?.match(/bytes \d+-\d+\/(\d+)/);
        if (!match) {
            throw new Error(
                "Content-Range header in server response not in correct format"
            );
        }
        const length = parseInt(match[1]);
        if (Number.isNaN(length)) {
            throw new Error(`Could not interpret Content-Length: ${length}`);
        }

        const downloads: {
            offset: number;
            promiseGetter: () => Promise<DownloadSegment>;
        }[] = [];

        const blockSize = options.partSize;

        for (let offset = 0; offset < length; offset += blockSize) {
            const count = Math.min(blockSize, length - offset);
            downloads.push({
                offset,
                promiseGetter: async () => {
                    return await downloadSegmentRetry(
                        httpClient,
                        archiveLocation,
                        offset,
                        count
                    );
                }
            });
        }

        // reverse to use .pop instead of .shift
        downloads.reverse();
        let actives = 0;
        let bytesDownloaded = 0;
        const progress = new DownloadProgress(length);
        progress.startDisplayTimer();
        const progressFn = progress.onProgress();

        const activeDownloads: { [offset: number]: Promise<DownloadSegment> } =
            [];
        let nextDownload:
            | { offset: number; promiseGetter: () => Promise<DownloadSegment> }
            | undefined;

        const waitAndWrite: () => Promise<void> = async () => {
            const segment = await Promise.race(Object.values(activeDownloads));
            await archiveDescriptor.write(
                segment.buffer,
                0,
                segment.count,
                segment.offset
            );
            actives--;
            delete activeDownloads[segment.offset];
            bytesDownloaded += segment.count;
            progressFn({ loadedBytes: bytesDownloaded });
        };

        while ((nextDownload = downloads.pop())) {
            activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
            actives++;

            if (actives >= (options.downloadConcurrency ?? 10)) {
                await waitAndWrite();
            }
        }

        while (actives > 0) {
            await waitAndWrite();
        }
    } finally {
        httpClient.dispose();
        await archiveDescriptor.close();
    }
}

async function downloadSegmentRetry(
    httpClient: HttpClient,
    archiveLocation: string,
    offset: number,
    count: number
): Promise<DownloadSegment> {
    const retries = 5;
    let failures = 0;

    while (true) {
        try {
            const timeout = 30000;
            const result = await promiseWithTimeout(
                timeout,
                downloadSegment(httpClient, archiveLocation, offset, count)
            );
            if (typeof result === "string") {
                throw new Error("downloadSegmentRetry failed due to timeout");
            }

            return result;
        } catch (err) {
            if (failures >= retries) {
                throw err;
            }

            failures++;
        }
    }
}

async function downloadSegment(
    httpClient: HttpClient,
    archiveLocation: string,
    offset: number,
    count: number
): Promise<DownloadSegment> {
    const partRes = await retryHttpClientResponse(
        "downloadCachePart",
        async () =>
            await httpClient.get(archiveLocation, {
                Range: `bytes=${offset}-${offset + count - 1}`
            })
    );

    if (!partRes.readBodyBuffer) {
        throw new Error(
            "Expected HttpClientResponse to implement readBodyBuffer"
        );
    }

    return {
        offset,
        count,
        buffer: await partRes.readBodyBuffer()
    };
}

declare class DownloadSegment {
    offset: number;
    count: number;
    buffer: Buffer;
}

const promiseWithTimeout = async <T>(
    timeoutMs: number,
    promise: Promise<T>
): Promise<T | string> => {
    let timeoutHandle: NodeJS.Timeout;
    const timeoutPromise = new Promise<string>(resolve => {
        timeoutHandle = setTimeout(() => resolve("timeout"), timeoutMs);
    });

    return Promise.race([promise, timeoutPromise]).then(result => {
        clearTimeout(timeoutHandle);
        return result;
    });
};
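The part that differs from the toolkit copy is the byte-range plan. A small self-contained sketch of the same partitioning arithmetic (sizes are illustrative; the helper name is not from the diff):

```ts
// Builds the inclusive ranges used for `Range: bytes=<offset>-<offset + count - 1>`.
function planRanges(contentLength: number, partSize: number): string[] {
    const ranges: string[] = [];
    for (let offset = 0; offset < contentLength; offset += partSize) {
        const count = Math.min(partSize, contentLength - offset);
        ranges.push(`bytes=${offset}-${offset + count - 1}`);
    }
    return ranges;
}

// A 40 MiB archive with 16 MiB parts yields two full parts and one short tail.
console.log(planRanges(40 * 1024 * 1024, 16 * 1024 * 1024));
// => [ 'bytes=0-16777215', 'bytes=16777216-33554431', 'bytes=33554432-41943039' ]
```

The total length itself is discovered with the initial `Range: bytes=0-1` probe and parsed out of the `Content-Range` response header.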
@@ -9,8 +9,12 @@ import {
} from "./stateProvider";
import * as utils from "./utils/actionUtils";

import * as custom from "./custom/cache";
const canSaveToS3 = process.env["RUNS_ON_S3_BUCKET_CACHE"] !== undefined;

export async function restoreImpl(
    stateProvider: IStateProvider
    stateProvider: IStateProvider,
    earlyExit?: boolean | undefined
): Promise<string | undefined> {
    try {
        if (!utils.isCacheFeatureAvailable()) {
@@ -41,15 +45,32 @@ export async function restoreImpl(
        const failOnCacheMiss = utils.getInputAsBool(Inputs.FailOnCacheMiss);
        const lookupOnly = utils.getInputAsBool(Inputs.LookupOnly);

        const cacheKey = await cache.restoreCache(
            cachePaths,
            primaryKey,
            restoreKeys,
            { lookupOnly: lookupOnly },
            enableCrossOsArchive
        );
        let cacheKey: string | undefined;

        if (canSaveToS3) {
            core.info(
                "The cache action detected a local S3 bucket cache. Using it."
            );
            cacheKey = await custom.restoreCache(
                cachePaths,
                primaryKey,
                restoreKeys,
                { lookupOnly: lookupOnly }
            );
        } else {
            cacheKey = await cache.restoreCache(
                cachePaths,
                primaryKey,
                restoreKeys,
                { lookupOnly: lookupOnly },
                enableCrossOsArchive
            );
        }

        if (!cacheKey) {
            // `cache-hit` is intentionally not set to `false` here to preserve existing behavior
            // See https://github.com/actions/cache/issues/1466

            if (failOnCacheMiss) {
                throw new Error(
                    `Failed to restore cache entry. Exiting as fail-on-cache-miss is set. Input key: ${primaryKey}`
@@ -61,7 +82,6 @@ export async function restoreImpl(
                    ...restoreKeys
                ].join(", ")}`
            );

            return;
        }
@@ -83,6 +103,9 @@ export async function restoreImpl(
        return cacheKey;
    } catch (error: unknown) {
        core.setFailed((error as Error).message);
        if (earlyExit) {
            process.exit(1);
        }
    }
}
@@ -90,14 +113,7 @@ async function run(
    stateProvider: IStateProvider,
    earlyExit: boolean | undefined
): Promise<void> {
    try {
        await restoreImpl(stateProvider);
    } catch (err) {
        console.error(err);
        if (earlyExit) {
            process.exit(1);
        }
    }
    await restoreImpl(stateProvider, earlyExit);

    // node will stay alive if any promises are not resolved,
    // which is a possibility if HTTP requests are dangling
@@ -9,6 +9,9 @@ import {
} from "./stateProvider";
import * as utils from "./utils/actionUtils";

import * as custom from "./custom/cache";
const canSaveToS3 = process.env["RUNS_ON_S3_BUCKET_CACHE"] !== undefined;

// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
// throw an uncaught exception. Instead of failing this action, just warn.
@@ -62,12 +65,29 @@ export async function saveImpl(
            Inputs.EnableCrossOsArchive
        );

        cacheId = await cache.saveCache(
            cachePaths,
            primaryKey,
            { uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize) },
            enableCrossOsArchive
        );
        if (canSaveToS3) {
            core.info(
                "The cache action detected a local S3 bucket cache. Using it."
            );

            cacheId = await custom.saveCache(
                cachePaths,
                primaryKey,
                {
                    uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
                },
                enableCrossOsArchive
            );
        } else {
            cacheId = await cache.saveCache(
                cachePaths,
                primaryKey,
                {
                    uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
                },
                enableCrossOsArchive
            );
        }

        if (cacheId != -1) {
            core.info(`Cache saved with key: ${primaryKey}`);
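Both `restoreImpl` and `saveImpl` now branch on the same switch: if `RUNS_ON_S3_BUCKET_CACHE` is set, the custom S3 backend handles the call, otherwise the hosted cache service does. A condensed sketch of the gate; the helper is illustrative, not the diff's exact shape:

```ts
// One env var selects the backend for both restore and save.
const canSaveToS3 = process.env["RUNS_ON_S3_BUCKET_CACHE"] !== undefined;

async function withBackend<T>(
    s3: () => Promise<T>,
    hosted: () => Promise<T>
): Promise<T> {
    return canSaveToS3 ? s3() : hosted();
}
```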
@@ -7,7 +7,13 @@ export function isGhes(): boolean {
    const ghUrl = new URL(
        process.env["GITHUB_SERVER_URL"] || "https://github.com"
    );
    return ghUrl.hostname.toUpperCase() !== "GITHUB.COM";

    const hostname = ghUrl.hostname.trimEnd().toUpperCase();
    const isGitHubHost = hostname === "GITHUB.COM";
    const isGitHubEnterpriseCloudHost = hostname.endsWith(".GHE.COM");
    const isLocalHost = hostname.endsWith(".LOCALHOST");

    return !isGitHubHost && !isGitHubEnterpriseCloudHost && !isLocalHost;
}

export function isExactKeyMatch(key: string, cacheKey?: string): boolean {
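The rewritten check treats `github.com`, `*.ghe.com`, and `*.localhost` as non-GHES hosts. A standalone sketch of the same logic with a few illustrative hostnames (the function name is ours, not the diff's):

```ts
// Mirrors the new isGhes() checks, for illustration only.
function looksLikeGhes(serverUrl: string): boolean {
    const hostname = new URL(serverUrl).hostname.trimEnd().toUpperCase();
    const isGitHubHost = hostname === "GITHUB.COM";
    const isGitHubEnterpriseCloudHost = hostname.endsWith(".GHE.COM");
    const isLocalHost = hostname.endsWith(".LOCALHOST");
    return !isGitHubHost && !isGitHubEnterpriseCloudHost && !isLocalHost;
}

console.log(looksLikeGhes("https://github.com"));           // false
console.log(looksLikeGhes("https://my-org.ghe.com"));       // false
console.log(looksLikeGhes("https://github.localhost"));     // false
console.log(looksLikeGhes("https://ghes.example.internal")); // true
```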
@@ -12,7 +12,7 @@ A cache today is immutable and cannot be updated. But some use cases require the

```yaml
- name: update cache on every commit
  uses: actions/cache@v3
  uses: actions/cache@v4
  with:
    path: prime-numbers
    key: primes-${{ runner.os }}-${{ github.run_id }} # Can use time based key as well
@@ -37,9 +37,8 @@ From `v3.2.3` cache is cross-os compatible when `enableCrossOsArchive` input is

## Force deletion of caches overriding default cache eviction policy

Caches have [branch scope restriction](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache) in place. This means that if caches for a specific branch are using a lot of storage quota, it may result into more frequently used caches from `default` branch getting thrashed. For example, if there are many pull requests happening on a repo and are creating caches, these cannot be used in default branch scope but will still occupy a lot of space till they get cleaned up by [eviction policy](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#usage-limits-and-eviction-policy). In order to achieve this, [gh-actions-cache cli](https://github.com/actions/gh-actions-cache/) can be used to delete caches for specific branches.
Caches have a [branch scope restriction](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache) in place. This means that if caches for a specific branch use a lot of the storage quota, more frequently used caches from the `default` branch may get thrashed. For example, if many pull requests on a repo are creating caches, those caches cannot be used in the default branch scope but will still occupy space until they are cleaned up by the [eviction policy](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#usage-limits-and-eviction-policy). Sometimes we want to clean them up on a faster cadence to ensure the default branch is not thrashed.

This workflow uses `gh cache` to delete all the caches created by a branch.
<details>
<summary>Example</summary>
@@ -60,29 +59,23 @@ jobs:
      actions: write
      contents: read
    steps:
      - name: Check out code
        uses: actions/checkout@v3

      - name: Cleanup
        run: |
          gh extension install actions/gh-actions-cache

          REPO=${{ github.repository }}
          BRANCH=refs/pull/${{ github.event.pull_request.number }}/merge

          echo "Fetching list of cache key"
          cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH | cut -f 1 )
          cacheKeysForPR=$(gh cache list --ref $BRANCH --limit 100 --json id --jq '.[].id')

          ## Setting this to not fail the workflow while deleting cache keys.
          ## Setting this to not fail the workflow while deleting cache keys.
          set +e
          echo "Deleting caches..."
          for cacheKey in $cacheKeysForPR
          do
            gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
            gh cache delete $cacheKey
          done
          echo "Done"
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GH_REPO: ${{ github.repository }}
          BRANCH: refs/pull/${{ github.event.pull_request.number }}/merge
```

</details>