Compare commits

..

100 Commits

Author SHA1 Message Date
Dave Hadka
c8d75a8073 Use retries on all API calls 2020-05-10 10:36:06 -04:00
Dave Hadka
a8b61326cf Disable zstd on Windows due to issue #301 2020-05-08 16:09:03 -04:00
Dave Hadka
25b1a139de Revert "Test disabling concurrency"
This reverts commit 6efe05572d.
2020-05-08 15:59:00 -04:00
Dave Hadka
6efe05572d Test disabling concurrency 2020-05-08 12:05:32 -04:00
Dave Hadka
aced43a650 Fix uploadChunk and add generic retry method 2020-05-08 11:37:53 -04:00
Justin Hutchings
ce9276c90e Add CodeQL Analysis workflow (#283)
* Add CodeQL Analysis workflow

* Rename .github/workflows/workflows/codeql.yml to .github/workflows/codeql.yml

* Clean up commented out stuff
2020-05-05 17:28:32 -04:00
Aiqiao Yan
9eb452c280 Merge pull request #270 from actions/users/aiyan/zstd
Prefer zstd over gzip
2020-05-04 10:39:28 -04:00
Aiqiao Yan
75cd46ec0c Use 30 as the long distance matching window to support both 32-bit and 64-bit OS 2020-05-01 14:25:15 -04:00
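
For context on the value: zstd's `--long=N` flag enables long-distance matching with a 2^N-byte window, and 2^30 (1 GiB) is the largest window a 32-bit zstd build can address, so 30 is the biggest value that works everywhere. A sketch of the resulting tar arguments, mirroring the `tar.test.ts` expectations later in this diff:

```typescript
// 2^30 = 1 GiB window: the maximum a 32-bit zstd build supports, so the same
// flags are safe on every runner architecture.
const windowLog = 30;
const compressArgs = ["--use-compress-program", `zstd -T0 --long=${windowLog}`]; // -T0: use all cores
const extractArgs = ["--use-compress-program", `zstd -d --long=${windowLog}`]; // window must match on extract
```
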
Aiqiao Yan
a5d9a3b1a6 Address PR feedback 2020-05-01 10:01:43 -04:00
Aiqiao Yan
97f7baa910 Use zstd instead of gzip if available
Add zstd to cache versioning
2020-04-30 14:40:17 -04:00
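
The versioning half of this commit is pinned down by the `getCacheVersion` tests later in this diff: the version hashes the `path` input, appending the compression method only when it is zstd, so existing gzip caches keep their old version. A minimal sketch (the salt value is an assumption):

```typescript
import * as crypto from "crypto";

function getCacheVersion(pathInput: string, compressionMethod?: "gzip" | "zstd"): string {
    const components = [pathInput];
    // Only zstd is appended, so pre-existing gzip caches stay addressable.
    if (compressionMethod === "zstd") {
        components.push(compressionMethod);
    }
    components.push("1.0"); // versionSalt (assumed): bump to invalidate old caches
    return crypto.createHash("sha256").update(components.join("|")).digest("hex");
}
```
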
dependabot[bot]
9ceee97d99 Bump @actions/http-client from 1.0.6 to 1.0.8 (#286)
Bumps [@actions/http-client](https://github.com/actions/http-client) from 1.0.6 to 1.0.8.
- [Release notes](https://github.com/actions/http-client/releases)
- [Changelog](https://github.com/actions/http-client/blob/master/RELEASES.md)
- [Commits](https://github.com/actions/http-client/commits)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-04-29 18:10:58 -04:00
Hugo van Kemenade
ccf9619480 Add Python example using 'pip cache dir' to get cache location (#285)
* Fix existing example

* Add Python example using 'pip cache dir' to get cache location

* Let users decide how they install pip 20.1+
2020-04-29 14:58:19 -04:00
David Hadka
9f07ee13de Merge pull request #284 from actions/promisify-pipeline
Better error handling during download
2020-04-29 13:50:12 -05:00
Dave Hadka
1ed0c23029 Use promisify of stream.pipeline for downloading 2020-04-29 13:24:26 -04:00
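
The point of the change is error propagation: a promisified `stream.pipeline` rejects if any stage of the pipe fails, whereas a bare `.pipe()` can swallow errors and leave a truncated archive behind. A minimal sketch of the pattern:

```typescript
import * as fs from "fs";
import * as stream from "stream";
import * as util from "util";

const pipeline = util.promisify(stream.pipeline);

async function pipeResponseToFile(
    response: NodeJS.ReadableStream,
    archivePath: string
): Promise<void> {
    // Rejects on a failure in either the response stream or the write stream.
    await pipeline(response, fs.createWriteStream(archivePath));
}
```
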
David Hadka
54626c4a4f Merge pull request #269 from actions/socket-timeout
Adds socket timeout and validates file size
2020-04-29 12:21:27 -05:00
Dave Hadka
48b62c1c52 Add comment for SocketTimeout 2020-04-28 21:31:41 -04:00
Dave Hadka
9bb13c71ec Fix lint issue, build .js files 2020-04-22 18:35:16 -04:00
Dave Hadka
8b2a57849f Adds socket timeout and validates file size 2020-04-22 18:23:41 -04:00
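
A sketch of the two guards this commit describes; the helper names and the timeout value are assumptions for illustration:

```typescript
import * as fs from "fs";
import { IncomingMessage } from "http";

const SocketTimeout = 5000; // ms (assumed value)

// Abort a stalled download instead of letting the job hang indefinitely.
function guardAgainstStall(message: IncomingMessage): void {
    message.socket.setTimeout(SocketTimeout, () => {
        message.destroy();
        console.log(`Aborting download, socket timed out after ${SocketTimeout} ms`);
    });
}

// After the download completes, confirm the bytes on disk match Content-Length
// so a truncated archive fails fast rather than during tar extraction.
function validateFileSize(message: IncomingMessage, archivePath: string): void {
    const contentLength = message.headers["content-length"];
    if (contentLength !== undefined) {
        const expected = parseInt(contentLength, 10);
        const actual = fs.statSync(archivePath).size;
        if (actual !== expected) {
            throw new Error(`Incomplete download, expected ${expected} bytes but got ${actual}`);
        }
    }
}
```
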
Matija Čupić
f00dedfa6c Use checkout@v2 in README example (#258) 2020-04-16 11:50:47 -04:00
Aiqiao Yan
12b87469d4 Merge pull request #252 from actions/users/aiyan/fallback-to-gnu-tar
Fall back to GNU tar if BSD tar is unavailable on Windows machines
2020-04-13 13:32:01 -04:00
Aiqiao Yan
52046d1409 Use path.sep in path replace 2020-04-13 12:20:27 -04:00
Aiqiao Yan
08438313d5 Fix macOS-latest test 2020-04-10 15:50:35 -04:00
Aiqiao Yan
7ccdf5c70d Rebase and rebuild 2020-04-10 15:34:34 -04:00
Aiqiao Yan
306f72536b Fix test 2020-04-10 15:33:43 -04:00
Aiqiao Yan
4fa017f2b7 Fall back to GNU tar if BSD tar is unavailable 2020-04-10 15:33:43 -04:00
David Hadka
78809b91d7 Merge pull request #250 from actions/test-relative-path
Fix caching directories outside of the working directory (relative paths)
2020-04-08 10:37:26 -05:00
Josh Gross
a4e3c3b64e Add -P flag for tar creation 2020-04-08 10:58:38 -04:00
Josh Gross
e5370355e6 Combine relative jobs into main test jobs 2020-04-08 10:52:52 -04:00
David Hadka
0e86d5c038 Update workflow.yml 2020-04-07 23:41:38 -04:00
David Hadka
2ba9edf492 Fix job names v2 2020-04-07 23:37:50 -04:00
David Hadka
f15bc7a0d9 Fix job names 2020-04-07 23:33:13 -04:00
David Hadka
b6b8aa78d8 Update workflow.yml 2020-04-07 23:31:27 -04:00
David Hadka
272268544c Add path argument to verify-cache-files.sh 2020-04-07 23:30:01 -04:00
David Hadka
64f8769515 Add path argument to create-cache-files.sh 2020-04-07 23:29:07 -04:00
David Hadka
4a724707e9 Add test for relative paths 2020-04-07 23:28:05 -04:00
Josh Gross
f60097cd16 Fix Lerna Example (#242)
* Fix lerna example

* Fix yaml spacing
2020-04-02 10:35:07 -04:00
Ethan Dennis
eb78578266 Cache multiple paths and add glob pattern support (#212)
* Allow for multiple line-delimited paths to cache

* Add initial minimatch support

* Use @actions/glob for pattern matching

* Cache multiple entries using --files-from tar input

remove known failing test

Quote tar paths

Add salt to test cache

Try reading input files from manifest

bump salt

Run test on macos

more testing

Run caching tests on 3 platforms

Run tests on self-hosted

Apparently can't reference hosted runners by name

Bump salt

wait for some time after save

more timing out

smarter waiting

Cache in tmp dir that won't be deleted

Use child_process instead of actions/exec

Revert tempDir hack

bump salt

more logging

More console logging

Use filepath with cacheHttpClient

Test cache restoration

Revert temp dir hack

debug logging

clean up cache.yml testing

Bump salt

change debug output

build actions

* unit test coverage for caching multiple dirs

* Ensure there's a locateable test folder at homedir

* Clean up code

* Version cache with all inputs

* Unit test getCacheVersion

* Include keys in getCacheEntry request

* Clean import orders

* Use fs promises in actionUtils tests

* Update import order for to fix linter errors

* Fix remaining linter error

* Remove platform-specific test code

* Add lerna example for caching multiple dirs

* Lerna example updated to v2

Co-Authored-By: Josh Gross <joshmgross@github.com>

Co-authored-by: Josh Gross <joshmgross@github.com>
2020-03-20 16:02:11 -04:00
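
The heart of the change: the `path` input becomes newline-delimited and may contain glob patterns (including `!` exclusions), resolved with `@actions/glob` relative to the workspace so tar entries stay portable. A sketch close in spirit to the `resolvePaths` tests later in this diff:

```typescript
import * as glob from "@actions/glob";
import * as path from "path";

async function resolvePaths(patterns: string[]): Promise<string[]> {
    const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
    // @actions/glob takes a newline-delimited pattern string, like the input.
    const globber = await glob.create(patterns.join("\n"));
    const files = await globber.glob();
    // Relative paths keep the tar entries valid on the restoring machine.
    return files.map(file => path.relative(workspace, file));
}
```
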
Dylan Musil
22d71e33ad Update Node Windows example to find the npm cache (#223) 2020-03-18 22:05:56 -04:00
BSKY
b13df3fa54 Update README.md (#213) 2020-03-18 09:44:24 -04:00
Henry Mercer
cae64ca3cd Attempt to delete the archive after extraction (#209)
This reduces storage space used once the Action has finished executing.
2020-03-18 09:43:56 -04:00
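
The mechanism is a best-effort delete of the downloaded archive once extraction finishes; a sketch, assuming failures should be logged rather than fail the action (see the `unlinkFile` test later in this diff):

```typescript
import * as fs from "fs";
import * as util from "util";

const unlinkFile = util.promisify(fs.unlink);

async function cleanUpArchive(archivePath: string): Promise<void> {
    try {
        // Drop the archive so it no longer counts against the runner's disk.
        await unlinkFile(archivePath);
    } catch (error) {
        console.log(`Failed to delete archive: ${(error as Error).message}`);
    }
}
```
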
Jeremy Loy
af8651e0c5 Include Kotlinscript Gradle files (#216)
Tested this with my own repo, which uses a mix of `build.gradle` and `build.gradle.kts` files, and this glob seems to be working correctly.

As an aside, please check out #215, as it would make the process of verifying these globs easier!
2020-03-18 09:40:55 -04:00
BSKY
6c471ae9f6 Add eslint-plugin-simple-import-sort (#219)
* Add eslint-plugin-simple-import-sort

* Update .eslintrc.json

* eslint --fix
2020-03-18 09:35:13 -04:00
Josh Gross
206172ea8e npm audit fix (#221) 2020-03-18 09:31:59 -04:00
dependabot[bot]
5833d5c131 Bump acorn from 5.7.3 to 5.7.4 (#214)
Bumps [acorn](https://github.com/acornjs/acorn) from 5.7.3 to 5.7.4.
- [Release notes](https://github.com/acornjs/acorn/releases)
- [Commits](https://github.com/acornjs/acorn/compare/5.7.3...5.7.4)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-03-17 16:12:29 -04:00
Sacha Ayoun
826785142a Adding examples for OCaml/esy (#199)
* Adding examples for esy as a workflow for OCaml files

* track v1 instead of v1.1.2

Co-Authored-By: Josh Gross <joshmgross@github.com>

* add link in the readme for ocaml-esy

* ocaml -> ocaml/reason

* link in readme says ocaml/reason

Co-authored-by: Josh Gross <joshmgross@github.com>
2020-02-26 17:43:11 -05:00
Carlos Guerrero
8e9c167fd7 Small message change (#195)
* Small message change

Remove the dot that generates confusion about whether it's part of the key or not

* Fix format-check

* Update tests
2020-02-25 14:16:36 -05:00
Kipras Melnikovas
e8230b28a9 Use different IDs for 1) getting the directory of yarn cache 2) the cache itself (#178)
* Use different IDs for 1) getting the directory of yarn cache 2) the cache itself

Using the current example + https://github.com/actions/cache#skipping-steps-based-on-cache-hit,

I came to the wrong conclusion that I could skip a step
if `cache-hit` was `true`: the ID I used was from the wrong step,
the `get yarn cache directory` step,
instead of the `get yarn cache` step itself.

I've updated the example in hopes that it'll be clearer for others as well!

Signed-off-by: Kipras Melnikovas <kipras@kipras.org>

* Explain which ID to use for `cache-hit` in yarn's example

Signed-off-by: Kipras Melnikovas <kipras@kipras.org>
2020-02-14 09:50:11 -05:00
eric sciple
4944275b95 test e2e during workflow (#185) 2020-02-13 12:38:56 -05:00
Josh Gross
78a4b2143b Bump version to 1.1.2 2020-02-05 10:40:53 -05:00
Josh Gross
4dc4b4e758 Change name back to Cache 2020-02-05 10:39:52 -05:00
Josh Gross
85aee6a487 Update docs with 5GB limit 2020-02-05 10:33:21 -05:00
Josh Gross
fab26f3f4f Bump version to 1.1.1 2020-02-05 09:55:35 -05:00
David Kale
4887979af8 proxy support (#166)
* Replace typed rest client with new http-client

* Send Content-Type: application/json and fix up some types

* Lint

* Consume @actions/http-client:1.0.5

* Consume @actions/http-client:1.0.6

* Don't send headers manually; http-client adds them automatically
2020-02-05 09:24:37 -05:00
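
What the client swap bought: `@actions/http-client` honors the standard `https_proxy`/`no_proxy` environment variables, so requests automatically go through a configured proxy. A sketch (the URL is a placeholder):

```typescript
import { HttpClient } from "@actions/http-client";

async function probe(): Promise<void> {
    // Proxy settings are picked up from the environment; no extra code needed.
    const client = new HttpClient("actions/cache");
    const response = await client.get("https://example.com/_apis/artifactcache");
    console.log(`status: ${response.message.statusCode}`);
}
```
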
David Hadka
f9c9166ecb Increase cache limit to 5 GBs (#168)
* Increase cache limit to 5 GBs

* Fix test to use new size limit

* Update src/save.ts

Co-Authored-By: Josh Gross <joshmgross@github.com>

Co-authored-by: Josh Gross <joshmgross@github.com>
2020-02-01 16:11:02 -05:00
Josh Gross
23e301d35c Disable fail-fast to get full coverage of failures 2020-01-29 20:34:56 -05:00
Florian Kostenzer
e43776276f Add Swift Package Manager (SPM) example (#159)
* Add Swift - SPM to examples

* Add link SPM example link to readme

* remove extra newline

* remove another extra newline
2020-01-29 11:13:59 -05:00
Shinya Uryu
b6d538e2aa Add renv examples (#151)
* Add renv examples

* Add link in main readme.md
2020-01-21 19:22:40 -05:00
Edward Thomson
296374f6c9 Update action's description (#75)
* README: clarify case on the action

* Update description
2020-01-14 10:11:41 -05:00
Mike Coutermarsh
6c11532937 Update Ruby docs. "Gem" -> "Bundler" (#150)
* Use "Bundler" which is the package manager

"Gem" isn't wrong, but not typically what a Ruby developer would think of.

* Update links

* Update links
2020-01-12 18:48:43 -05:00
Daniel Shuy
c33bff8d72 Add Scala - SBT example (#134)
* Add Scala - SBT example

* Add Scala - SBT example to README
2020-01-10 17:09:06 -05:00
Flavio Corpa
d1991bb4c5 Add Haskell - Cabal example (#148)
* Add Haskell - Cabal example

* Add link in main readme.md
2020-01-10 17:07:52 -05:00
Josh Gross
60e292adf7 Update cache limits (#140) 2020-01-07 15:01:47 -05:00
Josh Gross
c262ac0154 Fix number parsing issues 2020-01-06 14:06:24 -05:00
Josh Gross
1da52de10f npm audit fix 2020-01-06 13:31:03 -05:00
Josh Gross
b45d91cc4b Chunked Cache Upload APIs (#128)
* Initial pass at chunked upload apis

* Fix cacheEntry type

* Linting

* Fix download cache entry tests

* Linting tests

* Pull in fixes from testing branch

* Fix typo in ReserveCacheResponse

* Add test covering reserve cache failure

* Add retries to upload chunk

* PR feedback

* Format default chunk size

* Remove responses array
2020-01-06 13:05:50 -05:00
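
The upload shape introduced here: reserve a cache entry for the key, send the archive in fixed-size ranges, then commit. A sketch with the chunk size and the `uploadChunk` callback assumed for illustration:

```typescript
import * as fs from "fs";

const UploadChunkSize = 32 * 1024 * 1024; // 32 MB (assumed; the real value may differ)

// `uploadChunk` stands in for the HTTP call that sends one range of the
// archive; retries would wrap each call.
async function uploadFileInChunks(
    archivePath: string,
    uploadChunk: (start: number, end: number, data: Buffer) => Promise<void>
): Promise<void> {
    const fileSize = fs.statSync(archivePath).size;
    const fd = fs.openSync(archivePath, "r");
    try {
        for (let offset = 0; offset < fileSize; offset += UploadChunkSize) {
            const length = Math.min(UploadChunkSize, fileSize - offset);
            const buffer = Buffer.alloc(length);
            fs.readSync(fd, buffer, 0, length, offset);
            // Content-Range: bytes <start>-<end>/* on the real request
            await uploadChunk(offset, offset + length - 1, buffer);
        }
    } finally {
        fs.closeSync(fd);
    }
}
```
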
Kevin Burke
a631fadf14 README.md: fix grammar error (#136)
"it's" is short for "it is," but the use in this sentence is as a
possessive - something belonging to "it" - hence, "its" is correct.
2019-12-23 10:30:34 -05:00
Chris Patterson
e223b0a12d Merge pull request #124 from nogic1008/patch-1
Add Another C# Example to use personal cache folder
2019-12-16 10:24:25 -05:00
Nogic
decbafc350 Update examples.md
Co-Authored-By: Chris Patterson <chrispat@github.com>
2019-12-16 09:45:29 +09:00
Josh Gross
3854a40aee Use BSD tar on Windows (#126)
* Use BSD tar on Windows

* Linting

* Fall back to which tar if no system tar

* Fix formatting

* Bump prettier and typescript
2019-12-13 17:24:37 -05:00
Nogic
0188dffc5a Revert original C# Example
* Treat "Use Personal Cache Folder" way as another C# example
* Describe the situation in which another example should be used
2019-12-13 10:03:43 +09:00
Nogic
002d3a77f4 Use Personal Cache Folder in C# Example
Ref: #115
2019-12-10 09:21:47 +09:00
Jon Pugh
4809f4ada4 Add list of implementation examples. (#116)
More visibility into the samples by having them on the main README. Easier to see, better SEO.
2019-12-07 18:25:23 -05:00
Evan Cloutier
3d01b4eb53 Update Ruby example in documentation to specify bundler path (#113)
* Update Ruby example to specify bundler path

* Fix spacing
2019-11-23 14:13:50 -05:00
Josh Gross
95c1798369 Remove validation failures and warning annotations (#108)
* Update warnings behavior

* Add void return type
2019-11-21 14:37:54 -05:00
Josh Gross
639f9d8b81 Mask download URL in logs (#110) 2019-11-21 14:37:32 -05:00
Josh Gross
d9fe1b81f9 Release 1.0.2 2019-11-19 11:55:11 -05:00
Josh Gross
92ae3b63f8 Update badge link 2019-11-15 15:04:12 -05:00
Josh Gross
84b3b283f0 Await io mkdirP (#100) 2019-11-15 10:25:57 -05:00
Josh Gross
8d14a2150b Add unit tests for save (#98)
* Clean up args and arrange imports

* Arrange args in restore tests

* Add unit tests for save

* Use const instead of let (linting)
2019-11-14 17:14:16 -05:00
Josh Gross
c0584c42d1 Add unit tests for actionUtils (#93)
* Add unit tests for actionUtils

* Fix file size on ubuntu and test name

* Remove unused import
2019-11-13 16:13:00 -05:00
Josh Gross
bb828da54c Format cache size and display on info (#85) 2019-11-13 11:00:46 -05:00
Josh Gross
7e7aef2963 Add pip examples (#86) 2019-11-13 10:55:05 -05:00
Josh Gross
b7d83b4095 Provide better errors for unsupported event types (#68)
* Validate event type during restore

* PR Feedback

* Format

* Linting
2019-11-13 10:54:39 -05:00
Josh Gross
50a2fdee6f Update yarn cache example (#70)
* Update yarn cache example

* Update examples.md

Co-Authored-By: Eric Taylor <erictaylor89@gmail.com>
2019-11-13 10:18:47 -05:00
Josh Gross
f0cbadd748 Use cache in workflows (#90) 2019-11-12 17:48:19 -05:00
Josh Gross
4657a5f525 Fix lint on Windows (#89) 2019-11-12 17:01:15 -05:00
BSKY
fb50aa45ec Add initial eslint setup (#88) 2019-11-12 16:48:02 -05:00
BSKY
31508256ff Update README.md (#76) 2019-11-12 15:33:22 -05:00
Josh Gross
bc821d0c12 Remove recommendation to cache node_modules (#69)
* Update npm caching examples

* Fix output name

* Remove extra details tag
2019-11-07 21:04:46 -05:00
Josh Gross
bde557aefd Fix PR filters 2019-11-07 20:02:06 -05:00
Josh Gross
4b0709a0d5 Add unit tests for restore (#62)
* Move archive file size to utils

* Disable net connect with nock

* Add unit tests for restore

* Fix test names and test URL
2019-11-06 13:41:45 -05:00
Kai Neuwerth
ecf6eea708 Add PHP Composer example (#32) 2019-11-05 16:18:49 -05:00
Josh Gross
eb10706a9d Bump version to v1.0.1 and audit fix 2019-11-05 15:40:20 -05:00
Birunthan Mohanathas
30524a6fbd Tweak 'Cache not found' message (#54)
Previously the message was like this:

```
Cache not found for input keys: ["xxx",""]
```

Note the empty entry at the end because `String.prototype.split` results
in an array with one empty string if there was nothing to split.

Now it looks like:

```
Cache not found for input keys: xxx
```
2019-11-05 15:33:41 -05:00
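
To illustrate the `split` behavior described above, and the filter that fixes it (a sketch):

```typescript
const raw = "xxx,";                          // key list with a trailing separator
console.log(raw.split(","));                 // [ 'xxx', '' ]  <- stray empty entry
const keys = raw.split(",").filter(x => x !== "");
console.log(`Cache not found for input keys: ${keys.join(", ")}`);
// -> Cache not found for input keys: xxx
```
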
Josh Gross
b034b26a44 Bump cache limit to 400MB (#61) 2019-11-05 15:24:22 -05:00
Hugo van Kemenade
e1ed41a9c9 Link to docs (#58)
* Link to docs

* Attempt to default to user's browser language first
2019-11-05 14:09:13 -05:00
Hugo van Kemenade
5f4d4d4555 Alphabetise examples (#52) 2019-11-05 12:04:07 -05:00
Koen Punt
5d3ad75a2b Update example formatting (#57)
* adjust formatting of Carthage example

* enable syntax highlighting for Cargo example
2019-11-05 11:03:56 -05:00
Josh Gross
d8c5e69fe2 Update badge to filter to master push events 2019-11-05 10:59:26 -05:00
Josh Gross
f66a56e59e Bump version to v1 (#51) 2019-11-04 16:40:33 -05:00
25 changed files with 11630 additions and 6836 deletions

.eslintrc.json

@@ -12,5 +12,12 @@
"plugin:prettier/recommended",
"prettier/@typescript-eslint"
],
"plugins": ["@typescript-eslint", "jest"]
"plugins": ["@typescript-eslint", "simple-import-sort", "jest"],
"rules": {
"import/first": "error",
"import/newline-after-import": "error",
"import/no-duplicates": "error",
"simple-import-sort/sort": "error",
"sort-imports": "off"
}
}

.github/workflows/codeql.yml (vendored, new file, +35)

@@ -0,0 +1,35 @@
name: "Code Scanning - Action"
on:
push:
schedule:
- cron: '0 0 * * 0'
jobs:
CodeQL-Build:
strategy:
fail-fast: false
# CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below).
- name: Autobuild
uses: github/codeql-action/autobuild@v1
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1

.github/workflows/workflow.yml

@@ -13,41 +13,129 @@ on:
- '**.md'
jobs:
test:
name: Test on ${{ matrix.os }}
# Build and unit test
build:
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v1
- uses: actions/setup-node@v1
- name: Checkout
uses: actions/checkout@v2
- name: Setup Node.js
uses: actions/setup-node@v1
with:
node-version: '12.x'
- name: Get npm cache directory
- name: Determine npm cache directory
id: npm-cache
run: |
echo "::set-output name=dir::$(npm config get cache)"
- uses: actions/cache@v1
- name: Restore npm cache
uses: actions/cache@v1
with:
path: ${{ steps.npm-cache.outputs.dir }}
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
- run: npm ci
- name: Prettier Format Check
run: npm run format-check
- name: ESLint Check
run: npm run lint
- name: Build & Test
run: npm run test
# End to end save and restore
test-save:
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Generate files in working directory
shell: bash
run: __tests__/create-cache-files.sh ${{ runner.os }} test-cache
- name: Generate files outside working directory
shell: bash
run: __tests__/create-cache-files.sh ${{ runner.os }} ~/test-cache
- name: Save cache
uses: ./
with:
key: test-${{ runner.os }}-${{ github.run_id }}
path: |
test-cache
~/test-cache
test-restore:
needs: test-save
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Restore cache
uses: ./
with:
key: test-${{ runner.os }}-${{ github.run_id }}
path: |
test-cache
~/test-cache
- name: Verify cache files in working directory
shell: bash
run: __tests__/verify-cache-files.sh ${{ runner.os }} test-cache
- name: Verify cache files outside working directory
shell: bash
run: __tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache
# End to end with proxy
test-proxy-save:
runs-on: ubuntu-latest
container:
image: ubuntu:latest
options: --dns 127.0.0.1
services:
squid-proxy:
image: datadog/squid:latest
ports:
- 3128:3128
env:
https_proxy: http://squid-proxy:3128
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Generate files
run: __tests__/create-cache-files.sh proxy test-cache
- name: Save cache
uses: ./
with:
key: test-proxy-${{ github.run_id }}
path: test-cache
test-proxy-restore:
needs: test-proxy-save
runs-on: ubuntu-latest
container:
image: ubuntu:latest
options: --dns 127.0.0.1
services:
squid-proxy:
image: datadog/squid:latest
ports:
- 3128:3128
env:
https_proxy: http://squid-proxy:3128
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Restore cache
uses: ./
with:
key: test-proxy-${{ github.run_id }}
path: test-cache
- name: Verify cache
run: __tests__/verify-cache-files.sh proxy test-cache

.gitignore (vendored, 6 changed lines)

@@ -1,8 +1,5 @@
__tests__/runner/*
# comment out in distribution branches
dist/
node_modules/
lib/
@@ -94,3 +91,6 @@ typings/
# DynamoDB Local files
.dynamodb/
# Text editor files
.vscode/

README.md

@@ -1,6 +1,6 @@
# cache
This GitHub Action allows caching dependencies and build outputs to improve workflow execution time.
This action allows caching dependencies and build outputs to improve workflow execution time.
<a href="https://github.com/actions/cache/actions?query=workflow%3ATests"><img alt="GitHub Actions status" src="https://github.com/actions/cache/workflows/Tests/badge.svg?branch=master&event=push"></a>
@@ -35,9 +35,9 @@ on: push
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v2
- name: Cache Primes
id: cache-primes
@@ -49,18 +49,40 @@ jobs:
- name: Generate Prime Numbers
if: steps.cache-primes.outputs.cache-hit != 'true'
run: /generate-primes.sh -d prime-numbers
- name: Use Prime Numbers
run: /primes.sh -d prime-numbers
```
## Ecosystem Examples
## Implementation Examples
See [Examples](examples.md)
Every programming language and framework has its own way of caching.
See [Examples](examples.md) for a list of `actions/cache` implementations for use with:
- [C# - Nuget](./examples.md#c---nuget)
- [Elixir - Mix](./examples.md#elixir---mix)
- [Go - Modules](./examples.md#go---modules)
- [Haskell - Cabal](./examples.md#haskell---cabal)
- [Java - Gradle](./examples.md#java---gradle)
- [Java - Maven](./examples.md#java---maven)
- [Node - npm](./examples.md#node---npm)
- [Node - Lerna](./examples.md#node---lerna)
- [Node - Yarn](./examples.md#node---yarn)
- [OCaml/Reason - esy](./examples.md##ocamlreason---esy)
- [PHP - Composer](./examples.md#php---composer)
- [Python - pip](./examples.md#python---pip)
- [R - renv](./examples.md#r---renv)
- [Ruby - Bundler](./examples.md#ruby---bundler)
- [Rust - Cargo](./examples.md#rust---cargo)
- [Scala - SBT](./examples.md#scala---sbt)
- [Swift, Objective-C - Carthage](./examples.md#swift-objective-c---carthage)
- [Swift, Objective-C - CocoaPods](./examples.md#swift-objective-c---cocoapods)
- [Swift - Swift Package Manager](./examples.md#swift---swift-package-manager)
## Cache Limits
Individual caches are limited to 400MB and a repository can have up to 2GB of caches. Once the 2GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.
A repository can have up to 5GB of caches. Once the 5GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.
## Skipping steps based on cache-hit
@@ -69,14 +91,14 @@ Using the `cache-hit` output, subsequent steps (such as install or build) can be
Example:
```yaml
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v2
- uses: actions/cache@v1
id: cache
with:
path: path/to/dependencies
key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: /install.sh

__tests__/actionUtils.test.ts

@@ -1,4 +1,6 @@
import * as core from "@actions/core";
import * as io from "@actions/io";
import { promises as fs } from "fs";
import * as os from "os";
import * as path from "path";
@@ -6,13 +8,24 @@ import { Events, Outputs, State } from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import * as actionUtils from "../src/utils/actionUtils";
import uuid = require("uuid");
jest.mock("@actions/core");
jest.mock("os");
function getTempDir(): string {
return path.join(__dirname, "_temp", "actionUtils");
}
afterEach(() => {
delete process.env[Events.Key];
});
afterAll(async () => {
delete process.env["GITHUB_WORKSPACE"];
await io.rmRF(getTempDir());
});
test("getArchiveFileSize returns file size", () => {
const filePath = path.join(__dirname, "__fixtures__", "helloWorld.txt");
@@ -181,17 +194,43 @@ test("isValidEvent returns false for unknown event", () => {
expect(isValidEvent).toBe(false);
});
test("resolvePath with no ~ in path", () => {
const filePath = ".cache/yarn";
test("resolvePaths with no ~ in path", async () => {
const filePath = ".cache";
const resolvedPath = actionUtils.resolvePath(filePath);
// Create the following layout:
// cwd
// cwd/.cache
// cwd/.cache/file.txt
const expectedPath = path.resolve(filePath);
expect(resolvedPath).toBe(expectedPath);
const root = path.join(getTempDir(), "no-tilde");
// tarball entries will be relative to workspace
process.env["GITHUB_WORKSPACE"] = root;
await fs.mkdir(root, { recursive: true });
const cache = path.join(root, ".cache");
await fs.mkdir(cache, { recursive: true });
await fs.writeFile(path.join(cache, "file.txt"), "cached");
const originalCwd = process.cwd();
try {
process.chdir(root);
const resolvedPath = await actionUtils.resolvePaths([filePath]);
const expectedPath = [filePath];
expect(resolvedPath).toStrictEqual(expectedPath);
} finally {
process.chdir(originalCwd);
}
});
test("resolvePath with ~ in path", () => {
const filePath = "~/.cache/yarn";
test("resolvePaths with ~ in path", async () => {
const cacheDir = uuid();
const filePath = `~/${cacheDir}`;
// Create the following layout:
// ~/uuid
// ~/uuid/file.txt
const homedir = jest.requireActual("os").homedir();
const homedirMock = jest.spyOn(os, "homedir");
@@ -199,24 +238,93 @@ test("resolvePath with ~ in path", () => {
return homedir;
});
const resolvedPath = actionUtils.resolvePath(filePath);
const target = path.join(homedir, cacheDir);
await fs.mkdir(target, { recursive: true });
await fs.writeFile(path.join(target, "file.txt"), "cached");
const expectedPath = path.join(homedir, ".cache/yarn");
expect(resolvedPath).toBe(expectedPath);
const root = getTempDir();
process.env["GITHUB_WORKSPACE"] = root;
try {
const resolvedPath = await actionUtils.resolvePaths([filePath]);
const expectedPath = [path.relative(root, target)];
expect(resolvedPath).toStrictEqual(expectedPath);
} finally {
await io.rmRF(target);
}
});
test("resolvePath with home not found", () => {
test("resolvePaths with home not found", async () => {
const filePath = "~/.cache/yarn";
const homedirMock = jest.spyOn(os, "homedir");
homedirMock.mockImplementation(() => {
return "";
});
expect(() => actionUtils.resolvePath(filePath)).toThrow(
"Unable to resolve `~` to HOME"
await expect(actionUtils.resolvePaths([filePath])).rejects.toThrow(
"Unable to determine HOME directory"
);
});
test("resolvePaths inclusion pattern returns found", async () => {
const pattern = "*.ts";
// Create the following layout:
// inclusion-patterns
// inclusion-patterns/miss.txt
// inclusion-patterns/test.ts
const root = path.join(getTempDir(), "inclusion-patterns");
// tarball entries will be relative to workspace
process.env["GITHUB_WORKSPACE"] = root;
await fs.mkdir(root, { recursive: true });
await fs.writeFile(path.join(root, "miss.txt"), "no match");
await fs.writeFile(path.join(root, "test.ts"), "match");
const originalCwd = process.cwd();
try {
process.chdir(root);
const resolvedPath = await actionUtils.resolvePaths([pattern]);
const expectedPath = ["test.ts"];
expect(resolvedPath).toStrictEqual(expectedPath);
} finally {
process.chdir(originalCwd);
}
});
test("resolvePaths exclusion pattern returns not found", async () => {
const patterns = ["*.ts", "!test.ts"];
// Create the following layout:
// exclusion-patterns
// exclusion-patterns/miss.txt
// exclusion-patterns/test.ts
const root = path.join(getTempDir(), "exclusion-patterns");
// tarball entries will be relative to workspace
process.env["GITHUB_WORKSPACE"] = root;
await fs.mkdir(root, { recursive: true });
await fs.writeFile(path.join(root, "miss.txt"), "no match");
await fs.writeFile(path.join(root, "test.ts"), "no match");
const originalCwd = process.cwd();
try {
process.chdir(root);
const resolvedPath = await actionUtils.resolvePaths(patterns);
const expectedPath = [];
expect(resolvedPath).toStrictEqual(expectedPath);
} finally {
process.chdir(originalCwd);
}
});
test("isValidEvent returns true for push event", () => {
const event = Events.Push;
process.env[Events.Key] = event;
@@ -234,3 +342,16 @@ test("isValidEvent returns true for pull request event", () => {
expect(isValidEvent).toBe(true);
});
test("unlinkFile unlinks file", async () => {
const testDirectory = await fs.mkdtemp("unlinkFileTest");
const testFile = path.join(testDirectory, "test.txt");
await fs.writeFile(testFile, "hello world");
await actionUtils.unlinkFile(testFile);
// This should throw as testFile should not exist
await expect(fs.stat(testFile)).rejects.toThrow();
await fs.rmdir(testDirectory);
});

__tests__/cacheHttpClient.test.ts (new file, +177)

@@ -0,0 +1,177 @@
import { getCacheVersion, retry } from "../src/cacheHttpClient";
import { CompressionMethod, Inputs } from "../src/constants";
import * as testUtils from "../src/utils/testUtils";
afterEach(() => {
testUtils.clearInputs();
});
test("getCacheVersion with path input and compression method undefined returns version", async () => {
testUtils.setInput(Inputs.Path, "node_modules");
const result = getCacheVersion();
expect(result).toEqual(
"b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
);
});
test("getCacheVersion with zstd compression returns version", async () => {
testUtils.setInput(Inputs.Path, "node_modules");
const result = getCacheVersion(CompressionMethod.Zstd);
expect(result).toEqual(
"273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24"
);
});
test("getCacheVersion with gzip compression does not change vesion", async () => {
testUtils.setInput(Inputs.Path, "node_modules");
const result = getCacheVersion(CompressionMethod.Gzip);
expect(result).toEqual(
"b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
);
});
test("getCacheVersion with no input throws", async () => {
expect(() => getCacheVersion()).toThrow();
});
interface TestResponse {
statusCode: number;
result: string | null;
}
function handleResponse(
response: TestResponse | undefined
): Promise<TestResponse> {
if (!response) {
fail("Retry method called too many times");
}
if (response.statusCode === 999) {
throw Error("Test Error");
} else {
return Promise.resolve(response);
}
}
async function testRetryExpectingResult(
responses: Array<TestResponse>,
expectedResult: string | null
): Promise<void> {
responses = responses.reverse(); // Reverse responses since we pop from end
const actualResult = await retry(
"test",
() => handleResponse(responses.pop()),
(response: TestResponse) => response.statusCode
);
expect(actualResult.result).toEqual(expectedResult);
}
async function testRetryExpectingError(
responses: Array<TestResponse>
): Promise<void> {
responses = responses.reverse(); // Reverse responses since we pop from end
expect(
retry(
"test",
() => handleResponse(responses.pop()),
(response: TestResponse) => response.statusCode
)
).rejects.toBeInstanceOf(Error);
}
test("retry works on successful response", async () => {
await testRetryExpectingResult(
[
{
statusCode: 200,
result: "Ok"
}
],
"Ok"
);
});
test("retry works after retryable status code", async () => {
await testRetryExpectingResult(
[
{
statusCode: 503,
result: null
},
{
statusCode: 200,
result: "Ok"
}
],
"Ok"
);
});
test("retry fails after exhausting retries", async () => {
await testRetryExpectingError([
{
statusCode: 503,
result: null
},
{
statusCode: 503,
result: null
},
{
statusCode: 200,
result: "Ok"
}
]);
});
test("retry fails after non-retryable status code", async () => {
await testRetryExpectingError([
{
statusCode: 500,
result: null
},
{
statusCode: 200,
result: "Ok"
}
]);
});
test("retry works after error", async () => {
await testRetryExpectingResult(
[
{
statusCode: 999,
result: null
},
{
statusCode: 200,
result: "Ok"
}
],
"Ok"
);
});
test("retry returns after client error", async () => {
await testRetryExpectingResult(
[
{
statusCode: 400,
result: null
},
{
statusCode: 200,
result: "Ok"
}
],
null
);
});
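
Taken together, these tests pin down the retry contract: return on success or on client errors below 500, retry thrown errors and retryable server codes (502/503/504), and give up after `maxAttempts`. A helper consistent with them might look like this (a sketch, not necessarily the action's exact source):

```typescript
async function retry<T>(
    name: string,
    method: () => Promise<T>,
    getStatusCode: (response: T) => number | undefined,
    maxAttempts = 2
): Promise<T> {
    let errorMessage = "";
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
        let isRetryable = false;
        try {
            const response = await method();
            const statusCode = getStatusCode(response);
            if (statusCode === undefined || statusCode < 500) {
                return response; // success, or a client error the caller handles
            }
            isRetryable = [502, 503, 504].includes(statusCode);
            errorMessage = `Cache service responded with ${statusCode}`;
        } catch (error) {
            isRetryable = true; // transport errors are always worth retrying
            errorMessage = (error as Error).message;
        }
        if (!isRetryable) {
            break;
        }
    }
    throw new Error(`${name} failed: ${errorMessage}`);
}
```
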

__tests__/create-cache-files.sh (executable, new file, +17)

@@ -0,0 +1,17 @@
#!/bin/sh
# Validate args
prefix="$1"
if [ -z "$prefix" ]; then
echo "Must supply prefix argument"
exit 1
fi
path="$2"
if [ -z "$path" ]; then
echo "Must supply path argument"
exit 1
fi
mkdir -p "$path"
echo "$prefix $GITHUB_RUN_ID" > "$path/test-file.txt"

__tests__/restore.test.ts

@@ -1,24 +1,24 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as io from "@actions/io";
import * as path from "path";
import * as cacheHttpClient from "../src/cacheHttpClient";
import { Events, Inputs } from "../src/constants";
import {
CacheFilename,
CompressionMethod,
Events,
Inputs
} from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import run from "../src/restore";
import * as tar from "../src/tar";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";
jest.mock("@actions/exec");
jest.mock("@actions/io");
jest.mock("../src/utils/actionUtils");
jest.mock("../src/cacheHttpClient");
jest.mock("../src/tar");
jest.mock("../src/utils/actionUtils");
beforeAll(() => {
jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
return path.resolve(filePath);
});
jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
(key, cacheResult) => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
@@ -36,8 +36,9 @@ beforeAll(() => {
return actualUtils.getSupportedEvents();
});
jest.spyOn(io, "which").mockImplementation(tool => {
return Promise.resolve(tool);
jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.getCacheFileName(cm);
});
});
@@ -65,7 +66,8 @@ test("restore with invalid event outputs warning", async () => {
test("restore with no path should fail", async () => {
const failedMock = jest.spyOn(core, "setFailed");
await run();
expect(failedMock).toHaveBeenCalledWith(
// this input isn't necessary for restore b/c tarball contains entries relative to workspace
expect(failedMock).not.toHaveBeenCalledWith(
"Input required and not supplied: path"
);
});
@@ -142,7 +144,7 @@ test("restore with no cache found", async () => {
expect(failedMock).toHaveBeenCalledTimes(0);
expect(infoMock).toHaveBeenCalledWith(
`Cache not found for input keys: ${key}.`
`Cache not found for input keys: ${key}`
);
});
@@ -201,13 +203,12 @@ test("restore with restore keys and no cache found", async () => {
expect(failedMock).toHaveBeenCalledTimes(0);
expect(infoMock).toHaveBeenCalledWith(
`Cache not found for input keys: ${key}, ${restoreKey}.`
`Cache not found for input keys: ${key}, ${restoreKey}`
);
});
test("restore with cache found", async () => {
test("restore with gzip compressed cache found", async () => {
const key = "node-test";
const cachePath = path.resolve("node_modules");
testUtils.setInputs({
path: "node_modules",
key
@@ -236,7 +237,7 @@ test("restore with cache found", async () => {
return Promise.resolve(tempPath);
});
const archivePath = path.join(tempPath, "cache.tgz");
const archivePath = path.join(tempPath, CacheFilename.Gzip);
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
@@ -245,45 +246,45 @@ test("restore with cache found", async () => {
.spyOn(actionUtils, "getArchiveFileSize")
.mockReturnValue(fileSize);
const mkdirMock = jest.spyOn(io, "mkdirP");
const execMock = jest.spyOn(exec, "exec");
const extractTarMock = jest.spyOn(tar, "extractTar");
const unlinkFileMock = jest.spyOn(actionUtils, "unlinkFile");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
const compression = CompressionMethod.Gzip;
const getCompressionMock = jest
.spyOn(actionUtils, "getCompressionMethod")
.mockReturnValue(Promise.resolve(compression));
await run();
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(getCacheMock).toHaveBeenCalledWith([key]);
expect(getCacheMock).toHaveBeenCalledWith([key], {
compressionMethod: compression
});
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath);
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(mkdirMock).toHaveBeenCalledWith(cachePath);
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-xz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/")
]
: ["-xz", "-f", archivePath, "-C", cachePath];
expect(extractTarMock).toHaveBeenCalledTimes(1);
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
expect(unlinkFileMock).toHaveBeenCalledTimes(1);
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
test("restore with a pull request event and cache found", async () => {
test("restore with a pull request event and zstd compressed cache found", async () => {
const key = "node-test";
const cachePath = path.resolve("node_modules");
testUtils.setInputs({
path: "node_modules",
key
@@ -314,7 +315,7 @@ test("restore with a pull request event and cache found", async () => {
return Promise.resolve(tempPath);
});
const archivePath = path.join(tempPath, "cache.tgz");
const archivePath = path.join(tempPath, CacheFilename.Zstd);
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
@@ -323,47 +324,42 @@ test("restore with a pull request event and cache found", async () => {
.spyOn(actionUtils, "getArchiveFileSize")
.mockReturnValue(fileSize);
const mkdirMock = jest.spyOn(io, "mkdirP");
const execMock = jest.spyOn(exec, "exec");
const extractTarMock = jest.spyOn(tar, "extractTar");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
const compression = CompressionMethod.Zstd;
const getCompressionMock = jest
.spyOn(actionUtils, "getCompressionMethod")
.mockReturnValue(Promise.resolve(compression));
await run();
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(getCacheMock).toHaveBeenCalledWith([key]);
expect(getCacheMock).toHaveBeenCalledWith([key], {
compressionMethod: compression
});
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath);
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);
expect(mkdirMock).toHaveBeenCalledWith(cachePath);
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-xz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/")
]
: ["-xz", "-f", archivePath, "-C", cachePath];
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
expect(extractTarMock).toHaveBeenCalledTimes(1);
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
test("restore with cache found for restore key", async () => {
const key = "node-test";
const restoreKey = "node-";
const cachePath = path.resolve("node_modules");
testUtils.setInputs({
path: "node_modules",
key,
@@ -393,7 +389,7 @@ test("restore with cache found for restore key", async () => {
return Promise.resolve(tempPath);
});
const archivePath = path.join(tempPath, "cache.tgz");
const archivePath = path.join(tempPath, CacheFilename.Zstd);
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
@@ -402,35 +398,30 @@ test("restore with cache found for restore key", async () => {
.spyOn(actionUtils, "getArchiveFileSize")
.mockReturnValue(fileSize);
const mkdirMock = jest.spyOn(io, "mkdirP");
const execMock = jest.spyOn(exec, "exec");
const extractTarMock = jest.spyOn(tar, "extractTar");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
const compression = CompressionMethod.Zstd;
const getCompressionMock = jest
.spyOn(actionUtils, "getCompressionMethod")
.mockReturnValue(Promise.resolve(compression));
await run();
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]);
expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], {
compressionMethod: compression
});
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath);
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);
expect(mkdirMock).toHaveBeenCalledWith(cachePath);
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-xz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/")
]
: ["-xz", "-f", archivePath, "-C", cachePath];
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
expect(extractTarMock).toHaveBeenCalledTimes(1);
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
@@ -439,4 +430,5 @@ test("restore with cache found for restore key", async () => {
`Cache restored from key: ${restoreKey}`
);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});

__tests__/save.test.ts

@@ -1,19 +1,23 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as io from "@actions/io";
import * as path from "path";
import * as cacheHttpClient from "../src/cacheHttpClient";
import { Events, Inputs } from "../src/constants";
import {
CacheFilename,
CompressionMethod,
Events,
Inputs
} from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import run from "../src/save";
import * as tar from "../src/tar";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";
jest.mock("@actions/core");
jest.mock("@actions/exec");
jest.mock("@actions/io");
jest.mock("../src/utils/actionUtils");
jest.mock("../src/cacheHttpClient");
jest.mock("../src/tar");
jest.mock("../src/utils/actionUtils");
beforeAll(() => {
jest.spyOn(core, "getInput").mockImplementation((name, options) => {
@@ -42,16 +46,19 @@ beforeAll(() => {
return actualUtils.getSupportedEvents();
});
jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
return path.resolve(filePath);
});
jest.spyOn(actionUtils, "resolvePaths").mockImplementation(
async filePaths => {
return filePaths.map(x => path.resolve(x));
}
);
jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => {
return Promise.resolve("/foo/bar");
});
jest.spyOn(io, "which").mockImplementation(tool => {
return Promise.resolve(tool);
jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.getCacheFileName(cm);
});
});
@@ -128,7 +135,7 @@ test("save with exact match returns early", async () => {
return primaryKey;
});
const execMock = jest.spyOn(exec, "exec");
const createTarMock = jest.spyOn(tar, "createTar");
await run();
@@ -136,7 +143,7 @@ test("save with exact match returns early", async () => {
`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
);
expect(execMock).toHaveBeenCalledTimes(0);
expect(createTarMock).toHaveBeenCalledTimes(0);
expect(failedMock).toHaveBeenCalledTimes(0);
});
@@ -195,42 +202,93 @@ test("save with large cache outputs warning", async () => {
});
const inputPath = "node_modules";
const cachePath = path.resolve(inputPath);
const cachePaths = [path.resolve(inputPath)];
testUtils.setInput(Inputs.Path, inputPath);
const execMock = jest.spyOn(exec, "exec");
const createTarMock = jest.spyOn(tar, "createTar");
const cacheSize = 1024 * 1024 * 1024; //~1GB, over the 400MB limit
const cacheSize = 6 * 1024 * 1024 * 1024; //~6GB, over the 5GB limit
jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
return cacheSize;
});
const compression = CompressionMethod.Gzip;
const getCompressionMock = jest
.spyOn(actionUtils, "getCompressionMethod")
.mockReturnValue(Promise.resolve(compression));
await run();
const archivePath = path.join("/foo/bar", "cache.tgz");
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-cz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/"),
"."
]
: ["-cz", "-f", archivePath, "-C", cachePath, "."];
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
const archiveFolder = "/foo/bar";
expect(createTarMock).toHaveBeenCalledTimes(1);
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(logWarningMock).toHaveBeenCalledWith(
"Cache size of ~1024 MB (1073741824 B) is over the 400MB limit, not saving cache."
"Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
test("save with reserve cache failure outputs warning", async () => {
const infoMock = jest.spyOn(core, "info");
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
const inputPath = "node_modules";
testUtils.setInput(Inputs.Path, inputPath);
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
return Promise.resolve(-1);
});
const createTarMock = jest.spyOn(tar, "createTar");
const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
const compression = CompressionMethod.Zstd;
const getCompressionMock = jest
.spyOn(actionUtils, "getCompressionMethod")
.mockReturnValue(Promise.resolve(compression));
await run();
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
compressionMethod: compression
});
expect(infoMock).toHaveBeenCalledWith(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
);
expect(createTarMock).toHaveBeenCalledTimes(0);
expect(saveCacheMock).toHaveBeenCalledTimes(0);
expect(logWarningMock).toHaveBeenCalledTimes(0);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
test("save with server error outputs warning", async () => {
@@ -256,44 +314,53 @@ test("save with server error outputs warning", async () => {
});
const inputPath = "node_modules";
const cachePath = path.resolve(inputPath);
const cachePaths = [path.resolve(inputPath)];
testUtils.setInput(Inputs.Path, inputPath);
const execMock = jest.spyOn(exec, "exec");
const cacheId = 4;
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
return Promise.resolve(cacheId);
});
const createTarMock = jest.spyOn(tar, "createTar");
const saveCacheMock = jest
.spyOn(cacheHttpClient, "saveCache")
.mockImplementationOnce(() => {
throw new Error("HTTP Error Occurred");
});
const compression = CompressionMethod.Zstd;
const getCompressionMock = jest
.spyOn(actionUtils, "getCompressionMethod")
.mockReturnValue(Promise.resolve(compression));
await run();
const archivePath = path.join("/foo/bar", "cache.tgz");
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
compressionMethod: compression
});
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-cz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/"),
"."
]
: ["-cz", "-f", archivePath, "-C", cachePath, "."];
const archiveFolder = "/foo/bar";
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
expect(createTarMock).toHaveBeenCalledTimes(1);
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
);
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath);
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
test("save with valid inputs uploads a cache", async () => {
@@ -318,35 +385,44 @@ test("save with valid inputs uploads a cache", async () => {
});
const inputPath = "node_modules";
const cachePath = path.resolve(inputPath);
const cachePaths = [path.resolve(inputPath)];
testUtils.setInput(Inputs.Path, inputPath);
const execMock = jest.spyOn(exec, "exec");
const cacheId = 4;
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
return Promise.resolve(cacheId);
});
const createTarMock = jest.spyOn(tar, "createTar");
const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
const compression = CompressionMethod.Zstd;
const getCompressionMock = jest
.spyOn(actionUtils, "getCompressionMethod")
.mockReturnValue(Promise.resolve(compression));
await run();
const archivePath = path.join("/foo/bar", "cache.tgz");
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
compressionMethod: compression
});
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-cz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/"),
"."
]
: ["-cz", "-f", archivePath, "-C", cachePath, "."];
const archiveFolder = "/foo/bar";
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
expect(createTarMock).toHaveBeenCalledTimes(1);
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
);
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath);
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});

__tests__/tar.test.ts (new file, +204)

@@ -0,0 +1,204 @@
import * as exec from "@actions/exec";
import * as io from "@actions/io";
import * as path from "path";
import { CacheFilename, CompressionMethod } from "../src/constants";
import * as tar from "../src/tar";
import * as utils from "../src/utils/actionUtils";
import fs = require("fs");
jest.mock("@actions/exec");
jest.mock("@actions/io");
const IS_WINDOWS = process.platform === "win32";
function getTempDir(): string {
return path.join(__dirname, "_temp", "tar");
}
beforeAll(async () => {
jest.spyOn(io, "which").mockImplementation(tool => {
return Promise.resolve(tool);
});
process.env["GITHUB_WORKSPACE"] = process.cwd();
await jest.requireActual("@actions/io").rmRF(getTempDir());
});
afterAll(async () => {
delete process.env["GITHUB_WORKSPACE"];
await jest.requireActual("@actions/io").rmRF(getTempDir());
});
test("zstd extract tar", async () => {
const mkdirMock = jest.spyOn(io, "mkdirP");
const execMock = jest.spyOn(exec, "exec");
const archivePath = IS_WINDOWS
? `${process.env["windir"]}\\fakepath\\cache.tar`
: "cache.tar";
const workspace = process.env["GITHUB_WORKSPACE"];
await tar.extractTar(archivePath, CompressionMethod.Zstd);
expect(mkdirMock).toHaveBeenCalledWith(workspace);
const tarPath = IS_WINDOWS
? `${process.env["windir"]}\\System32\\tar.exe`
: "tar";
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(
`"${tarPath}"`,
[
"--use-compress-program",
"zstd -d --long=30",
"-xf",
IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
"-P",
"-C",
IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace
],
{ cwd: undefined }
);
});
test("gzip extract tar", async () => {
const mkdirMock = jest.spyOn(io, "mkdirP");
const execMock = jest.spyOn(exec, "exec");
const archivePath = IS_WINDOWS
? `${process.env["windir"]}\\fakepath\\cache.tar`
: "cache.tar";
const workspace = process.env["GITHUB_WORKSPACE"];
await tar.extractTar(archivePath, CompressionMethod.Gzip);
expect(mkdirMock).toHaveBeenCalledWith(workspace);
const tarPath = IS_WINDOWS
? `${process.env["windir"]}\\System32\\tar.exe`
: "tar";
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(
`"${tarPath}"`,
[
"-z",
"-xf",
IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
"-P",
"-C",
IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace
],
{ cwd: undefined }
);
});
test("gzip extract GNU tar on windows", async () => {
if (IS_WINDOWS) {
jest.spyOn(fs, "existsSync").mockReturnValueOnce(false);
const isGnuMock = jest
.spyOn(utils, "useGnuTar")
.mockReturnValue(Promise.resolve(true));
const execMock = jest.spyOn(exec, "exec");
const archivePath = `${process.env["windir"]}\\fakepath\\cache.tar`;
const workspace = process.env["GITHUB_WORKSPACE"];
await tar.extractTar(archivePath, CompressionMethod.Gzip);
expect(isGnuMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(
`"tar"`,
[
"-z",
"-xf",
archivePath.replace(/\\/g, "/"),
"-P",
"-C",
workspace?.replace(/\\/g, "/"),
"--force-local"
],
{ cwd: undefined }
);
}
});
test("zstd create tar", async () => {
const execMock = jest.spyOn(exec, "exec");
const archiveFolder = getTempDir();
const workspace = process.env["GITHUB_WORKSPACE"];
const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];
await fs.promises.mkdir(archiveFolder, { recursive: true });
await tar.createTar(
archiveFolder,
sourceDirectories,
CompressionMethod.Zstd
);
const tarPath = IS_WINDOWS
? `${process.env["windir"]}\\System32\\tar.exe`
: "tar";
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(
`"${tarPath}"`,
[
"--use-compress-program",
"zstd -T0 --long=30",
"-cf",
IS_WINDOWS
? CacheFilename.Zstd.replace(/\\/g, "/")
: CacheFilename.Zstd,
"-P",
"-C",
IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace,
"--files-from",
"manifest.txt"
],
{
cwd: archiveFolder
}
);
});
test("gzip create tar", async () => {
const execMock = jest.spyOn(exec, "exec");
const archiveFolder = getTempDir();
const workspace = process.env["GITHUB_WORKSPACE"];
const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];
await fs.promises.mkdir(archiveFolder, { recursive: true });
await tar.createTar(
archiveFolder,
sourceDirectories,
CompressionMethod.Gzip
);
const tarPath = IS_WINDOWS
? `${process.env["windir"]}\\System32\\tar.exe`
: "tar";
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(
`"${tarPath}"`,
[
"-z",
"-cf",
IS_WINDOWS
? CacheFilename.Gzip.replace(/\\/g, "/")
: CacheFilename.Gzip,
"-P",
"-C",
IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace,
"--files-from",
"manifest.txt"
],
{
cwd: archiveFolder
}
);
});

__tests__/verify-cache-files.sh Executable file

@@ -0,0 +1,36 @@
#!/bin/sh
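# Usage: verify-cache-files.sh <prefix> <path>
# Verifies that <path>/test-file.txt exists and contains "<prefix> $GITHUB_RUN_ID".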
# Validate args
prefix="$1"
if [ -z "$prefix" ]; then
echo "Must supply prefix argument"
exit 1
fi
path="$2"
if [ -z "$path" ]; then
echo "Must specify path argument"
exit 1
fi
# Sanity check GITHUB_RUN_ID defined
if [ -z "$GITHUB_RUN_ID" ]; then
echo "GITHUB_RUN_ID not defined"
exit 1
fi
# Verify file exists
file="$path/test-file.txt"
echo "Checking for $file"
if [ ! -e "$file" ]; then
echo "File does not exist"
exit 1
fi
# Verify file content
content="$(cat "$file")"
printf "File content:\n%s\n" "$content"
if [ -z "$(echo "$content" | grep --fixed-strings "$prefix $GITHUB_RUN_ID")" ]; then
echo "Unexpected file content"
exit 1
fi

action.yml

@@ -1,5 +1,5 @@
name: 'Cache'
description: 'Cache dependencies and build outputs to improve workflow execution time'
description: 'Cache artifacts like dependencies and build outputs to improve workflow execution time'
author: 'GitHub'
inputs:
path:
@@ -21,4 +21,4 @@ runs:
post-if: 'success()'
branding:
icon: 'archive'
color: 'gray-dark'
color: 'gray-dark'

dist/restore/index.js vendored

File diff suppressed because it is too large

dist/save/index.js vendored

File diff suppressed because it is too large

examples.md

@@ -1,27 +1,36 @@
# Examples
- [Examples](#examples)
- [C# - Nuget](#c---nuget)
- [C# - NuGet](#c---nuget)
- [Elixir - Mix](#elixir---mix)
- [Go - Modules](#go---modules)
- [Haskell - Cabal](#haskell---cabal)
- [Java - Gradle](#java---gradle)
- [Java - Maven](#java---maven)
- [Node - npm](#node---npm)
- [macOS and Ubuntu](#macos-and-ubuntu)
- [Windows](#windows)
- [Using multiple systems and `npm config`](#using-multiple-systems-and-npm-config)
- [Node - Lerna](#node---lerna)
- [Node - Yarn](#node---yarn)
- [OCaml/Reason - esy](#ocamlreason---esy)
- [PHP - Composer](#php---composer)
- [Python - pip](#python---pip)
- [Simple example](#simple-example)
- [Multiple OS's in a workflow](#multiple-oss-in-a-workflow)
- [Using pip to get cache location](#using-pip-to-get-cache-location)
- [Using a script to get cache location](#using-a-script-to-get-cache-location)
- [Ruby - Gem](#ruby---gem)
- [R - renv](#r---renv)
- [Simple example](#simple-example-1)
- [Multiple OS's in a workflow](#multiple-oss-in-a-workflow-1)
- [Ruby - Bundler](#ruby---bundler)
- [Rust - Cargo](#rust---cargo)
- [Scala - SBT](#scala---sbt)
- [Swift, Objective-C - Carthage](#swift-objective-c---carthage)
- [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods)
- [Swift - Swift Package Manager](#swift---swift-package-manager)
## C# - Nuget
## C# - NuGet
Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies):
```yaml
@@ -33,6 +42,21 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa
${{ runner.os }}-nuget-
```
Depending on the environment, huge packages might be pre-installed in the global cache folder.
If you do not want to include them, consider moving the cache folder as shown below.
>Note: This workflow does not work for projects that require files to be placed in the user profile package folder
```yaml
env:
NUGET_PACKAGES: ${{ github.workspace }}/.nuget/packages
steps:
- uses: actions/cache@v1
with:
path: ${{ github.workspace }}/.nuget/packages
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
restore-keys: |
${{ runner.os }}-nuget-
```
## Elixir - Mix
```yaml
- uses: actions/cache@v1
@@ -54,13 +78,35 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa
${{ runner.os }}-go-
```
## Haskell - Cabal
We cache the elements of the Cabal store separately, as the entirety of `~/.cabal` can grow very large for projects with many dependencies.
```yaml
- uses: actions/cache@v1
name: Cache ~/.cabal/packages
with:
path: ~/.cabal/packages
key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-packages
- uses: actions/cache@v1
name: Cache ~/.cabal/store
with:
path: ~/.cabal/store
key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-store
- uses: actions/cache@v1
name: Cache dist-newstyle
with:
path: dist-newstyle
key: ${{ runner.os }}-${{ matrix.ghc }}-dist-newstyle
```
## Java - Gradle
```yaml
- uses: actions/cache@v1
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle') }}
key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*') }}
restore-keys: |
${{ runner.os }}-gradle-
```
@@ -96,17 +142,6 @@ For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` o
### Windows
```yaml
- uses: actions/cache@v1
with:
path: ~\AppData\Roaming\npm-cache
key: ${{ runner.os }}-node-${{ hashFiles('**\package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
```
### Using multiple systems and `npm config`
```yaml
- name: Get npm cache directory
id: npm-cache
run: |
@@ -119,25 +154,82 @@ For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` o
${{ runner.os }}-node-
```
### Using multiple systems and `npm config`
```yaml
- name: Get npm cache directory
id: npm-cache
run: |
echo "::set-output name=dir::$(npm config get cache)"
- uses: actions/cache@v1
with:
path: ${{ steps.npm-cache.outputs.dir }}
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
```
## Node - Lerna
>Note: this example uses the new multi-path feature and is only available on `master`
```yaml
- name: restore lerna
uses: actions/cache@master
with:
path: |
node_modules
*/*/node_modules
key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock') }}
```
## Node - Yarn
The yarn cache directory will depend on your operating system and version of `yarn`. See https://yarnpkg.com/lang/en/docs/cli/cache/ for more info.
```yaml
- name: Get yarn cache
id: yarn-cache
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- uses: actions/cache@v1
id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
with:
path: ${{ steps.yarn-cache.outputs.dir }}
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
```
## OCaml/Reason - esy
Esy allows you to export built dependencies and import pre-built dependencies.
```yaml
- name: Restore Cache
id: restore-cache
uses: actions/cache@v1
with:
path: _export
key: ${{ runner.os }}-esy-${{ hashFiles('esy.lock/index.json') }}
restore-keys: |
${{ runner.os }}-esy-
- name: Esy install
run: 'esy install'
- name: Import Cache
run: |
esy import-dependencies _export
rm -rf _export
...(Build job)...
# Re-export dependencies if anything has changed or if it is the first time
- name: Setting dependency cache
run: |
esy export-dependencies
if: steps.restore-cache.outputs.cache-hit != 'true'
```
## PHP - Composer
```yaml
- name: Get Composer Cache Directory
id: composer-cache
run: |
@@ -199,14 +291,32 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
${{ runner.os }}-pip-
```
### Using pip to get cache location
> Note: This requires pip 20.1+
```yaml
- name: Get pip cache dir
id: pip-cache
run: |
echo "::set-output name=dir::$(pip cache dir)"
- name: pip cache
uses: actions/cache@v1
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
```
### Using a script to get cache location
> Note: This uses an internal pip API and may not always work
```yaml
- name: Get pip cache
id: pip-cache
run: |
python -c "from pip._internal.locations import USER_CACHE_DIR; print('::set-output name=dir::' + USER_CACHE_DIR)"
- name: Get pip cache dir
id: pip-cache
run: |
python -c "from pip._internal.locations import USER_CACHE_DIR; print('::set-output name=dir::' + USER_CACHE_DIR)"
- uses: actions/cache@v1
with:
@@ -216,15 +326,72 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
${{ runner.os }}-pip-
```
## Ruby - Gem
## R - renv
For renv, the cache directory varies by OS. See https://rstudio.github.io/renv/articles/renv.html#cache
Locations:
- Ubuntu: `~/.local/share/renv`
- macOS: `~/Library/Application Support/renv`
- Windows: `%LOCALAPPDATA%/renv`
### Simple example
```yaml
- uses: actions/cache@v1
with:
path: ~/.local/share/renv
key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
restore-keys: |
${{ runner.os }}-renv-
```
Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
### Multiple OS's in a workflow
```yaml
- uses: actions/cache@v1
if: startsWith(runner.os, 'Linux')
with:
path: ~/.local/share/renv
key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
restore-keys: |
${{ runner.os }}-renv-
- uses: actions/cache@v1
if: startsWith(runner.os, 'macOS')
with:
path: ~/Library/Application Support/renv
key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
restore-keys: |
${{ runner.os }}-renv-
- uses: actions/cache@v1
if: startsWith(runner.os, 'Windows')
with:
path: ~\AppData\Local\renv
key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
restore-keys: |
${{ runner.os }}-renv-
```
## Ruby - Bundler
```yaml
- uses: actions/cache@v1
with:
path: vendor/bundle
key: ${{ runner.os }}-gem-${{ hashFiles('**/Gemfile.lock') }}
key: ${{ runner.os }}-gems-${{ hashFiles('**/Gemfile.lock') }}
restore-keys: |
${{ runner.os }}-gem-
${{ runner.os }}-gems-
```
When dependencies are installed later in the workflow, the same path must be specified for Bundler.
```yaml
- name: Bundle install
run: |
bundle config path vendor/bundle
bundle install --jobs 4 --retry 3
```
## Rust - Cargo
@@ -247,6 +414,21 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
```
## Scala - SBT
```yaml
- name: Cache SBT ivy cache
uses: actions/cache@v1
with:
path: ~/.ivy2/cache
key: ${{ runner.os }}-sbt-ivy-cache-${{ hashFiles('**/build.sbt') }}
- name: Cache SBT
uses: actions/cache@v1
with:
path: ~/.sbt
key: ${{ runner.os }}-sbt-${{ hashFiles('**/build.sbt') }}
```
## Swift, Objective-C - Carthage
```yaml
@@ -268,3 +450,14 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
restore-keys: |
${{ runner.os }}-pods-
```
## Swift - Swift Package Manager
```yaml
- uses: actions/cache@v1
with:
path: .build
key: ${{ runner.os }}-spm-${{ hashFiles('**/Package.resolved') }}
restore-keys: |
${{ runner.os }}-spm-
```

package-lock.json generated

File diff suppressed because it is too large

package.json

@@ -1,16 +1,15 @@
{
"name": "cache",
"version": "1.0.3",
"version": "1.1.2",
"private": true,
"description": "Cache dependencies and build outputs",
"main": "dist/restore/index.js",
"scripts": {
"build": "tsc",
"build": "tsc && ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts",
"test": "tsc --noEmit && jest --coverage",
"lint": "eslint **/*.ts --cache",
"format": "prettier --write **/*.ts",
"format-check": "prettier --check **/*.ts",
"release": "ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts && git add -f dist/"
"format-check": "prettier --check **/*.ts"
},
"repository": {
"type": "git",
@@ -26,8 +25,9 @@
"dependencies": {
"@actions/core": "^1.2.0",
"@actions/exec": "^1.0.1",
"@actions/glob": "^0.1.0",
"@actions/http-client": "^1.0.8",
"@actions/io": "^1.0.1",
"typed-rest-client": "^1.5.0",
"uuid": "^3.3.3"
},
"devDependencies": {
@@ -43,11 +43,12 @@
"eslint-plugin-import": "^2.18.2",
"eslint-plugin-jest": "^23.0.3",
"eslint-plugin-prettier": "^3.1.1",
"eslint-plugin-simple-import-sort": "^5.0.2",
"jest": "^24.8.0",
"jest-circus": "^24.7.1",
"nock": "^11.7.0",
"prettier": "1.18.2",
"prettier": "^1.19.1",
"ts-jest": "^24.0.2",
"typescript": "^3.6.4"
"typescript": "^3.7.3"
}
}

src/cacheHttpClient.ts

@@ -1,26 +1,70 @@
import * as core from "@actions/core";
import { HttpClient, HttpCodes } from "@actions/http-client";
import { BearerCredentialHandler } from "@actions/http-client/auth";
import {
IHttpClientResponse,
IRequestOptions,
ITypedResponse
} from "@actions/http-client/interfaces";
import * as crypto from "crypto";
import * as fs from "fs";
import { BearerCredentialHandler } from "typed-rest-client/Handlers";
import { HttpClient } from "typed-rest-client/HttpClient";
import { IHttpClientResponse } from "typed-rest-client/Interfaces";
import { IRequestOptions, RestClient } from "typed-rest-client/RestClient";
import { ArtifactCacheEntry } from "./contracts";
import * as stream from "stream";
import * as util from "util";
function getCacheUrl(): string {
import { CompressionMethod, Inputs, SocketTimeout } from "./constants";
import {
ArtifactCacheEntry,
CacheOptions,
CommitCacheRequest,
ReserveCacheRequest,
ReserveCacheResponse
} from "./contracts";
import * as utils from "./utils/actionUtils";
const versionSalt = "1.0";
function isSuccessStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return false;
}
return statusCode >= 200 && statusCode < 300;
}
function isServerErrorStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return true;
}
return statusCode >= 500;
}
function isRetryableStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return false;
}
const retryableStatusCodes = [
HttpCodes.BadGateway,
HttpCodes.ServiceUnavailable,
HttpCodes.GatewayTimeout
];
return retryableStatusCodes.includes(statusCode);
}
function getCacheApiUrl(resource: string): string {
// Ideally we just use ACTIONS_CACHE_URL
const cacheUrl: string = (
const baseUrl: string = (
process.env["ACTIONS_CACHE_URL"] ||
process.env["ACTIONS_RUNTIME_URL"] ||
""
).replace("pipelines", "artifactcache");
if (!cacheUrl) {
if (!baseUrl) {
throw new Error(
"Cache Service Url not found, unable to restore cache."
);
}
core.debug(`Cache Url: ${cacheUrl}`);
return cacheUrl;
const url = `${baseUrl}_apis/artifactcache/${resource}`;
core.debug(`Resource Url: ${url}`);
return url;
}
function createAcceptHeader(type: string, apiVersion: string): string {
@@ -29,42 +73,132 @@ function createAcceptHeader(type: string, apiVersion: string): string {
function getRequestOptions(): IRequestOptions {
const requestOptions: IRequestOptions = {
acceptHeader: createAcceptHeader("application/json", "5.2-preview.1")
headers: {
Accept: createAcceptHeader("application/json", "6.0-preview.1")
}
};
return requestOptions;
}
export async function getCacheEntry(
keys: string[]
): Promise<ArtifactCacheEntry | null> {
const cacheUrl = getCacheUrl();
function createHttpClient(): HttpClient {
const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const bearerCredentialHandler = new BearerCredentialHandler(token);
const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(
keys.join(",")
)}`;
const restClient = new RestClient("actions/cache", cacheUrl, [
bearerCredentialHandler
]);
const response = await restClient.get<ArtifactCacheEntry>(
resource,
return new HttpClient(
"actions/cache",
[bearerCredentialHandler],
getRequestOptions()
);
}
export function getCacheVersion(compressionMethod?: CompressionMethod): string {
const components = [core.getInput(Inputs.Path, { required: true })].concat(
compressionMethod == CompressionMethod.Zstd ? [compressionMethod] : []
);
// Add salt to the cache version to support breaking changes in cache entries
components.push(versionSalt);
return crypto
.createHash("sha256")
.update(components.join("|"))
.digest("hex");
}
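// Worked sketch (illustrative only, not part of the action's API): for a
// hypothetical single cached path "~/.npm" compressed with zstd, the hashed
// input is "~/.npm|zstd|1.0", so changing the path list, the compression
// method, or versionSalt each produces a different cache version.
function exampleCacheVersion(): string {
    return crypto
        .createHash("sha256")
        .update(["~/.npm", CompressionMethod.Zstd, versionSalt].join("|"))
        .digest("hex"); // 64-character hex digest
}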
export async function retry<T>(
name: string,
method: () => Promise<T>,
getStatusCode: (response: T) => number | undefined,
maxAttempts = 2
): Promise<T> {
let response: T | undefined = undefined;
let statusCode: number | undefined = undefined;
let isRetryable = false;
let errorMessage = "";
let attempt = 1;
while (attempt <= maxAttempts) {
try {
response = await method();
statusCode = getStatusCode(response);
if (!isServerErrorStatusCode(statusCode)) {
return response;
}
isRetryable = isRetryableStatusCode(statusCode);
errorMessage = `Cache service responded with ${statusCode}`;
} catch (error) {
isRetryable = true;
errorMessage = error.message;
}
core.debug(
`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`
);
if (!isRetryable) {
core.debug(`${name} - Error is not retryable`);
break;
}
attempt++;
}
throw Error(`${name} failed: ${errorMessage}`);
}
export async function retryTypedResponse<T>(
name: string,
method: () => Promise<ITypedResponse<T>>,
maxAttempts = 2
): Promise<ITypedResponse<T>> {
return await retry(
name,
method,
(response: ITypedResponse<T>) => response.statusCode,
maxAttempts
);
}
export async function retryHttpClientResponse<T>(
name: string,
method: () => Promise<IHttpClientResponse>,
maxAttempts = 2
): Promise<IHttpClientResponse> {
return await retry(
name,
method,
(response: IHttpClientResponse) => response.message.statusCode,
maxAttempts
);
}
export async function getCacheEntry(
keys: string[],
options?: CacheOptions
): Promise<ArtifactCacheEntry | null> {
const httpClient = createHttpClient();
const version = getCacheVersion(options?.compressionMethod);
const resource = `cache?keys=${encodeURIComponent(
keys.join(",")
)}&version=${version}`;
const response = await retryTypedResponse("getCacheEntry", () =>
httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource))
);
if (response.statusCode === 204) {
return null;
}
if (response.statusCode !== 200) {
throw new Error(`Cache service responded with ${response.statusCode}`);
}
const cacheResult = response.result;
if (!cacheResult || !cacheResult.archiveLocation) {
const cacheDownloadUrl = cacheResult?.archiveLocation;
if (!cacheDownloadUrl) {
throw new Error("Cache not found.");
}
core.setSecret(cacheResult.archiveLocation);
core.setSecret(cacheDownloadUrl);
core.debug(`Cache Result:`);
core.debug(JSON.stringify(cacheResult));
@@ -73,56 +207,213 @@ export async function getCacheEntry(
async function pipeResponseToStream(
response: IHttpClientResponse,
stream: NodeJS.WritableStream
output: NodeJS.WritableStream
): Promise<void> {
return new Promise(resolve => {
response.message.pipe(stream).on("close", () => {
resolve();
});
});
const pipeline = util.promisify(stream.pipeline);
await pipeline(response.message, output);
}
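// Note: the promisified stream.pipeline rejects when the source stream
// errors or is destroyed (e.g. by the socket-timeout handler below),
// whereas the old `pipe(...).on("close", resolve)` pattern resolved even
// on a truncated download. Hypothetical failure-path sketch:
//
//     const pipeline = util.promisify(stream.pipeline);
//     const source = new stream.Readable({
//         read() { this.destroy(new Error("socket hang up")); }
//     });
//     await pipeline(source, fs.createWriteStream("/tmp/out")); // rejects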
export async function downloadCache(
cacheEntry: ArtifactCacheEntry,
archiveLocation: string,
archivePath: string
): Promise<void> {
const stream = fs.createWriteStream(archivePath);
const httpClient = new HttpClient("actions/cache");
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const downloadResponse = await httpClient.get(cacheEntry.archiveLocation!);
const downloadResponse = await retryHttpClientResponse(
"downloadCache",
() => httpClient.get(archiveLocation)
);
// Abort download if no traffic is received over the socket.
downloadResponse.message.socket.setTimeout(SocketTimeout, () => {
downloadResponse.message.destroy();
core.debug(
`Aborting download, socket timed out after ${SocketTimeout} ms`
);
});
await pipeResponseToStream(downloadResponse, stream);
// Validate download size.
const contentLengthHeader =
downloadResponse.message.headers["content-length"];
if (contentLengthHeader) {
const expectedLength = parseInt(contentLengthHeader);
const actualLength = utils.getArchiveFileSize(archivePath);
if (actualLength != expectedLength) {
throw new Error(
`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`
);
}
} else {
core.debug("Unable to validate download, no Content-Length header");
}
}
// Reserve Cache
export async function reserveCache(
key: string,
options?: CacheOptions
): Promise<number> {
const httpClient = createHttpClient();
const version = getCacheVersion(options?.compressionMethod);
const reserveCacheRequest: ReserveCacheRequest = {
key,
version
};
const response = await retryTypedResponse("reserveCache", () =>
httpClient.postJson<ReserveCacheResponse>(
getCacheApiUrl("caches"),
reserveCacheRequest
)
);
return response?.result?.cacheId ?? -1;
}
function getContentRange(start: number, end: number): string {
// Format: `bytes start-end/filesize`
// start and end are inclusive
// filesize can be *
// For a 200 byte chunk starting at byte 0:
// Content-Range: bytes 0-199/*
return `bytes ${start}-${end}/*`;
}
async function uploadChunk(
httpClient: HttpClient,
resourceUrl: string,
openStream: () => NodeJS.ReadableStream,
start: number,
end: number
): Promise<void> {
core.debug(
`Uploading chunk of size ${end -
start +
1} bytes at offset ${start} with content range: ${getContentRange(
start,
end
)}`
);
const additionalHeaders = {
"Content-Type": "application/octet-stream",
"Content-Range": getContentRange(start, end)
};
const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
return await httpClient.sendStream(
"PATCH",
resourceUrl,
openStream(),
additionalHeaders
);
};
await retryHttpClientResponse(
`uploadChunk (start: ${start}, end: ${end})`,
uploadChunkRequest
);
}
function parseEnvNumber(key: string): number | undefined {
const value = Number(process.env[key]);
if (Number.isNaN(value) || value < 0) {
return undefined;
}
return value;
}
async function uploadFile(
httpClient: HttpClient,
cacheId: number,
archivePath: string
): Promise<void> {
// Upload Chunks
const fileSize = fs.statSync(archivePath).size;
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs.openSync(archivePath, "r");
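// The descriptor is opened once and shared by every chunk's read stream
// (created below with autoClose: false), then closed exactly once in the
// finally block after all parallel uploads settle.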
const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4; // # of HTTP requests in parallel
const MAX_CHUNK_SIZE =
parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE") ?? 32 * 1024 * 1024; // 32 MB Chunks
core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
const parallelUploads = [...new Array(concurrency).keys()];
core.debug("Awaiting all uploads");
let offset = 0;
try {
await Promise.all(
parallelUploads.map(async () => {
while (offset < fileSize) {
const chunkSize = Math.min(
fileSize - offset,
MAX_CHUNK_SIZE
);
const start = offset;
const end = offset + chunkSize - 1;
offset += MAX_CHUNK_SIZE;
await uploadChunk(
httpClient,
resourceUrl,
() =>
fs.createReadStream(archivePath, {
fd,
start,
end,
autoClose: false
}),
start,
end
);
}
})
);
} finally {
fs.closeSync(fd);
}
return;
}
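// Note on the shared `offset` above: Node's event loop is single-threaded
// and each worker reads and advances `offset` synchronously before its
// first await, so no two workers can claim the same byte range. With the
// default 32 MB chunk size, a hypothetical 100 MB archive is claimed as
// three full 32 MB chunks plus one 4 MB remainder across the workers.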
async function commitCache(
httpClient: HttpClient,
cacheId: number,
filesize: number
): Promise<ITypedResponse<null>> {
const commitCacheRequest: CommitCacheRequest = { size: filesize };
return await retryTypedResponse("commitCache", () =>
httpClient.postJson<null>(
getCacheApiUrl(`caches/${cacheId.toString()}`),
commitCacheRequest
)
);
}
export async function saveCache(
key: string,
cacheId: number,
archivePath: string
): Promise<void> {
const stream = fs.createReadStream(archivePath);
const httpClient = createHttpClient();
const cacheUrl = getCacheUrl();
const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const bearerCredentialHandler = new BearerCredentialHandler(token);
core.debug("Upload cache");
await uploadFile(httpClient, cacheId, archivePath);
const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`;
const postUrl = cacheUrl + resource;
const restClient = new RestClient("actions/cache", undefined, [
bearerCredentialHandler
]);
const requestOptions = getRequestOptions();
requestOptions.additionalHeaders = {
"Content-Type": "application/octet-stream"
};
const response = await restClient.uploadStream<void>(
"POST",
postUrl,
stream,
requestOptions
// Commit Cache
core.debug("Commiting cache");
const cacheSize = utils.getArchiveFileSize(archivePath);
const commitCacheResponse = await commitCache(
httpClient,
cacheId,
cacheSize
);
if (response.statusCode !== 200) {
throw new Error(`Cache service responded with ${response.statusCode}`);
if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
throw new Error(
`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
);
}
core.info("Cache saved successfully");

src/constants.ts

@@ -18,3 +18,18 @@ export enum Events {
Push = "push",
PullRequest = "pull_request"
}
export enum CacheFilename {
Gzip = "cache.tgz",
Zstd = "cache.tzst"
}
export enum CompressionMethod {
Gzip = "gzip",
Zstd = "zstd"
}
// Socket timeout in milliseconds during download. If no traffic is received
// over the socket during this period, the socket is destroyed and the download
// is aborted.
export const SocketTimeout = 5000;

src/contracts.d.ts vendored

@@ -1,6 +1,25 @@
import { CompressionMethod } from "./constants";
export interface ArtifactCacheEntry {
cacheKey?: string;
scope?: string;
creationTime?: string;
archiveLocation?: string;
}
export interface CommitCacheRequest {
size: number;
}
export interface ReserveCacheRequest {
key: string;
version?: string;
}
export interface ReserveCacheResponse {
cacheId: number;
}
export interface CacheOptions {
compressionMethod?: CompressionMethod;
}

src/restore.ts

@@ -1,9 +1,9 @@
import * as core from "@actions/core";
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import * as path from "path";
import * as cacheHttpClient from "./cacheHttpClient";
import { Events, Inputs, State } from "./constants";
import { extractTar } from "./tar";
import * as utils from "./utils/actionUtils";
async function run(): Promise<void> {
@@ -20,11 +20,6 @@ async function run(): Promise<void> {
return;
}
const cachePath = utils.resolvePath(
core.getInput(Inputs.Path, { required: true })
);
core.debug(`Cache Path: ${cachePath}`);
const primaryKey = core.getInput(Inputs.Key, { required: true });
core.saveState(State.CacheKey, primaryKey);
@@ -59,55 +54,49 @@ async function run(): Promise<void> {
}
}
const compressionMethod = await utils.getCompressionMethod();
try {
const cacheEntry = await cacheHttpClient.getCacheEntry(keys);
if (!cacheEntry) {
core.info(
`Cache not found for input keys: ${keys.join(", ")}.`
);
const cacheEntry = await cacheHttpClient.getCacheEntry(keys, {
compressionMethod: compressionMethod
});
if (!cacheEntry?.archiveLocation) {
core.info(`Cache not found for input keys: ${keys.join(", ")}`);
return;
}
const archivePath = path.join(
await utils.createTempDirectory(),
"cache.tgz"
utils.getCacheFileName(compressionMethod)
);
core.debug(`Archive Path: ${archivePath}`);
// Store the cache result
utils.setCacheState(cacheEntry);
// Download the cache from the cache entry
await cacheHttpClient.downloadCache(cacheEntry, archivePath);
try {
// Download the cache from the cache entry
await cacheHttpClient.downloadCache(
cacheEntry.archiveLocation,
archivePath
);
const archiveFileSize = utils.getArchiveFileSize(archivePath);
core.info(
`Cache Size: ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B)`
);
const archiveFileSize = utils.getArchiveFileSize(archivePath);
core.info(
`Cache Size: ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B)`
);
// Create directory to extract tar into
await io.mkdirP(cachePath);
// http://man7.org/linux/man-pages/man1/tar.1.html
// tar [-options] <name of the tar archive> [files or directories which to add into archive]
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-xz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/")
]
: ["-xz", "-f", archivePath, "-C", cachePath];
const tarPath = await io.which("tar", true);
core.debug(`Tar Path: ${tarPath}`);
await exec(`"${tarPath}"`, args);
await extractTar(archivePath, compressionMethod);
} finally {
// Try to delete the archive to save space
try {
await utils.unlinkFile(archivePath);
} catch (error) {
core.debug(`Failed to delete archive: ${error}`);
}
}
const isExactKeyMatch = utils.isExactKeyMatch(
primaryKey,

src/save.ts

@@ -1,9 +1,9 @@
import * as core from "@actions/core";
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import * as path from "path";
import * as cacheHttpClient from "./cacheHttpClient";
import { Events, Inputs, State } from "./constants";
import { createTar } from "./tar";
import * as utils from "./utils/actionUtils";
async function run(): Promise<void> {
@@ -35,49 +35,53 @@ async function run(): Promise<void> {
return;
}
const cachePath = utils.resolvePath(
core.getInput(Inputs.Path, { required: true })
);
core.debug(`Cache Path: ${cachePath}`);
const compressionMethod = await utils.getCompressionMethod();
const archivePath = path.join(
await utils.createTempDirectory(),
"cache.tgz"
core.debug("Reserving Cache");
const cacheId = await cacheHttpClient.reserveCache(primaryKey, {
compressionMethod: compressionMethod
});
if (cacheId == -1) {
core.info(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
);
return;
}
core.debug(`Cache ID: ${cacheId}`);
const cachePaths = await utils.resolvePaths(
core
.getInput(Inputs.Path, { required: true })
.split("\n")
.filter(x => x !== "")
);
core.debug("Cache Paths:");
core.debug(`${JSON.stringify(cachePaths)}`);
const archiveFolder = await utils.createTempDirectory();
const archivePath = path.join(
archiveFolder,
utils.getCacheFileName(compressionMethod)
);
core.debug(`Archive Path: ${archivePath}`);
// http://man7.org/linux/man-pages/man1/tar.1.html
// tar [-options] <name of the tar archive> [files or directories which to add into archive]
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-cz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/"),
"."
]
: ["-cz", "-f", archivePath, "-C", cachePath, "."];
await createTar(archiveFolder, cachePaths, compressionMethod);
const tarPath = await io.which("tar", true);
core.debug(`Tar Path: ${tarPath}`);
await exec(`"${tarPath}"`, args);
const fileSizeLimit = 400 * 1024 * 1024; // 400MB
const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
const archiveFileSize = utils.getArchiveFileSize(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
if (archiveFileSize > fileSizeLimit) {
utils.logWarning(
`Cache size of ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.`
)} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`
);
return;
}
await cacheHttpClient.saveCache(primaryKey, archivePath);
core.debug(`Saving Cache (ID: ${cacheId})`);
await cacheHttpClient.saveCache(cacheId, archivePath);
} catch (error) {
utils.logWarning(error.message);
}

src/tar.ts Normal file

@@ -0,0 +1,87 @@
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import { existsSync, writeFileSync } from "fs";
import * as path from "path";
import { CompressionMethod } from "./constants";
import * as utils from "./utils/actionUtils";
async function getTarPath(args: string[]): Promise<string> {
// Explicitly use BSD Tar on Windows
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
if (existsSync(systemTar)) {
return systemTar;
} else if (await utils.useGnuTar()) {
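// GNU tar treats "C:" in a path as a remote host unless --force-local is set.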
args.push("--force-local");
}
}
return await io.which("tar", true);
}
async function execTar(args: string[], cwd?: string): Promise<void> {
try {
await exec(`"${await getTarPath(args)}"`, args, { cwd: cwd });
} catch (error) {
throw new Error(`Tar failed with error: ${error?.message}`);
}
}
function getWorkingDirectory(): string {
return process.env["GITHUB_WORKSPACE"] ?? process.cwd();
}
export async function extractTar(
archivePath: string,
compressionMethod: CompressionMethod
): Promise<void> {
// Create directory to extract tar into
const workingDirectory = getWorkingDirectory();
await io.mkdirP(workingDirectory);
// -d: Decompress.
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
const args = [
...(compressionMethod == CompressionMethod.Zstd
? ["--use-compress-program", "zstd -d --long=30"]
: ["-z"]),
"-xf",
archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
"-P",
"-C",
workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
];
await execTar(args);
}
export async function createTar(
archiveFolder: string,
sourceDirectories: string[],
compressionMethod: CompressionMethod
): Promise<void> {
// Write source directories to manifest.txt to avoid command length limits
const manifestFilename = "manifest.txt";
const cacheFileName = utils.getCacheFileName(compressionMethod);
writeFileSync(
path.join(archiveFolder, manifestFilename),
sourceDirectories.join("\n")
);
// -T#: Compress using # worker threads. If # is 0, attempt to detect and use the number of physical CPU cores.
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
const workingDirectory = getWorkingDirectory();
const args = [
...(compressionMethod == CompressionMethod.Zstd
? ["--use-compress-program", "zstd -T0 --long=30"]
: ["-z"]),
"-cf",
cacheFileName.replace(new RegExp("\\" + path.sep, "g"), "/"),
"-P",
"-C",
workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
"--files-from",
manifestFilename
];
await execTar(args, archiveFolder);
}

src/utils/actionUtils.ts

@@ -1,11 +1,20 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as glob from "@actions/glob";
import * as io from "@actions/io";
import * as fs from "fs";
import * as os from "os";
import * as path from "path";
import * as util from "util";
import * as uuidV4 from "uuid/v4";
import { Events, Outputs, State } from "../constants";
import {
CacheFilename,
CompressionMethod,
Events,
Outputs,
State
} from "../constants";
import { ArtifactCacheEntry } from "../contracts";
// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23
@@ -28,6 +37,7 @@ export async function createTempDirectory(): Promise<string> {
}
tempDirectory = path.join(baseLocation, "actions", "temp");
}
const dest = path.join(tempDirectory, uuidV4.default());
await io.mkdirP(dest);
return dest;
@@ -82,16 +92,21 @@ export function logWarning(message: string): void {
core.info(`${warningPrefix}${message}`);
}
export function resolvePath(filePath: string): string {
if (filePath[0] === "~") {
const home = os.homedir();
if (!home) {
throw new Error("Unable to resolve `~` to HOME");
}
return path.join(home, filePath.slice(1));
export async function resolvePaths(patterns: string[]): Promise<string[]> {
const paths: string[] = [];
const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
const globber = await glob.create(patterns.join("\n"), {
implicitDescendants: false
});
for await (const file of globber.globGenerator()) {
const relativeFile = path.relative(workspace, file);
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
paths.push(`${relativeFile}`);
}
return path.resolve(filePath);
return paths;
}
export function getSupportedEvents(): string[] {
@@ -105,3 +120,53 @@ export function isValidEvent(): boolean {
const githubEvent = process.env[Events.Key] || "";
return getSupportedEvents().includes(githubEvent);
}
export function unlinkFile(path: fs.PathLike): Promise<void> {
return util.promisify(fs.unlink)(path);
}
async function getVersion(app: string): Promise<string> {
core.debug(`Checking ${app} --version`);
let versionOutput = "";
try {
await exec.exec(`${app} --version`, [], {
ignoreReturnCode: true,
silent: true,
listeners: {
stdout: (data: Buffer): string =>
(versionOutput += data.toString()),
stderr: (data: Buffer): string =>
(versionOutput += data.toString())
}
});
} catch (err) {
core.debug(err.message);
}
versionOutput = versionOutput.trim();
core.debug(versionOutput);
return versionOutput;
}
export async function getCompressionMethod(): Promise<CompressionMethod> {
// Disabling zstd on Windows due to https://github.com/actions/cache/issues/301
if (os.platform() === "win32") {
return CompressionMethod.Gzip;
}
const versionOutput = await getVersion("zstd");
return versionOutput.toLowerCase().includes("zstd command line interface")
? CompressionMethod.Zstd
: CompressionMethod.Gzip;
}
export function getCacheFileName(compressionMethod: CompressionMethod): string {
return compressionMethod == CompressionMethod.Zstd
? CacheFilename.Zstd
: CacheFilename.Gzip;
}
export async function useGnuTar(): Promise<boolean> {
const versionOutput = await getVersion("tar");
return versionOutput.toLowerCase().includes("gnu tar");
}