Mirror of https://github.com/actions/cache.git (synced 2025-07-28 03:46:10 +08:00)

Compare commits: tanuj077/c... ... t-dedah/ca... (9 commits)

Commit SHA1s (author and date columns not captured in this view):
3010f3bd47
abb58eaf29
67408f6dab
82f0974fd6
36aa59375f
53812f9a6a
a90fbffdad
b65f98495c
e1165c0dec
@@ -1,14 +0,0 @@
-{
-    "name": "Node.js & TypeScript",
-    "image": "mcr.microsoft.com/devcontainers/typescript-node:16-bullseye",
-    // Features to add to the dev container. More info: https://containers.dev/implementors/features.
-    // "features": {},
-    // Use 'forwardPorts' to make a list of ports inside the container available locally.
-    // "forwardPorts": [],
-    // Use 'postCreateCommand' to run commands after the container is created.
-    "postCreateCommand": "npm install && npm run build"
-    // Configure tool-specific properties.
-    // "customizations": {},
-    // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
-    // "remoteUser": "root"
-}
.github/auto_assign.yml (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
+# Set to true to add reviewers to pull requests
+addReviewers: true
+
+# Set to true to add assignees to pull requests
+addAssignees: false
+
+# A list of reviewers to be added to pull requests (GitHub user name)
+reviewers:
+  - anuragc617
+  - pallavx
+  - pdotl
+  - phantsure
+  - kotewar
+  - aparna-ravindra
+  - tiwarishub
+  - vsvipul
+  - bishal-pdmsft
+
+# A number of reviewers added to the pull request
+# Set 0 to add all the reviewers (default: 0)
+numberOfReviewers: 1
.github/workflows/add-reviewer-pr.yml (vendored, deleted, 20 lines)
@@ -1,20 +0,0 @@
-name: Add Reviewer PR
-on:
-  pull_request_target:
-    types: [opened]
-jobs:
-  run-action:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Get current oncall
-        id: oncall
-        run: |
-          echo "CURRENT=$(curl --request GET 'https://api.pagerduty.com/oncalls?include[]=users&schedule_ids[]=P5VG2BX&earliest=true' --header 'Authorization: Token token=${{ secrets.PAGERDUTY_TOKEN }}' --header 'Accept: application/vnd.pagerduty+json;version=2' --header 'Content-Type: application/json' | jq -r '.oncalls[].user.name')" >> $GITHUB_OUTPUT
-
-      - name: Request Review
-        run: |
-          curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/pulls/${{ github.event.pull_request.number}}/requested_reviewers -d '{"reviewers":["${{steps.oncall.outputs.CURRENT}}"]}'
-
-      - name: Add Assignee
-        run: |
-          curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.pull_request.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'
.github/workflows/assign-issue.yml (vendored, deleted, 16 lines)
@@ -1,16 +0,0 @@
-name: Assign issue
-on:
-  issues:
-    types: [opened]
-jobs:
-  run-action:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Get current oncall
-        id: oncall
-        run: |
-          echo "CURRENT=$(curl --request GET 'https://api.pagerduty.com/oncalls?include[]=users&schedule_ids[]=P5VG2BX&earliest=true' --header 'Authorization: Token token=${{ secrets.PAGERDUTY_TOKEN }}' --header 'Accept: application/vnd.pagerduty+json;version=2' --header 'Content-Type: application/json' | jq -r '.oncalls[].user.name')" >> $GITHUB_OUTPUT
-
-      - name: add_assignees
-        run: |
-          curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.issue.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'
.github/workflows/auto-assign-issues.yml (vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
+name: Issue assignment
+
+on:
+  issues:
+    types: [opened]
+
+jobs:
+  auto-assign:
+    runs-on: ubuntu-latest
+    steps:
+      - name: 'Auto-assign issue'
+        uses: pozil/auto-assign-issue@v1.4.0
+        with:
+          assignees: anuragc617,pallavx,pdotl,phantsure,kotewar,tiwarishub,aparna-ravindra,vsvipul,bishal-pdmsft
+          numOfAssignee: 1
.github/workflows/auto-assign.yml (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
+name: 'Auto Assign'
+on:
+  pull_request_target:
+    types: [opened, ready_for_review]
+
+jobs:
+  add-reviews:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: kentaro-m/auto-assign-action@v1.2.1
@@ -324,113 +324,3 @@ test("restore with cache found for restore key", async () => {
     );
     expect(failedMock).toHaveBeenCalledTimes(0);
 });
-
-test("restore with enabling save on any failure feature", async () => {
-    const path = "node_modules";
-    const key = "node-test";
-    const restoreKey = "node-";
-    testUtils.setInputs({
-        path: path,
-        key,
-        restoreKeys: [restoreKey],
-        saveOnAnyFailure: true
-    });
-
-    const debugMock = jest.spyOn(core, "debug");
-    const infoMock = jest.spyOn(core, "info");
-    const failedMock = jest.spyOn(core, "setFailed");
-    const stateMock = jest.spyOn(core, "saveState");
-    const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
-    const restoreCacheMock = jest
-        .spyOn(cache, "restoreCache")
-        .mockImplementationOnce(() => {
-            return Promise.resolve(restoreKey);
-        });
-
-    await run();
-
-    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
-
-    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
-    expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
-
-    expect(debugMock).toHaveBeenCalledWith(
-        `Exporting environment variable SAVE_CACHE_ON_ANY_FAILURE`
-    );
-
-    expect(infoMock).toHaveBeenCalledWith(
-        `Input Variable SAVE_CACHE_ON_ANY_FAILURE is set to true, the cache will be saved despite of any failure in the build.`
-    );
-    expect(failedMock).toHaveBeenCalledTimes(0);
-});
-
-test("Fail restore when fail on cache miss is enabled and primary key not found", async () => {
-    const path = "node_modules";
-    const key = "node-test";
-    const restoreKey = "node-";
-    testUtils.setInputs({
-        path: path,
-        key,
-        restoreKeys: [restoreKey],
-        failOnCacheMiss: true
-    });
-
-    const failedMock = jest.spyOn(core, "setFailed");
-    const stateMock = jest.spyOn(core, "saveState");
-    const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
-    const restoreCacheMock = jest
-        .spyOn(cache, "restoreCache")
-        .mockImplementationOnce(() => {
-            return Promise.resolve(undefined);
-        });
-
-    await run();
-
-    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
-
-    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(0);
-
-    expect(failedMock).toHaveBeenCalledWith(
-        `Cache with the given input key ${key} is not found, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`
-    );
-    expect(failedMock).toHaveBeenCalledTimes(1);
-});
-
-test("Fail restore when fail on cache miss is enabled and primary key doesn't match restored key", async () => {
-    const path = "node_modules";
-    const key = "node-test";
-    const restoreKey = "node-";
-    testUtils.setInputs({
-        path: path,
-        key,
-        restoreKeys: [restoreKey],
-        failOnCacheMiss: true
-    });
-
-    const failedMock = jest.spyOn(core, "setFailed");
-    const stateMock = jest.spyOn(core, "saveState");
-    const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
-    const restoreCacheMock = jest
-        .spyOn(cache, "restoreCache")
-        .mockImplementationOnce(() => {
-            return Promise.resolve(restoreKey);
-        });
-
-    await run();
-
-    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
-
-    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
-    expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
-
-    expect(failedMock).toHaveBeenCalledWith(
-        `Restored cache key doesn't match the given input key ${key}, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`
-    );
-    expect(failedMock).toHaveBeenCalledTimes(1);
-});
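The tests removed above exercise the restore-side behaviour of the `save-on-any-failure` and `fail-on-cache-miss` inputs. As orientation, here is a minimal TypeScript sketch of the logic they assert; it paraphrases the bundled code that this comparison removes from dist/restore/index.js further down, and the helper wiring (using core.setOutput instead of the repository's own setCacheHitOutput utility) is an assumption for illustration only.

    // Sketch only: mirrors the checks asserted by the removed tests.
    // Input names ("save-on-any-failure", "fail-on-cache-miss") are the ones
    // that appear in this diff; everything else is illustrative.
    import * as cache from "@actions/cache";
    import * as core from "@actions/core";

    async function restoreWithFeatureFlags(
        paths: string[],
        primaryKey: string,
        restoreKeys: string[]
    ): Promise<void> {
        // save-on-any-failure: export an env variable so the post (save) step
        // can run even when the build fails.
        if (core.getBooleanInput("save-on-any-failure")) {
            core.debug(`Exporting environment variable SAVE_CACHE_ON_ANY_FAILURE`);
            core.exportVariable("SAVE_CACHE_ON_ANY_FAILURE", true);
            core.info(
                `Input Variable SAVE_CACHE_ON_ANY_FAILURE is set to true, the cache will be saved despite of any failure in the build.`
            );
        }

        const cacheKey = await cache.restoreCache(paths, primaryKey, restoreKeys);

        if (!cacheKey) {
            // fail-on-cache-miss: nothing was restored at all.
            if (core.getBooleanInput("fail-on-cache-miss")) {
                throw new Error(
                    `Cache with the given input key ${primaryKey} is not found, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`
                );
            }
            core.info(`Cache not found for input keys: ${[primaryKey, ...restoreKeys].join(", ")}`);
            return;
        }

        const isExactKeyMatch = cacheKey === primaryKey;
        core.setOutput("cache-hit", isExactKeyMatch);
        if (!isExactKeyMatch && core.getBooleanInput("fail-on-cache-miss")) {
            // fail-on-cache-miss: restored from a restore-key, not the primary key.
            throw new Error(
                `Restored cache key doesn't match the given input key ${primaryKey}, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`
            );
        }
        core.info(`Cache restored from key: ${cacheKey}`);
    }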
@@ -1,165 +0,0 @@
-import * as cache from "@actions/cache";
-import * as core from "@actions/core";
-
-import { Events, Inputs, RefKey } from "../src/constants";
-import run from "../src/save-only";
-import * as actionUtils from "../src/utils/actionUtils";
-import * as testUtils from "../src/utils/testUtils";
-
-jest.mock("@actions/core");
-jest.mock("@actions/cache");
-jest.mock("../src/utils/actionUtils");
-
-beforeAll(() => {
-    jest.spyOn(core, "getInput").mockImplementation((name, options) => {
-        return jest.requireActual("@actions/core").getInput(name, options);
-    });
-
-    jest.spyOn(actionUtils, "getCacheState").mockImplementation(() => {
-        return jest.requireActual("../src/utils/actionUtils").getCacheState();
-    });
-
-    jest.spyOn(actionUtils, "getInputAsArray").mockImplementation(
-        (name, options) => {
-            return jest
-                .requireActual("../src/utils/actionUtils")
-                .getInputAsArray(name, options);
-        }
-    );
-
-    jest.spyOn(actionUtils, "getInputAsInt").mockImplementation(
-        (name, options) => {
-            return jest
-                .requireActual("../src/utils/actionUtils")
-                .getInputAsInt(name, options);
-        }
-    );
-
-    jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
-        (key, cacheResult) => {
-            return jest
-                .requireActual("../src/utils/actionUtils")
-                .isExactKeyMatch(key, cacheResult);
-        }
-    );
-
-    jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => {
-        const actualUtils = jest.requireActual("../src/utils/actionUtils");
-        return actualUtils.isValidEvent();
-    });
-});
-
-beforeEach(() => {
-    process.env[Events.Key] = Events.Push;
-    process.env[RefKey] = "refs/heads/feature-branch";
-
-    jest.spyOn(actionUtils, "isGhes").mockImplementation(() => false);
-    jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation(
-        () => true
-    );
-});
-
-afterEach(() => {
-    testUtils.clearInputs();
-    delete process.env[Events.Key];
-    delete process.env[RefKey];
-});
-
-test("save cache when save-only is required", async () => {
-    const failedMock = jest.spyOn(core, "setFailed");
-
-    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
-    const savedCacheKey = "Linux-node-";
-
-    jest.spyOn(core, "getInput")
-        // Cache Entry State
-        .mockImplementationOnce(() => {
-            return savedCacheKey;
-        })
-        // Cache Key
-        .mockImplementationOnce(() => {
-            return primaryKey;
-        });
-
-    const inputPath = "node_modules";
-    testUtils.setInput(Inputs.Path, inputPath);
-    testUtils.setInput(Inputs.UploadChunkSize, "4000000");
-
-    const cacheId = 4;
-    const saveCacheMock = jest
-        .spyOn(cache, "saveCache")
-        .mockImplementationOnce(() => {
-            return Promise.resolve(cacheId);
-        });
-
-    await run();
-
-    expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, {
-        uploadChunkSize: 4000000
-    });
-
-    expect(failedMock).toHaveBeenCalledTimes(0);
-});
-
-test("save when save on any failure is true", async () => {
-    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
-    const failedMock = jest.spyOn(core, "setFailed");
-
-    const savedCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
-    const primaryKey = "Linux-node-";
-    const inputPath = "node_modules";
-
-    jest.spyOn(core, "getInput")
-        // Cache Entry State
-        .mockImplementationOnce(() => {
-            return savedCacheKey;
-        })
-        // Cache Key
-        .mockImplementationOnce(() => {
-            return primaryKey;
-        });
-
-    testUtils.setInput(Inputs.Path, inputPath);
-    testUtils.setInput(Inputs.UploadChunkSize, "4000000");
-    testUtils.setInput(Inputs.SaveOnAnyFailure, "true");
-
-    const cacheId = 4;
-    const saveCacheMock = jest
-        .spyOn(cache, "saveCache")
-        .mockImplementationOnce(() => {
-            return Promise.resolve(cacheId);
-        });
-
-    await run();
-
-    expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(logWarningMock).toHaveBeenCalledTimes(0);
-    expect(failedMock).toHaveBeenCalledTimes(0);
-});
-
-test("save with no primary key in input outputs warning", async () => {
-    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
-    const failedMock = jest.spyOn(core, "setFailed");
-
-    const savedCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
-    jest.spyOn(core, "getState")
-        // Cache Entry State
-        .mockImplementationOnce(() => {
-            return savedCacheKey;
-        })
-        // Cache Key
-        .mockImplementationOnce(() => {
-            return "";
-        });
-    const saveCacheMock = jest.spyOn(cache, "saveCache");
-
-    await run();
-
-    expect(saveCacheMock).toHaveBeenCalledTimes(0);
-    expect(logWarningMock).toHaveBeenCalledWith(
-        `Error retrieving key from inputs.`
-    );
-    expect(logWarningMock).toHaveBeenCalledTimes(1);
-    expect(failedMock).toHaveBeenCalledTimes(0);
-});
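The deleted test file above imports its entry point from ../src/save-only. That source file is not part of this comparison, but judging from the bundled module removed from dist/save/index.js further down, its shape would have been roughly the following; this is a back-translation from the bundle, not the verbatim source.

    // Rough source-level equivalent of the bundled "save-only" module (webpack
    // module 973) that this comparison removes; treat the details as assumptions.
    import * as core from "@actions/core";

    import { Inputs } from "./constants";
    import save from "./save";
    import * as utils from "./utils/actionUtils";

    export let saveOnly = false;

    async function runSaveAction(): Promise<void> {
        // The save-only flavour reads the key directly from the inputs rather
        // than from the state written by a previous restore step.
        if (!core.getInput(Inputs.Key)) {
            utils.logWarning(`Error retrieving key from inputs.`);
            return;
        }
        saveOnly = true;
        await save();
    }

    runSaveAction();

    export default runSaveAction;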
action.yml (10 lines changed)
@@ -14,14 +14,6 @@ inputs:
   upload-chunk-size:
     description: 'The chunk size used to split up large files during upload, in bytes'
     required: false
-  exit-on-cache-miss:
-    description: 'Fail the workflow if the cache is not found for the primary key'
-    required: false
-    default: false
-  save-on-any-failure:
-    description: 'Save cache (on cache miss) despite of any failure during the workflow run'
-    required: false
-    default: false
 outputs:
   cache-hit:
     description: 'A boolean value to indicate an exact match was found for the primary key'
@@ -29,7 +21,7 @@ runs:
   using: 'node16'
   main: 'dist/restore/index.js'
   post: 'dist/save/index.js'
-  post-if: (success() || (env.SAVE_CACHE_ON_ANY_FAILURE == 'yes'))
+  post-if: 'success()'
 branding:
   icon: 'archive'
   color: 'gray-dark'
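For orientation, a hypothetical workflow step using the extra input that the removed side of this action.yml declares might look like the snippet below. The input name and the SAVE_CACHE_ON_ANY_FAILURE variable follow the diff above; the surrounding job layout and the action ref are illustrative assumptions only.

    # Illustrative only: the save-on-any-failure input exists only on the branch
    # whose action.yml declares it; the ref below is a placeholder.
    jobs:
      build:
        runs-on: ubuntu-latest
        steps:
          - uses: actions/checkout@v3
          - name: Cache node modules
            uses: actions/cache@v3   # placeholder ref for the branch under comparison
            with:
              path: node_modules
              key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
              restore-keys: |
                ${{ runner.os }}-node-
              # Exports SAVE_CACHE_ON_ANY_FAILURE so the post-if condition
              # (success() || env.SAVE_CACHE_ON_ANY_FAILURE == 'yes') still saves the cache.
              save-on-any-failure: true
          - run: npm ci && npm test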
dist/restore/index.js (vendored, 148 lines changed)
@@ -1892,10 +1892,10 @@ function serial(list, iterator, callback)
 module.exports = minimatch
 minimatch.Minimatch = Minimatch
 
-var path = (function () { try { return __webpack_require__(622) } catch (e) {}}()) || {
-  sep: '/'
-}
-minimatch.sep = path.sep
+var path = { sep: '/' }
+try {
+  path = __webpack_require__(622)
+} catch (er) {}
 
 var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
 var expand = __webpack_require__(306)
@@ -1947,64 +1947,43 @@ function filter (pattern, options) {
 }
 
 function ext (a, b) {
+  a = a || {}
   b = b || {}
   var t = {}
-  Object.keys(a).forEach(function (k) {
-    t[k] = a[k]
-  })
   Object.keys(b).forEach(function (k) {
     t[k] = b[k]
   })
+  Object.keys(a).forEach(function (k) {
+    t[k] = a[k]
+  })
   return t
 }
 
 minimatch.defaults = function (def) {
-  if (!def || typeof def !== 'object' || !Object.keys(def).length) {
-    return minimatch
-  }
+  if (!def || !Object.keys(def).length) return minimatch
 
   var orig = minimatch
 
   var m = function minimatch (p, pattern, options) {
-    return orig(p, pattern, ext(def, options))
+    return orig.minimatch(p, pattern, ext(def, options))
  }
 
   m.Minimatch = function Minimatch (pattern, options) {
     return new orig.Minimatch(pattern, ext(def, options))
   }
-  m.Minimatch.defaults = function defaults (options) {
-    return orig.defaults(ext(def, options)).Minimatch
-  }
-
-  m.filter = function filter (pattern, options) {
-    return orig.filter(pattern, ext(def, options))
-  }
-
-  m.defaults = function defaults (options) {
-    return orig.defaults(ext(def, options))
-  }
-
-  m.makeRe = function makeRe (pattern, options) {
-    return orig.makeRe(pattern, ext(def, options))
-  }
-
-  m.braceExpand = function braceExpand (pattern, options) {
-    return orig.braceExpand(pattern, ext(def, options))
-  }
-
-  m.match = function (list, pattern, options) {
-    return orig.match(list, pattern, ext(def, options))
-  }
 
   return m
 }
 
 Minimatch.defaults = function (def) {
+  if (!def || !Object.keys(def).length) return Minimatch
   return minimatch.defaults(def).Minimatch
 }
 
 function minimatch (p, pattern, options) {
-  assertValidPattern(pattern)
+  if (typeof pattern !== 'string') {
+    throw new TypeError('glob pattern string required')
+  }
 
   if (!options) options = {}
 
@@ -2013,6 +1992,9 @@ function minimatch (p, pattern, options) {
     return false
   }
 
+  // "" only matches ""
+  if (pattern.trim() === '') return p === ''
+
   return new Minimatch(pattern, options).match(p)
 }
 
@@ -2021,14 +2003,15 @@ function Minimatch (pattern, options) {
     return new Minimatch(pattern, options)
   }
 
-  assertValidPattern(pattern)
+  if (typeof pattern !== 'string') {
+    throw new TypeError('glob pattern string required')
+  }
 
   if (!options) options = {}
 
   pattern = pattern.trim()
 
   // windows support: need to use /, not \
-  if (!options.allowWindowsEscape && path.sep !== '/') {
+  if (path.sep !== '/') {
     pattern = pattern.split(path.sep).join('/')
   }
 
@@ -2039,7 +2022,6 @@ function Minimatch (pattern, options) {
   this.negate = false
   this.comment = false
   this.empty = false
-  this.partial = !!options.partial
 
   // make the set of regexps etc.
   this.make()
@@ -2049,6 +2031,9 @@ Minimatch.prototype.debug = function () {}
 
 Minimatch.prototype.make = make
 function make () {
+  // don't do it more than once.
+  if (this._made) return
+
   var pattern = this.pattern
   var options = this.options
 
@@ -2068,7 +2053,7 @@ function make () {
   // step 2: expand braces
   var set = this.globSet = this.braceExpand()
 
-  if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) }
+  if (options.debug) this.debug = console.error
 
   this.debug(this.pattern, set)
 
@@ -2148,11 +2133,12 @@ function braceExpand (pattern, options) {
   pattern = typeof pattern === 'undefined'
     ? this.pattern : pattern
 
-  assertValidPattern(pattern)
+  if (typeof pattern === 'undefined') {
+    throw new TypeError('undefined pattern')
+  }
 
-  // Thanks to Yeting Li <https://github.com/yetingli> for
-  // improving this regexp to avoid a ReDOS vulnerability.
-  if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+  if (options.nobrace ||
+    !pattern.match(/\{.*\}/)) {
     // shortcut. no need to expand.
     return [pattern]
   }
@@ -2160,17 +2146,6 @@ function braceExpand (pattern, options) {
   return expand(pattern)
 }
 
-var MAX_PATTERN_LENGTH = 1024 * 64
-var assertValidPattern = function (pattern) {
-  if (typeof pattern !== 'string') {
-    throw new TypeError('invalid pattern')
-  }
-
-  if (pattern.length > MAX_PATTERN_LENGTH) {
-    throw new TypeError('pattern is too long')
-  }
-}
-
 // parse a component of the expanded set.
 // At this point, no pattern may contain "/" in it
 // so we're going to return a 2d array, where each entry is the full
@@ -2185,17 +2160,14 @@ var assertValidPattern = function (pattern) {
 Minimatch.prototype.parse = parse
 var SUBPARSE = {}
 function parse (pattern, isSub) {
-  assertValidPattern(pattern)
+  if (pattern.length > 1024 * 64) {
+    throw new TypeError('pattern is too long')
+  }
 
   var options = this.options
 
   // shortcuts
-  if (pattern === '**') {
-    if (!options.noglobstar)
-      return GLOBSTAR
-    else
-      pattern = '*'
-  }
+  if (!options.noglobstar && pattern === '**') return GLOBSTAR
   if (pattern === '') return ''
 
   var re = ''
@@ -2251,12 +2223,10 @@ function parse (pattern, isSub) {
       }
 
       switch (c) {
-        /* istanbul ignore next */
-        case '/': {
+        case '/':
           // completely not allowed, even escaped.
           // Should already be path-split by now.
           return false
-        }
 
         case '\\':
           clearStateChar()
@@ -2375,6 +2345,7 @@ function parse (pattern, isSub) {
 
   // handle the case where we left a class open.
   // "[z-a]" is valid, equivalent to "\[z-a\]"
+  if (inClass) {
     // split where the last [ was, make sure we don't have
     // an invalid re. if so, re-walk the contents of the
     // would-be class to re-translate any characters that
@@ -2393,6 +2364,7 @@ function parse (pattern, isSub) {
       inClass = false
       continue
     }
+  }
 
   // finish up the class.
   hasMagic = true
@@ -2475,7 +2447,9 @@ function parse (pattern, isSub) {
   // something that could conceivably capture a dot
   var addPatternStart = false
   switch (re.charAt(0)) {
-    case '[': case '.': case '(': addPatternStart = true
+    case '.':
+    case '[':
+    case '(': addPatternStart = true
   }
 
   // Hack to work around lack of negative lookbehind in JS
@@ -2537,7 +2511,7 @@ function parse (pattern, isSub) {
   var flags = options.nocase ? 'i' : ''
   try {
     var regExp = new RegExp('^' + re + '$', flags)
-  } catch (er) /* istanbul ignore next - should be impossible */ {
+  } catch (er) {
     // If it was an invalid regular expression, then it can't match
     // anything.  This trick looks for a character after the end of
    // the string, which is of course impossible, except in multi-line
@@ -2595,7 +2569,7 @@ function makeRe () {
 
   try {
     this.regexp = new RegExp(re, flags)
-  } catch (ex) /* istanbul ignore next - should be impossible */ {
+  } catch (ex) {
    this.regexp = false
  }
  return this.regexp
@@ -2613,8 +2587,8 @@ minimatch.match = function (list, pattern, options) {
  return list
 }
 
-Minimatch.prototype.match = function match (f, partial) {
-  if (typeof partial === 'undefined') partial = this.partial
+Minimatch.prototype.match = match
+function match (f, partial) {
  this.debug('match', f, this.pattern)
  // short-circuit in the case of busted things.
  // comments, etc.
@@ -2696,7 +2670,6 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
 
    // should be impossible.
    // some invalid regexp stuff in the set.
-    /* istanbul ignore if */
    if (p === false) return false
 
    if (p === GLOBSTAR) {
@@ -2770,7 +2743,6 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
      // no match was found.
      // However, in partial mode, we can't say this is necessarily over.
      // If there's more *pattern* left, then
-      /* istanbul ignore if */
      if (partial) {
        // ran out of file
        this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
@@ -2784,7 +2756,11 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
    // patterns with magic have been turned into regexps.
    var hit
    if (typeof p === 'string') {
+      if (options.nocase) {
+        hit = f.toLowerCase() === p.toLowerCase()
+      } else {
        hit = f === p
+      }
      this.debug('string match', p, f, hit)
    } else {
      hit = f.match(p)
@@ -2815,16 +2791,16 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
      // this is ok if we're doing the match as part of
      // a glob fs traversal.
      return partial
-    } else /* istanbul ignore else */ if (pi === pl) {
+    } else if (pi === pl) {
      // ran out of pattern, still have file left.
      // this is only acceptable if we're on the very last
      // empty segment of a file with a trailing slash.
      // a/* should match a/b/
-      return (fi === fl - 1) && (file[fi] === '')
+      var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')
+      return emptyFileEnd
    }
 
  // should be unreachable.
-  /* istanbul ignore next */
  throw new Error('wtf?')
 }
 
@@ -4964,15 +4940,13 @@ exports.checkBypass = checkBypass;
 "use strict";
 
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.RefKey = exports.Variables = exports.Events = exports.State = exports.Outputs = exports.Inputs = void 0;
+exports.RefKey = exports.Events = exports.State = exports.Outputs = exports.Inputs = void 0;
 var Inputs;
 (function (Inputs) {
     Inputs["Key"] = "key";
     Inputs["Path"] = "path";
     Inputs["RestoreKeys"] = "restore-keys";
     Inputs["UploadChunkSize"] = "upload-chunk-size";
-    Inputs["FailOnCacheMiss"] = "fail-on-cache-miss";
-    Inputs["SaveOnAnyFailure"] = "save-on-any-failure";
 })(Inputs = exports.Inputs || (exports.Inputs = {}));
 var Outputs;
 (function (Outputs) {
@@ -4989,10 +4963,6 @@ var Events;
     Events["Push"] = "push";
     Events["PullRequest"] = "pull_request";
 })(Events = exports.Events || (exports.Events = {}));
-var Variables;
-(function (Variables) {
-    Variables["SaveCacheOnAnyFailure"] = "SAVE_CACHE_ON_ANY_FAILURE";
-})(Variables = exports.Variables || (exports.Variables = {}));
 exports.RefKey = "GITHUB_REF";
 
 
@@ -49014,17 +48984,7 @@ function run() {
                 required: true
             });
             const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys);
-            //Check if user wants to save cache despite of failure in any previous job
-            const saveCache = core.getBooleanInput(constants_1.Inputs.SaveOnAnyFailure);
-            if (saveCache == true) {
-                core.debug(`Exporting environment variable ${constants_1.Variables.SaveCacheOnAnyFailure}`);
-                core.exportVariable(constants_1.Variables.SaveCacheOnAnyFailure, saveCache);
-                core.info(`Input Variable ${constants_1.Variables.SaveCacheOnAnyFailure} is set to true, the cache will be saved despite of any failure in the build.`);
-            }
             if (!cacheKey) {
-                if (core.getBooleanInput(constants_1.Inputs.FailOnCacheMiss) == true) {
-                    throw new Error(`Cache with the given input key ${primaryKey} is not found, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`);
-                }
                 core.info(`Cache not found for input keys: ${[
                     primaryKey,
                     ...restoreKeys
@@ -49035,10 +48995,6 @@ function run() {
             utils.setCacheState(cacheKey);
            const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
            utils.setCacheHitOutput(isExactKeyMatch);
-            if (!isExactKeyMatch &&
-                core.getBooleanInput(constants_1.Inputs.FailOnCacheMiss) == true) {
-                throw new Error(`Restored cache key doesn't match the given input key ${primaryKey}, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`);
-            }
            core.info(`Cache restored from key: ${cacheKey}`);
        }
        catch (error) {
dist/save-only/index.js (vendored, 61367 lines)
File diff suppressed because one or more lines are too long
dist/save/index.js (vendored, 202 lines changed; the minimatch and constants hunks below repeat the changes shown for dist/restore/index.js)
@@ -1892,10 +1892,10 @@ function serial(list, iterator, callback)
|
|||||||
module.exports = minimatch
|
module.exports = minimatch
|
||||||
minimatch.Minimatch = Minimatch
|
minimatch.Minimatch = Minimatch
|
||||||
|
|
||||||
var path = (function () { try { return __webpack_require__(622) } catch (e) {}}()) || {
|
var path = { sep: '/' }
|
||||||
sep: '/'
|
try {
|
||||||
}
|
path = __webpack_require__(622)
|
||||||
minimatch.sep = path.sep
|
} catch (er) {}
|
||||||
|
|
||||||
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
|
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
|
||||||
var expand = __webpack_require__(306)
|
var expand = __webpack_require__(306)
|
||||||
@@ -1947,64 +1947,43 @@ function filter (pattern, options) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function ext (a, b) {
|
function ext (a, b) {
|
||||||
|
a = a || {}
|
||||||
b = b || {}
|
b = b || {}
|
||||||
var t = {}
|
var t = {}
|
||||||
Object.keys(a).forEach(function (k) {
|
|
||||||
t[k] = a[k]
|
|
||||||
})
|
|
||||||
Object.keys(b).forEach(function (k) {
|
Object.keys(b).forEach(function (k) {
|
||||||
t[k] = b[k]
|
t[k] = b[k]
|
||||||
})
|
})
|
||||||
|
Object.keys(a).forEach(function (k) {
|
||||||
|
t[k] = a[k]
|
||||||
|
})
|
||||||
return t
|
return t
|
||||||
}
|
}
|
||||||
|
|
||||||
minimatch.defaults = function (def) {
|
minimatch.defaults = function (def) {
|
||||||
if (!def || typeof def !== 'object' || !Object.keys(def).length) {
|
if (!def || !Object.keys(def).length) return minimatch
|
||||||
return minimatch
|
|
||||||
}
|
|
||||||
|
|
||||||
var orig = minimatch
|
var orig = minimatch
|
||||||
|
|
||||||
var m = function minimatch (p, pattern, options) {
|
var m = function minimatch (p, pattern, options) {
|
||||||
return orig(p, pattern, ext(def, options))
|
return orig.minimatch(p, pattern, ext(def, options))
|
||||||
}
|
}
|
||||||
|
|
||||||
m.Minimatch = function Minimatch (pattern, options) {
|
m.Minimatch = function Minimatch (pattern, options) {
|
||||||
return new orig.Minimatch(pattern, ext(def, options))
|
return new orig.Minimatch(pattern, ext(def, options))
|
||||||
}
|
}
|
||||||
m.Minimatch.defaults = function defaults (options) {
|
|
||||||
return orig.defaults(ext(def, options)).Minimatch
|
|
||||||
}
|
|
||||||
|
|
||||||
m.filter = function filter (pattern, options) {
|
|
||||||
return orig.filter(pattern, ext(def, options))
|
|
||||||
}
|
|
||||||
|
|
||||||
m.defaults = function defaults (options) {
|
|
||||||
return orig.defaults(ext(def, options))
|
|
||||||
}
|
|
||||||
|
|
||||||
m.makeRe = function makeRe (pattern, options) {
|
|
||||||
return orig.makeRe(pattern, ext(def, options))
|
|
||||||
}
|
|
||||||
|
|
||||||
m.braceExpand = function braceExpand (pattern, options) {
|
|
||||||
return orig.braceExpand(pattern, ext(def, options))
|
|
||||||
}
|
|
||||||
|
|
||||||
m.match = function (list, pattern, options) {
|
|
||||||
return orig.match(list, pattern, ext(def, options))
|
|
||||||
}
|
|
||||||
|
|
||||||
return m
|
return m
|
||||||
}
|
}
|
||||||
|
|
||||||
Minimatch.defaults = function (def) {
|
Minimatch.defaults = function (def) {
|
||||||
|
if (!def || !Object.keys(def).length) return Minimatch
|
||||||
return minimatch.defaults(def).Minimatch
|
return minimatch.defaults(def).Minimatch
|
||||||
}
|
}
|
||||||
|
|
||||||
function minimatch (p, pattern, options) {
|
function minimatch (p, pattern, options) {
|
||||||
assertValidPattern(pattern)
|
if (typeof pattern !== 'string') {
|
||||||
|
throw new TypeError('glob pattern string required')
|
||||||
|
}
|
||||||
|
|
||||||
if (!options) options = {}
|
if (!options) options = {}
|
||||||
|
|
||||||
@@ -2013,6 +1992,9 @@ function minimatch (p, pattern, options) {
|
|||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// "" only matches ""
|
||||||
|
if (pattern.trim() === '') return p === ''
|
||||||
|
|
||||||
return new Minimatch(pattern, options).match(p)
|
return new Minimatch(pattern, options).match(p)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -2021,14 +2003,15 @@ function Minimatch (pattern, options) {
|
|||||||
return new Minimatch(pattern, options)
|
return new Minimatch(pattern, options)
|
||||||
}
|
}
|
||||||
|
|
||||||
assertValidPattern(pattern)
|
if (typeof pattern !== 'string') {
|
||||||
|
throw new TypeError('glob pattern string required')
|
||||||
|
}
|
||||||
|
|
||||||
if (!options) options = {}
|
if (!options) options = {}
|
||||||
|
|
||||||
pattern = pattern.trim()
|
pattern = pattern.trim()
|
||||||
|
|
||||||
// windows support: need to use /, not \
|
// windows support: need to use /, not \
|
||||||
if (!options.allowWindowsEscape && path.sep !== '/') {
|
if (path.sep !== '/') {
|
||||||
pattern = pattern.split(path.sep).join('/')
|
pattern = pattern.split(path.sep).join('/')
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -2039,7 +2022,6 @@ function Minimatch (pattern, options) {
|
|||||||
this.negate = false
|
this.negate = false
|
||||||
this.comment = false
|
this.comment = false
|
||||||
this.empty = false
|
this.empty = false
|
||||||
this.partial = !!options.partial
|
|
||||||
|
|
||||||
// make the set of regexps etc.
|
// make the set of regexps etc.
|
||||||
this.make()
|
this.make()
|
||||||
@@ -2049,6 +2031,9 @@ Minimatch.prototype.debug = function () {}
|
|||||||
|
|
||||||
Minimatch.prototype.make = make
|
Minimatch.prototype.make = make
|
||||||
function make () {
|
function make () {
|
||||||
|
// don't do it more than once.
|
||||||
|
if (this._made) return
|
||||||
|
|
||||||
var pattern = this.pattern
|
var pattern = this.pattern
|
||||||
var options = this.options
|
var options = this.options
|
||||||
|
|
||||||
@@ -2068,7 +2053,7 @@ function make () {
|
|||||||
// step 2: expand braces
|
// step 2: expand braces
|
||||||
var set = this.globSet = this.braceExpand()
|
var set = this.globSet = this.braceExpand()
|
||||||
|
|
||||||
if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) }
|
if (options.debug) this.debug = console.error
|
||||||
|
|
||||||
this.debug(this.pattern, set)
|
this.debug(this.pattern, set)
|
||||||
|
|
||||||
@@ -2148,11 +2133,12 @@ function braceExpand (pattern, options) {
|
|||||||
pattern = typeof pattern === 'undefined'
|
pattern = typeof pattern === 'undefined'
|
||||||
? this.pattern : pattern
|
? this.pattern : pattern
|
||||||
|
|
||||||
assertValidPattern(pattern)
|
if (typeof pattern === 'undefined') {
|
||||||
|
throw new TypeError('undefined pattern')
|
||||||
|
}
|
||||||
|
|
||||||
// Thanks to Yeting Li <https://github.com/yetingli> for
|
if (options.nobrace ||
|
||||||
// improving this regexp to avoid a ReDOS vulnerability.
|
!pattern.match(/\{.*\}/)) {
|
||||||
if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
|
|
||||||
// shortcut. no need to expand.
|
// shortcut. no need to expand.
|
||||||
return [pattern]
|
return [pattern]
|
||||||
}
|
}
|
||||||
@@ -2160,17 +2146,6 @@ function braceExpand (pattern, options) {
|
|||||||
return expand(pattern)
|
return expand(pattern)
|
||||||
}
|
}
|
||||||
|
|
||||||
var MAX_PATTERN_LENGTH = 1024 * 64
|
|
||||||
var assertValidPattern = function (pattern) {
|
|
||||||
if (typeof pattern !== 'string') {
|
|
||||||
throw new TypeError('invalid pattern')
|
|
||||||
}
|
|
||||||
|
|
||||||
if (pattern.length > MAX_PATTERN_LENGTH) {
|
|
||||||
throw new TypeError('pattern is too long')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// parse a component of the expanded set.
|
// parse a component of the expanded set.
|
||||||
// At this point, no pattern may contain "/" in it
|
// At this point, no pattern may contain "/" in it
|
||||||
// so we're going to return a 2d array, where each entry is the full
|
// so we're going to return a 2d array, where each entry is the full
|
||||||
@@ -2185,17 +2160,14 @@ var assertValidPattern = function (pattern) {
|
|||||||
Minimatch.prototype.parse = parse
|
Minimatch.prototype.parse = parse
|
||||||
var SUBPARSE = {}
|
var SUBPARSE = {}
|
||||||
function parse (pattern, isSub) {
|
function parse (pattern, isSub) {
|
||||||
assertValidPattern(pattern)
|
if (pattern.length > 1024 * 64) {
|
||||||
|
throw new TypeError('pattern is too long')
|
||||||
|
}
|
||||||
|
|
||||||
var options = this.options
|
var options = this.options
|
||||||
|
|
||||||
// shortcuts
|
// shortcuts
|
||||||
if (pattern === '**') {
|
if (!options.noglobstar && pattern === '**') return GLOBSTAR
|
||||||
if (!options.noglobstar)
|
|
||||||
return GLOBSTAR
|
|
||||||
else
|
|
||||||
pattern = '*'
|
|
||||||
}
|
|
||||||
if (pattern === '') return ''
|
if (pattern === '') return ''
|
||||||
|
|
||||||
var re = ''
|
var re = ''
|
||||||
@@ -2251,12 +2223,10 @@ function parse (pattern, isSub) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
switch (c) {
|
switch (c) {
|
||||||
/* istanbul ignore next */
|
case '/':
|
||||||
case '/': {
|
|
||||||
// completely not allowed, even escaped.
|
// completely not allowed, even escaped.
|
||||||
// Should already be path-split by now.
|
// Should already be path-split by now.
|
||||||
return false
|
return false
|
||||||
}
|
|
||||||
|
|
||||||
case '\\':
|
case '\\':
|
||||||
clearStateChar()
|
clearStateChar()
|
||||||
@@ -2375,6 +2345,7 @@ function parse (pattern, isSub) {
|
|||||||
|
|
||||||
// handle the case where we left a class open.
|
// handle the case where we left a class open.
|
||||||
// "[z-a]" is valid, equivalent to "\[z-a\]"
|
// "[z-a]" is valid, equivalent to "\[z-a\]"
|
||||||
|
if (inClass) {
|
||||||
// split where the last [ was, make sure we don't have
|
// split where the last [ was, make sure we don't have
|
||||||
// an invalid re. if so, re-walk the contents of the
|
// an invalid re. if so, re-walk the contents of the
|
||||||
// would-be class to re-translate any characters that
|
// would-be class to re-translate any characters that
|
||||||
@@ -2393,6 +2364,7 @@ function parse (pattern, isSub) {
|
|||||||
inClass = false
|
inClass = false
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// finish up the class.
|
// finish up the class.
|
||||||
hasMagic = true
|
hasMagic = true
|
||||||
@@ -2475,7 +2447,9 @@ function parse (pattern, isSub) {
|
|||||||
// something that could conceivably capture a dot
|
// something that could conceivably capture a dot
|
||||||
var addPatternStart = false
|
var addPatternStart = false
|
||||||
switch (re.charAt(0)) {
|
switch (re.charAt(0)) {
|
||||||
case '[': case '.': case '(': addPatternStart = true
|
case '.':
|
||||||
|
case '[':
|
||||||
|
case '(': addPatternStart = true
|
||||||
}
|
}
|
||||||
|
|
||||||
// Hack to work around lack of negative lookbehind in JS
|
// Hack to work around lack of negative lookbehind in JS
|
||||||
@@ -2537,7 +2511,7 @@ function parse (pattern, isSub) {
|
|||||||
var flags = options.nocase ? 'i' : ''
|
var flags = options.nocase ? 'i' : ''
|
||||||
try {
|
try {
|
||||||
var regExp = new RegExp('^' + re + '$', flags)
|
var regExp = new RegExp('^' + re + '$', flags)
|
||||||
} catch (er) /* istanbul ignore next - should be impossible */ {
|
} catch (er) {
|
||||||
// If it was an invalid regular expression, then it can't match
|
// If it was an invalid regular expression, then it can't match
|
||||||
// anything. This trick looks for a character after the end of
|
// anything. This trick looks for a character after the end of
|
||||||
// the string, which is of course impossible, except in multi-line
|
// the string, which is of course impossible, except in multi-line
|
||||||
@@ -2595,7 +2569,7 @@ function makeRe () {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
this.regexp = new RegExp(re, flags)
|
this.regexp = new RegExp(re, flags)
|
||||||
} catch (ex) /* istanbul ignore next - should be impossible */ {
|
} catch (ex) {
|
||||||
this.regexp = false
|
this.regexp = false
|
||||||
}
|
}
|
||||||
return this.regexp
|
return this.regexp
|
||||||
@@ -2613,8 +2587,8 @@ minimatch.match = function (list, pattern, options) {
|
|||||||
return list
|
return list
|
||||||
}
|
}
|
||||||
|
|
||||||
Minimatch.prototype.match = function match (f, partial) {
|
Minimatch.prototype.match = match
|
||||||
if (typeof partial === 'undefined') partial = this.partial
|
function match (f, partial) {
|
||||||
this.debug('match', f, this.pattern)
|
this.debug('match', f, this.pattern)
|
||||||
// short-circuit in the case of busted things.
|
// short-circuit in the case of busted things.
|
||||||
// comments, etc.
|
// comments, etc.
|
||||||
@@ -2696,7 +2670,6 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
|||||||
|
|
||||||
// should be impossible.
|
// should be impossible.
|
||||||
// some invalid regexp stuff in the set.
|
// some invalid regexp stuff in the set.
|
||||||
/* istanbul ignore if */
|
|
||||||
if (p === false) return false
|
if (p === false) return false
|
||||||
|
|
||||||
if (p === GLOBSTAR) {
|
if (p === GLOBSTAR) {
|
||||||
@@ -2770,7 +2743,6 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
|||||||
// no match was found.
|
// no match was found.
|
||||||
// However, in partial mode, we can't say this is necessarily over.
|
// However, in partial mode, we can't say this is necessarily over.
|
||||||
// If there's more *pattern* left, then
|
// If there's more *pattern* left, then
|
||||||
/* istanbul ignore if */
|
|
||||||
if (partial) {
|
if (partial) {
|
||||||
// ran out of file
|
// ran out of file
|
||||||
this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
|
this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
|
||||||
@@ -2784,7 +2756,11 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
|||||||
// patterns with magic have been turned into regexps.
|
// patterns with magic have been turned into regexps.
|
||||||
var hit
|
var hit
|
||||||
if (typeof p === 'string') {
|
if (typeof p === 'string') {
|
||||||
|
if (options.nocase) {
|
||||||
|
hit = f.toLowerCase() === p.toLowerCase()
|
||||||
|
} else {
|
||||||
hit = f === p
|
hit = f === p
|
||||||
|
}
|
||||||
this.debug('string match', p, f, hit)
|
this.debug('string match', p, f, hit)
|
||||||
} else {
|
} else {
|
||||||
hit = f.match(p)
|
hit = f.match(p)
|
||||||
@@ -2815,16 +2791,16 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
|||||||
// this is ok if we're doing the match as part of
|
// this is ok if we're doing the match as part of
|
||||||
// a glob fs traversal.
|
// a glob fs traversal.
|
||||||
return partial
|
return partial
|
||||||
} else /* istanbul ignore else */ if (pi === pl) {
|
} else if (pi === pl) {
|
||||||
// ran out of pattern, still have file left.
|
// ran out of pattern, still have file left.
|
||||||
// this is only acceptable if we're on the very last
|
// this is only acceptable if we're on the very last
|
||||||
// empty segment of a file with a trailing slash.
|
// empty segment of a file with a trailing slash.
|
||||||
// a/* should match a/b/
|
// a/* should match a/b/
|
||||||
return (fi === fl - 1) && (file[fi] === '')
|
var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')
|
||||||
|
return emptyFileEnd
|
||||||
}
|
}
|
||||||
|
|
||||||
// should be unreachable.
|
// should be unreachable.
|
||||||
/* istanbul ignore next */
|
|
||||||
throw new Error('wtf?')
|
throw new Error('wtf?')
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -4964,15 +4940,13 @@ exports.checkBypass = checkBypass;
|
|||||||
"use strict";
|
"use strict";
|
||||||
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.RefKey = exports.Variables = exports.Events = exports.State = exports.Outputs = exports.Inputs = void 0;
|
exports.RefKey = exports.Events = exports.State = exports.Outputs = exports.Inputs = void 0;
|
||||||
var Inputs;
|
var Inputs;
|
||||||
(function (Inputs) {
|
(function (Inputs) {
|
||||||
Inputs["Key"] = "key";
|
Inputs["Key"] = "key";
|
||||||
Inputs["Path"] = "path";
|
Inputs["Path"] = "path";
|
||||||
Inputs["RestoreKeys"] = "restore-keys";
|
Inputs["RestoreKeys"] = "restore-keys";
|
||||||
Inputs["UploadChunkSize"] = "upload-chunk-size";
|
Inputs["UploadChunkSize"] = "upload-chunk-size";
|
||||||
Inputs["FailOnCacheMiss"] = "fail-on-cache-miss";
|
|
||||||
Inputs["SaveOnAnyFailure"] = "save-on-any-failure";
|
|
||||||
})(Inputs = exports.Inputs || (exports.Inputs = {}));
|
})(Inputs = exports.Inputs || (exports.Inputs = {}));
|
||||||
var Outputs;
|
var Outputs;
|
||||||
(function (Outputs) {
|
(function (Outputs) {
|
||||||
@@ -4989,10 +4963,6 @@ var Events;
|
|||||||
Events["Push"] = "push";
|
Events["Push"] = "push";
|
||||||
Events["PullRequest"] = "pull_request";
|
Events["PullRequest"] = "pull_request";
|
||||||
})(Events = exports.Events || (exports.Events = {}));
|
})(Events = exports.Events || (exports.Events = {}));
|
||||||
var Variables;
|
|
||||||
(function (Variables) {
|
|
||||||
Variables["SaveCacheOnAnyFailure"] = "SAVE_CACHE_ON_ANY_FAILURE";
|
|
||||||
})(Variables = exports.Variables || (exports.Variables = {}));
|
|
||||||
exports.RefKey = "GITHUB_REF";
|
exports.RefKey = "GITHUB_REF";
|
||||||
|
|
||||||
|
|
||||||
@@ -47311,7 +47281,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|||||||
const cache = __importStar(__webpack_require__(692));
|
const cache = __importStar(__webpack_require__(692));
|
||||||
const core = __importStar(__webpack_require__(470));
|
const core = __importStar(__webpack_require__(470));
|
||||||
const constants_1 = __webpack_require__(196);
|
const constants_1 = __webpack_require__(196);
|
||||||
const save_only_1 = __webpack_require__(973);
|
|
||||||
const utils = __importStar(__webpack_require__(443));
|
const utils = __importStar(__webpack_require__(443));
|
||||||
// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
|
// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
|
||||||
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
|
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
|
||||||
@@ -47329,9 +47298,7 @@ function run() {
|
|||||||
}
|
}
|
||||||
const state = utils.getCacheState();
|
const state = utils.getCacheState();
|
||||||
// Inputs are re-evaluted before the post action, so we want the original key used for restore
|
// Inputs are re-evaluted before the post action, so we want the original key used for restore
|
||||||
const primaryKey = save_only_1.saveOnly === true
|
const primaryKey = core.getState(constants_1.State.CachePrimaryKey);
|
||||||
? core.getInput(constants_1.Inputs.Key)
|
|
||||||
: core.getState(constants_1.State.CachePrimaryKey);
|
|
||||||
if (!primaryKey) {
|
if (!primaryKey) {
|
||||||
utils.logWarning(`Error retrieving key from state.`);
|
utils.logWarning(`Error retrieving key from state.`);
|
||||||
return;
|
return;
|
||||||
@@ -47355,6 +47322,7 @@ function run() {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
run();
|
||||||
exports.default = run;
|
exports.default = run;
|
||||||
|
|
||||||
|
|
||||||
@@ -55224,67 +55192,7 @@ exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
 //# sourceMappingURL=internal-path-helper.js.map
 
 /***/ }),
-/* 973 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-        desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
-    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
-    return new (P || (P = Promise))(function (resolve, reject) {
-        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
-        step((generator = generator.apply(thisArg, _arguments || [])).next());
-    });
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.saveOnly = void 0;
-const core = __importStar(__webpack_require__(470));
-const constants_1 = __webpack_require__(196);
-const save_1 = __importDefault(__webpack_require__(681));
-const utils = __importStar(__webpack_require__(443));
-function runSaveAction() {
-    return __awaiter(this, void 0, void 0, function* () {
-        if (!core.getInput(constants_1.Inputs.Key)) {
-            utils.logWarning(`Error retrieving key from inputs.`);
-            return;
-        }
-        exports.saveOnly = true;
-        yield (0, save_1.default)();
-    });
-}
-runSaveAction();
-exports.default = runSaveAction;
-
-
-/***/ }),
+/* 973 */,
 /* 974 */,
 /* 975 */
 /***/ (function(__unusedmodule, exports) {
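
Everything in module 973 above is ncc's bundling of the now-removed src/save-only.ts, preceded by the standard TypeScript down-level helpers (__createBinding, __importStar, __awaiter, and so on). For readers unfamiliar with that output, the __awaiter/generator wrapper is simply what the compiler emits for an async function when targeting an ECMAScript version older than 2017; a minimal illustration, not taken from this repository:

// Source as written:
async function greet(): Promise<string> {
    return "hello";
}

// Roughly what tsc emits for older targets: the async body becomes a
// generator driven by the __awaiter helper seen in the removed module above.
// var greet = function () {
//     return __awaiter(this, void 0, void 0, function* () {
//         return "hello";
//     });
// };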
33 package-lock.json generated
@@ -4055,6 +4055,18 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
+    "node_modules/eslint-plugin-import/node_modules/minimatch": {
+      "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
+      "dev": true,
+      "dependencies": {
+        "brace-expansion": "^1.1.7"
+      },
+      "engines": {
+        "node": "*"
+      }
+    },
     "node_modules/eslint-plugin-import/node_modules/ms": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
@@ -8117,9 +8129,9 @@
       }
     },
     "node_modules/minimatch": {
-      "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
+      "version": "3.0.4",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
+      "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
       "dependencies": {
         "brace-expansion": "^1.1.7"
       },
@@ -12742,6 +12754,15 @@
         "has": "^1.0.3"
       }
     },
+    "minimatch": {
+      "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
+      "dev": true,
+      "requires": {
+        "brace-expansion": "^1.1.7"
+      }
+    },
     "ms": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
@@ -15867,9 +15888,9 @@
       "dev": true
     },
     "minimatch": {
-      "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
+      "version": "3.0.4",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
+      "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
       "requires": {
         "brace-expansion": "^1.1.7"
       }
@@ -5,7 +5,7 @@
   "description": "Cache dependencies and build outputs",
   "main": "dist/restore/index.js",
   "scripts": {
-    "build": "tsc && ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts && ncc build -o dist/save-only src/save-only.ts",
+    "build": "tsc && ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts",
     "test": "tsc --noEmit && jest --coverage",
     "lint": "eslint **/*.ts --cache",
     "format": "prettier --write **/*.ts",
@@ -1,27 +0,0 @@
-name: 'Restore Cache'
-description: 'Restore Cache artifacts like dependencies and build outputs to improve workflow execution time'
-author: 'GitHub'
-inputs:
-  path:
-    description: 'A list of files, directories, and wildcard patterns to cache and restore'
-    required: true
-  key:
-    description: 'An explicit key for restoring and saving the cache'
-    required: true
-  restore-keys:
-    description: 'An ordered list of keys to use for restoring stale cache if no cache hit occurred for key. Note `cache-hit` returns false in this case.'
-    required: false
-  exit-on-cache-miss:
-    description: 'Fail the workflow if the cache is not found for the primary key'
-    required: false
-    default: false
-outputs:
-  cache-hit:
-    description: 'A boolean value to indicate an exact match was found for the primary key'
-runs:
-  using: 'node16'
-  main: '../dist/restore/index.js'
-branding:
-  icon: 'archive'
-  color: 'gray-dark'
@@ -1,19 +0,0 @@
-name: 'Save Cache'
-description: 'Save Cache artifacts like dependencies and build outputs to improve workflow execution time'
-author: 'GitHub'
-inputs:
-  path:
-    description: 'A list of files, directories, and wildcard patterns to cache and restore'
-    required: true
-  key:
-    description: 'An explicit key for restoring and saving the cache'
-    required: true
-  upload-chunk-size:
-    description: 'The chunk size used to split up large files during upload, in bytes'
-    required: false
-runs:
-  using: 'node16'
-  main: '../dist/save/index.js'
-branding:
-  icon: 'archive'
-  color: 'gray-dark'
@@ -2,9 +2,7 @@ export enum Inputs {
     Key = "key",
     Path = "path",
     RestoreKeys = "restore-keys",
-    UploadChunkSize = "upload-chunk-size",
-    FailOnCacheMiss = "fail-on-cache-miss",
-    SaveOnAnyFailure = "save-on-any-failure"
+    UploadChunkSize = "upload-chunk-size"
 }
 
 export enum Outputs {
@@ -22,8 +20,4 @@ export enum Events {
     PullRequest = "pull_request"
 }
 
-export enum Variables {
-    SaveCacheOnAnyFailure = "SAVE_CACHE_ON_ANY_FAILURE"
-}
-
 export const RefKey = "GITHUB_REF";
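
Taken together, the right-hand side of this compare leaves src/constants.ts with only the four long-standing inputs and no Variables enum. A sketch of the resulting declarations, reconstructed from the diff context above:

export enum Inputs {
    Key = "key",
    Path = "path",
    RestoreKeys = "restore-keys",
    UploadChunkSize = "upload-chunk-size"
}

export const RefKey = "GITHUB_REF";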
@@ -1,7 +1,7 @@
 import * as cache from "@actions/cache";
 import * as core from "@actions/core";
 
-import { Events, Inputs, State, Variables } from "./constants";
+import { Events, Inputs, State } from "./constants";
 import * as utils from "./utils/actionUtils";
 
 async function run(): Promise<void> {
@@ -35,46 +35,22 @@ async function run(): Promise<void> {
             restoreKeys
         );
 
-        //Check if user wants to save cache despite of failure in any previous job
-        const saveCache = core.getBooleanInput(Inputs.SaveOnAnyFailure);
-        if (saveCache == true) {
-            core.debug(
-                `Exporting environment variable ${Variables.SaveCacheOnAnyFailure}`
-            );
-            core.exportVariable(Variables.SaveCacheOnAnyFailure, saveCache);
-            core.info(
-                `Input Variable ${Variables.SaveCacheOnAnyFailure} is set to true, the cache will be saved despite of any failure in the build.`
-            );
-        }
-
         if (!cacheKey) {
-            if (core.getBooleanInput(Inputs.FailOnCacheMiss) == true) {
-                throw new Error(
-                    `Cache with the given input key ${primaryKey} is not found, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`
-                );
-            }
             core.info(
                 `Cache not found for input keys: ${[
                     primaryKey,
                     ...restoreKeys
                 ].join(", ")}`
             );
 
             return;
         }
 
         // Store the matched cache key
         utils.setCacheState(cacheKey);
 
         const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
         utils.setCacheHitOutput(isExactKeyMatch);
 
-        if (
-            !isExactKeyMatch &&
-            core.getBooleanInput(Inputs.FailOnCacheMiss) == true
-        ) {
-            throw new Error(
-                `Restored cache key doesn't match the given input key ${primaryKey}, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`
-            );
-        }
         core.info(`Cache restored from key: ${cacheKey}`);
     } catch (error: unknown) {
         core.setFailed((error as Error).message);
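
This hunk is where the branch wired fail-on-cache-miss and save-on-any-failure into the restore step, and the right-hand side drops both. A condensed sketch of the removed behaviour, with the identifiers taken from the diff but the surrounding lookup of primaryKey and cacheKey stubbed into a standalone function:

import * as core from "@actions/core";

// Condensed, illustrative restatement of the removed restore-side logic.
function handleRemovedOptions(primaryKey: string, cacheKey: string | undefined): void {
    // save-on-any-failure: export an environment variable so the post step
    // still saves the cache when a later job step fails.
    if (core.getBooleanInput("save-on-any-failure")) {
        core.exportVariable("SAVE_CACHE_ON_ANY_FAILURE", "true");
    }
    // fail-on-cache-miss: a miss on the primary key aborts the workflow
    // instead of letting it continue with a cold cache.
    if (!cacheKey && core.getBooleanInput("fail-on-cache-miss")) {
        throw new Error(`Cache with the given input key ${primaryKey} is not found`);
    }
}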
@@ -1,20 +0,0 @@
-import * as core from "@actions/core";
-
-import { Inputs } from "./constants";
-import save from "./save";
-import * as utils from "./utils/actionUtils";
-
-async function runSaveAction(): Promise<void> {
-    if (!core.getInput(Inputs.Key)) {
-        utils.logWarning(`Error retrieving key from inputs.`);
-        return;
-    }
-    saveOnly = true;
-
-    await save();
-}
-
-runSaveAction();
-
-export default runSaveAction;
-export let saveOnly: boolean;
@@ -2,7 +2,6 @@ import * as cache from "@actions/cache";
 import * as core from "@actions/core";
 
 import { Events, Inputs, State } from "./constants";
-import { saveOnly } from "./save-only";
 import * as utils from "./utils/actionUtils";
 
 // Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
@@ -28,11 +27,7 @@ async function run(): Promise<void> {
     const state = utils.getCacheState();
 
     // Inputs are re-evaluted before the post action, so we want the original key used for restore
-    const primaryKey =
-        saveOnly === true
-            ? core.getInput(Inputs.Key)
-            : core.getState(State.CachePrimaryKey);
-
+    const primaryKey = core.getState(State.CachePrimaryKey);
     if (!primaryKey) {
         utils.logWarning(`Error retrieving key from state.`);
         return;
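
The replacement line relies on the state channel between the action's main and post steps: the restore step records the resolved key, and this post step reads it back because inputs are re-evaluated in between. A minimal sketch of that round-trip using @actions/core, assuming State.CachePrimaryKey resolves to the string "CACHE_KEY" (the helper names below are illustrative, not the action's actual functions):

import * as core from "@actions/core";

const CACHE_PRIMARY_KEY = "CACHE_KEY"; // assumed value of State.CachePrimaryKey

// Called during the main (restore) step.
export function rememberPrimaryKey(primaryKey: string): void {
    core.saveState(CACHE_PRIMARY_KEY, primaryKey);
}

// Called during the post (save) step; core.getState returns "" when the
// state was never written, which the caller above treats as "nothing to
// save" and logs a warning.
export function recallPrimaryKey(): string {
    return core.getState(CACHE_PRIMARY_KEY);
}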
@@ -61,4 +56,6 @@ async function run(): Promise<void> {
     }
 }
 
+run();
+
 export default run;
@@ -13,28 +13,18 @@ interface CacheInput {
     path: string;
     key: string;
     restoreKeys?: string[];
-    failOnCacheMiss?: boolean;
-    saveOnAnyFailure?: boolean;
 }
 
 export function setInputs(input: CacheInput): void {
     setInput(Inputs.Path, input.path);
     setInput(Inputs.Key, input.key);
-    setInput(Inputs.SaveOnAnyFailure, "false");
-    setInput(Inputs.FailOnCacheMiss, "false");
     input.restoreKeys &&
         setInput(Inputs.RestoreKeys, input.restoreKeys.join("\n"));
-    input.failOnCacheMiss &&
-        setInput(Inputs.FailOnCacheMiss, String(input.failOnCacheMiss));
-    input.saveOnAnyFailure &&
-        setInput(Inputs.SaveOnAnyFailure, String(input.saveOnAnyFailure));
 }
 
 export function clearInputs(): void {
     delete process.env[getInputName(Inputs.Path)];
     delete process.env[getInputName(Inputs.Key)];
     delete process.env[getInputName(Inputs.RestoreKeys)];
-    delete process.env[getInputName(Inputs.FailOnCacheMiss)];
-    delete process.env[getInputName(Inputs.SaveOnAnyFailure)];
     delete process.env[getInputName(Inputs.UploadChunkSize)];
 }
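
The setInput and getInputName helpers referenced in this test file are defined outside the hunk; they presumably wrap the INPUT_* environment-variable convention that @actions/core uses to read action inputs. A hedged sketch of what such helpers typically look like (the names match the diff, the bodies are assumptions):

// Assumed implementations: @actions/core reads an input named "restore-keys"
// from process.env["INPUT_RESTORE-KEYS"], replacing spaces with underscores
// and upper-casing the name.
function getInputName(name: string): string {
    return `INPUT_${name.replace(/ /g, "_").toUpperCase()}`;
}

function setInput(name: string, value: string): void {
    process.env[getInputName(name)] = value;
}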