Mirror of https://code.forgejo.org/actions/cache.git (synced 2024-11-30 23:59:16 +01:00)
Add support to opt-in enable cross-os caching on windows (#1056)
* Add support to opt-in enable cross-os caching on windows
* Fix tests
* Address review comments and update tests
* Fix tests
* Address review comments
* Address review comments
parent 1f414295fe
commit 6fd2d4538c
22 changed files with 1172 additions and 496 deletions
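For orientation, here is a minimal usage sketch of the opt-in this commit introduces (not part of the diff itself). The `enableCrossOsArchive` input and its string default of 'false' come from the action.yml change further down; the job names, paths, cache key, and the @v3 version tag are illustrative assumptions. Per the getCacheVersion change below, the Windows job must opt in so its cache version is not marked windows-only; setting the same input on the Linux job is harmless and keeps the two steps symmetric.

on: push
jobs:
  build-on-windows:
    runs-on: windows-latest
    steps:
      # Save (and restore) the cache with cross-OS archives enabled.
      - uses: actions/cache@v3
        with:
          path: shared-output
          key: shared-${{ github.sha }}
          enableCrossOsArchive: true
  reuse-on-linux:
    needs: build-on-windows
    runs-on: ubuntu-latest
    steps:
      # The same key lets this Linux job restore the Windows-created cache.
      - uses: actions/cache@v3
        with:
          path: shared-output
          key: shared-${{ github.sha }}
          enableCrossOsArchive: true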
2 .licenses/npm/@actions/cache.dep.yml (generated)
@@ -1,6 +1,6 @@
 ---
 name: "@actions/cache"
-version: 3.1.1
+version: 3.1.2
 type: npm
 summary:
 homepage:
@@ -174,6 +174,26 @@ test("getInputAsInt throws if required and value missing", () => {
     ).toThrowError();
 });

+test("getInputAsBool returns false if input not set", () => {
+    expect(actionUtils.getInputAsBool("undefined")).toBe(false);
+});
+
+test("getInputAsBool returns value if input is valid", () => {
+    testUtils.setInput("foo", "true");
+    expect(actionUtils.getInputAsBool("foo")).toBe(true);
+});
+
+test("getInputAsBool returns false if input is invalid or NaN", () => {
+    testUtils.setInput("foo", "bar");
+    expect(actionUtils.getInputAsBool("foo")).toBe(false);
+});
+
+test("getInputAsBool throws if required and value missing", () => {
+    expect(() =>
+        actionUtils.getInputAsBool("undefined2", { required: true })
+    ).toThrowError();
+});
+
 test("isCacheFeatureAvailable for ac enabled", () => {
     jest.spyOn(cache, "isFeatureAvailable").mockImplementation(() => true);
@@ -27,9 +27,17 @@ beforeAll(() => {
             return actualUtils.getInputAsArray(name, options);
         }
     );
+
+    jest.spyOn(actionUtils, "getInputAsBool").mockImplementation(
+        (name, options) => {
+            const actualUtils = jest.requireActual("../src/utils/actionUtils");
+            return actualUtils.getInputAsBool(name, options);
+        }
+    );
 });

 beforeEach(() => {
+    jest.restoreAllMocks();
     process.env[Events.Key] = Events.Push;
     process.env[RefKey] = "refs/heads/feature-branch";

@@ -50,7 +58,8 @@ test("restore with no cache found", async () => {
     const key = "node-test";
     testUtils.setInputs({
         path: path,
-        key
+        key,
+        enableCrossOsArchive: false
     });

     const infoMock = jest.spyOn(core, "info");
@@ -65,7 +74,7 @@ test("restore with no cache found", async () => {
     await run();

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);

     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
     expect(stateMock).toHaveBeenCalledTimes(1);
@@ -84,7 +93,8 @@ test("restore with restore keys and no cache found", async () => {
     testUtils.setInputs({
         path: path,
         key,
-        restoreKeys: [restoreKey]
+        restoreKeys: [restoreKey],
+        enableCrossOsArchive: false
     });

     const infoMock = jest.spyOn(core, "info");
@@ -99,7 +109,13 @@ test("restore with restore keys and no cache found", async () => {
     await run();

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
+    expect(restoreCacheMock).toHaveBeenCalledWith(
+        [path],
+        key,
+        [restoreKey],
+        {},
+        false
+    );

     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
     expect(stateMock).toHaveBeenCalledTimes(1);
@@ -116,7 +132,8 @@ test("restore with cache found for key", async () => {
     const key = "node-test";
     testUtils.setInputs({
         path: path,
-        key
+        key,
+        enableCrossOsArchive: false
     });

     const infoMock = jest.spyOn(core, "info");
@@ -132,7 +149,7 @@ test("restore with cache found for key", async () => {
     await run();

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);

     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
     expect(stateMock).toHaveBeenCalledWith("CACHE_RESULT", key);
@@ -152,7 +169,8 @@ test("restore with cache found for restore key", async () => {
     testUtils.setInputs({
         path: path,
         key,
-        restoreKeys: [restoreKey]
+        restoreKeys: [restoreKey],
+        enableCrossOsArchive: false
     });

     const infoMock = jest.spyOn(core, "info");
@@ -168,7 +186,13 @@ test("restore with cache found for restore key", async () => {
     await run();

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
+    expect(restoreCacheMock).toHaveBeenCalledWith(
+        [path],
+        key,
+        [restoreKey],
+        {},
+        false
+    );

     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
     expect(stateMock).toHaveBeenCalledWith("CACHE_RESULT", restoreKey);
|
@ -28,9 +28,17 @@ beforeAll(() => {
|
||||||
return actualUtils.getInputAsArray(name, options);
|
return actualUtils.getInputAsArray(name, options);
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
|
||||||
|
jest.spyOn(actionUtils, "getInputAsBool").mockImplementation(
|
||||||
|
(name, options) => {
|
||||||
|
const actualUtils = jest.requireActual("../src/utils/actionUtils");
|
||||||
|
return actualUtils.getInputAsBool(name, options);
|
||||||
|
}
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
|
jest.restoreAllMocks();
|
||||||
process.env[Events.Key] = Events.Push;
|
process.env[Events.Key] = Events.Push;
|
||||||
process.env[RefKey] = "refs/heads/feature-branch";
|
process.env[RefKey] = "refs/heads/feature-branch";
|
||||||
|
|
||||||
|
@ -97,7 +105,8 @@ test("restore on GHES with AC available ", async () => {
|
||||||
const key = "node-test";
|
const key = "node-test";
|
||||||
testUtils.setInputs({
|
testUtils.setInputs({
|
||||||
path: path,
|
path: path,
|
||||||
key
|
key,
|
||||||
|
enableCrossOsArchive: false
|
||||||
});
|
});
|
||||||
|
|
||||||
const infoMock = jest.spyOn(core, "info");
|
const infoMock = jest.spyOn(core, "info");
|
||||||
|
@ -113,7 +122,7 @@ test("restore on GHES with AC available ", async () => {
|
||||||
await run(new StateProvider());
|
await run(new StateProvider());
|
||||||
|
|
||||||
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
||||||
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
|
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);
|
||||||
|
|
||||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||||
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
||||||
|
@ -152,13 +161,20 @@ test("restore with too many keys should fail", async () => {
|
||||||
testUtils.setInputs({
|
testUtils.setInputs({
|
||||||
path: path,
|
path: path,
|
||||||
key,
|
key,
|
||||||
restoreKeys
|
restoreKeys,
|
||||||
|
enableCrossOsArchive: false
|
||||||
});
|
});
|
||||||
const failedMock = jest.spyOn(core, "setFailed");
|
const failedMock = jest.spyOn(core, "setFailed");
|
||||||
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
|
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
|
||||||
await run(new StateProvider());
|
await run(new StateProvider());
|
||||||
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
||||||
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, restoreKeys);
|
expect(restoreCacheMock).toHaveBeenCalledWith(
|
||||||
|
[path],
|
||||||
|
key,
|
||||||
|
restoreKeys,
|
||||||
|
{},
|
||||||
|
false
|
||||||
|
);
|
||||||
expect(failedMock).toHaveBeenCalledWith(
|
expect(failedMock).toHaveBeenCalledWith(
|
||||||
`Key Validation Error: Keys are limited to a maximum of 10.`
|
`Key Validation Error: Keys are limited to a maximum of 10.`
|
||||||
);
|
);
|
||||||
|
@ -169,13 +185,14 @@ test("restore with large key should fail", async () => {
|
||||||
const key = "foo".repeat(512); // Over the 512 character limit
|
const key = "foo".repeat(512); // Over the 512 character limit
|
||||||
testUtils.setInputs({
|
testUtils.setInputs({
|
||||||
path: path,
|
path: path,
|
||||||
key
|
key,
|
||||||
|
enableCrossOsArchive: false
|
||||||
});
|
});
|
||||||
const failedMock = jest.spyOn(core, "setFailed");
|
const failedMock = jest.spyOn(core, "setFailed");
|
||||||
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
|
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
|
||||||
await run(new StateProvider());
|
await run(new StateProvider());
|
||||||
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
||||||
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
|
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);
|
||||||
expect(failedMock).toHaveBeenCalledWith(
|
expect(failedMock).toHaveBeenCalledWith(
|
||||||
`Key Validation Error: ${key} cannot be larger than 512 characters.`
|
`Key Validation Error: ${key} cannot be larger than 512 characters.`
|
||||||
);
|
);
|
||||||
|
@ -186,13 +203,14 @@ test("restore with invalid key should fail", async () => {
|
||||||
const key = "comma,comma";
|
const key = "comma,comma";
|
||||||
testUtils.setInputs({
|
testUtils.setInputs({
|
||||||
path: path,
|
path: path,
|
||||||
key
|
key,
|
||||||
|
enableCrossOsArchive: false
|
||||||
});
|
});
|
||||||
const failedMock = jest.spyOn(core, "setFailed");
|
const failedMock = jest.spyOn(core, "setFailed");
|
||||||
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
|
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
|
||||||
await run(new StateProvider());
|
await run(new StateProvider());
|
||||||
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
||||||
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
|
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);
|
||||||
expect(failedMock).toHaveBeenCalledWith(
|
expect(failedMock).toHaveBeenCalledWith(
|
||||||
`Key Validation Error: ${key} cannot contain commas.`
|
`Key Validation Error: ${key} cannot contain commas.`
|
||||||
);
|
);
|
||||||
|
@ -203,7 +221,8 @@ test("restore with no cache found", async () => {
|
||||||
const key = "node-test";
|
const key = "node-test";
|
||||||
testUtils.setInputs({
|
testUtils.setInputs({
|
||||||
path: path,
|
path: path,
|
||||||
key
|
key,
|
||||||
|
enableCrossOsArchive: false
|
||||||
});
|
});
|
||||||
|
|
||||||
const infoMock = jest.spyOn(core, "info");
|
const infoMock = jest.spyOn(core, "info");
|
||||||
|
@ -218,7 +237,7 @@ test("restore with no cache found", async () => {
|
||||||
await run(new StateProvider());
|
await run(new StateProvider());
|
||||||
|
|
||||||
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
||||||
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
|
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);
|
||||||
|
|
||||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||||
|
@ -235,7 +254,8 @@ test("restore with restore keys and no cache found", async () => {
|
||||||
testUtils.setInputs({
|
testUtils.setInputs({
|
||||||
path: path,
|
path: path,
|
||||||
key,
|
key,
|
||||||
restoreKeys: [restoreKey]
|
restoreKeys: [restoreKey],
|
||||||
|
enableCrossOsArchive: false
|
||||||
});
|
});
|
||||||
|
|
||||||
const infoMock = jest.spyOn(core, "info");
|
const infoMock = jest.spyOn(core, "info");
|
||||||
|
@ -250,7 +270,13 @@ test("restore with restore keys and no cache found", async () => {
|
||||||
await run(new StateProvider());
|
await run(new StateProvider());
|
||||||
|
|
||||||
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
||||||
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
|
expect(restoreCacheMock).toHaveBeenCalledWith(
|
||||||
|
[path],
|
||||||
|
key,
|
||||||
|
[restoreKey],
|
||||||
|
{},
|
||||||
|
false
|
||||||
|
);
|
||||||
|
|
||||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||||
|
@ -265,7 +291,8 @@ test("restore with cache found for key", async () => {
|
||||||
const key = "node-test";
|
const key = "node-test";
|
||||||
testUtils.setInputs({
|
testUtils.setInputs({
|
||||||
path: path,
|
path: path,
|
||||||
key
|
key,
|
||||||
|
enableCrossOsArchive: false
|
||||||
});
|
});
|
||||||
|
|
||||||
const infoMock = jest.spyOn(core, "info");
|
const infoMock = jest.spyOn(core, "info");
|
||||||
|
@ -281,7 +308,7 @@ test("restore with cache found for key", async () => {
|
||||||
await run(new StateProvider());
|
await run(new StateProvider());
|
||||||
|
|
||||||
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
||||||
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
|
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);
|
||||||
|
|
||||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||||
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
||||||
|
@ -298,7 +325,8 @@ test("restore with cache found for restore key", async () => {
|
||||||
testUtils.setInputs({
|
testUtils.setInputs({
|
||||||
path: path,
|
path: path,
|
||||||
key,
|
key,
|
||||||
restoreKeys: [restoreKey]
|
restoreKeys: [restoreKey],
|
||||||
|
enableCrossOsArchive: false
|
||||||
});
|
});
|
||||||
|
|
||||||
const infoMock = jest.spyOn(core, "info");
|
const infoMock = jest.spyOn(core, "info");
|
||||||
|
@ -314,7 +342,13 @@ test("restore with cache found for restore key", async () => {
|
||||||
await run(new StateProvider());
|
await run(new StateProvider());
|
||||||
|
|
||||||
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
||||||
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
|
expect(restoreCacheMock).toHaveBeenCalledWith(
|
||||||
|
[path],
|
||||||
|
key,
|
||||||
|
[restoreKey],
|
||||||
|
{},
|
||||||
|
false
|
||||||
|
);
|
||||||
|
|
||||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||||
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
||||||
|
|
|
@ -27,9 +27,18 @@ beforeAll(() => {
|
||||||
return actualUtils.getInputAsArray(name, options);
|
return actualUtils.getInputAsArray(name, options);
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
|
||||||
|
jest.spyOn(actionUtils, "getInputAsBool").mockImplementation(
|
||||||
|
(name, options) => {
|
||||||
|
return jest
|
||||||
|
.requireActual("../src/utils/actionUtils")
|
||||||
|
.getInputAsBool(name, options);
|
||||||
|
}
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
|
jest.restoreAllMocks();
|
||||||
process.env[Events.Key] = Events.Push;
|
process.env[Events.Key] = Events.Push;
|
||||||
process.env[RefKey] = "refs/heads/feature-branch";
|
process.env[RefKey] = "refs/heads/feature-branch";
|
||||||
|
|
||||||
|
@ -50,7 +59,8 @@ test("restore with no cache found", async () => {
|
||||||
const key = "node-test";
|
const key = "node-test";
|
||||||
testUtils.setInputs({
|
testUtils.setInputs({
|
||||||
path: path,
|
path: path,
|
||||||
key
|
key,
|
||||||
|
enableCrossOsArchive: false
|
||||||
});
|
});
|
||||||
|
|
||||||
const infoMock = jest.spyOn(core, "info");
|
const infoMock = jest.spyOn(core, "info");
|
||||||
|
@ -65,7 +75,7 @@ test("restore with no cache found", async () => {
|
||||||
await run();
|
await run();
|
||||||
|
|
||||||
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
||||||
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
|
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);
|
||||||
|
|
||||||
expect(outputMock).toHaveBeenCalledWith("cache-primary-key", key);
|
expect(outputMock).toHaveBeenCalledWith("cache-primary-key", key);
|
||||||
expect(outputMock).toHaveBeenCalledTimes(1);
|
expect(outputMock).toHaveBeenCalledTimes(1);
|
||||||
|
@ -83,7 +93,8 @@ test("restore with restore keys and no cache found", async () => {
|
||||||
testUtils.setInputs({
|
testUtils.setInputs({
|
||||||
path: path,
|
path: path,
|
||||||
key,
|
key,
|
||||||
restoreKeys: [restoreKey]
|
restoreKeys: [restoreKey],
|
||||||
|
enableCrossOsArchive: false
|
||||||
});
|
});
|
||||||
|
|
||||||
const infoMock = jest.spyOn(core, "info");
|
const infoMock = jest.spyOn(core, "info");
|
||||||
|
@ -98,7 +109,13 @@ test("restore with restore keys and no cache found", async () => {
|
||||||
await run();
|
await run();
|
||||||
|
|
||||||
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
||||||
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
|
expect(restoreCacheMock).toHaveBeenCalledWith(
|
||||||
|
[path],
|
||||||
|
key,
|
||||||
|
[restoreKey],
|
||||||
|
{},
|
||||||
|
false
|
||||||
|
);
|
||||||
|
|
||||||
expect(outputMock).toHaveBeenCalledWith("cache-primary-key", key);
|
expect(outputMock).toHaveBeenCalledWith("cache-primary-key", key);
|
||||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||||
|
@ -113,7 +130,8 @@ test("restore with cache found for key", async () => {
|
||||||
const key = "node-test";
|
const key = "node-test";
|
||||||
testUtils.setInputs({
|
testUtils.setInputs({
|
||||||
path: path,
|
path: path,
|
||||||
key
|
key,
|
||||||
|
enableCrossOsArchive: false
|
||||||
});
|
});
|
||||||
|
|
||||||
const infoMock = jest.spyOn(core, "info");
|
const infoMock = jest.spyOn(core, "info");
|
||||||
|
@ -128,7 +146,7 @@ test("restore with cache found for key", async () => {
|
||||||
await run();
|
await run();
|
||||||
|
|
||||||
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
||||||
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
|
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);
|
||||||
|
|
||||||
expect(outputMock).toHaveBeenCalledWith("cache-primary-key", key);
|
expect(outputMock).toHaveBeenCalledWith("cache-primary-key", key);
|
||||||
expect(outputMock).toHaveBeenCalledWith("cache-hit", "true");
|
expect(outputMock).toHaveBeenCalledWith("cache-hit", "true");
|
||||||
|
@ -147,7 +165,8 @@ test("restore with cache found for restore key", async () => {
|
||||||
testUtils.setInputs({
|
testUtils.setInputs({
|
||||||
path: path,
|
path: path,
|
||||||
key,
|
key,
|
||||||
restoreKeys: [restoreKey]
|
restoreKeys: [restoreKey],
|
||||||
|
enableCrossOsArchive: false
|
||||||
});
|
});
|
||||||
|
|
||||||
const infoMock = jest.spyOn(core, "info");
|
const infoMock = jest.spyOn(core, "info");
|
||||||
|
@ -162,7 +181,13 @@ test("restore with cache found for restore key", async () => {
|
||||||
await run();
|
await run();
|
||||||
|
|
||||||
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
||||||
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
|
expect(restoreCacheMock).toHaveBeenCalledWith(
|
||||||
|
[path],
|
||||||
|
key,
|
||||||
|
[restoreKey],
|
||||||
|
{},
|
||||||
|
false
|
||||||
|
);
|
||||||
|
|
||||||
expect(outputMock).toHaveBeenCalledWith("cache-primary-key", key);
|
expect(outputMock).toHaveBeenCalledWith("cache-primary-key", key);
|
||||||
expect(outputMock).toHaveBeenCalledWith("cache-hit", "false");
|
expect(outputMock).toHaveBeenCalledWith("cache-hit", "false");
|
||||||
|
|
|
@@ -35,6 +35,14 @@ beforeAll(() => {
         }
     );

+    jest.spyOn(actionUtils, "getInputAsBool").mockImplementation(
+        (name, options) => {
+            return jest
+                .requireActual("../src/utils/actionUtils")
+                .getInputAsBool(name, options);
+        }
+    );
+
     jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
         (key, cacheResult) => {
             return jest
@@ -95,9 +103,14 @@ test("save with valid inputs uploads a cache", async () => {
     await run();

     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, {
-        uploadChunkSize: 4000000
-    });
+    expect(saveCacheMock).toHaveBeenCalledWith(
+        [inputPath],
+        primaryKey,
+        {
+            uploadChunkSize: 4000000
+        },
+        false
+    );

     expect(failedMock).toHaveBeenCalledTimes(0);
 });
@ -32,6 +32,14 @@ beforeAll(() => {
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
|
||||||
|
jest.spyOn(actionUtils, "getInputAsBool").mockImplementation(
|
||||||
|
(name, options) => {
|
||||||
|
return jest
|
||||||
|
.requireActual("../src/utils/actionUtils")
|
||||||
|
.getInputAsBool(name, options);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
|
jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
|
||||||
(key, cacheResult) => {
|
(key, cacheResult) => {
|
||||||
return jest
|
return jest
|
||||||
|
@ -47,6 +55,7 @@ beforeAll(() => {
|
||||||
});
|
});
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
|
jest.restoreAllMocks();
|
||||||
process.env[Events.Key] = Events.Push;
|
process.env[Events.Key] = Events.Push;
|
||||||
process.env[RefKey] = "refs/heads/feature-branch";
|
process.env[RefKey] = "refs/heads/feature-branch";
|
||||||
|
|
||||||
|
@ -155,9 +164,14 @@ test("save on GHES with AC available", async () => {
|
||||||
await run(new StateProvider());
|
await run(new StateProvider());
|
||||||
|
|
||||||
expect(saveCacheMock).toHaveBeenCalledTimes(1);
|
expect(saveCacheMock).toHaveBeenCalledTimes(1);
|
||||||
expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, {
|
expect(saveCacheMock).toHaveBeenCalledWith(
|
||||||
uploadChunkSize: 4000000
|
[inputPath],
|
||||||
});
|
primaryKey,
|
||||||
|
{
|
||||||
|
uploadChunkSize: 4000000
|
||||||
|
},
|
||||||
|
false
|
||||||
|
);
|
||||||
|
|
||||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||||
});
|
});
|
||||||
|
@ -251,7 +265,8 @@ test("save with large cache outputs warning", async () => {
|
||||||
expect(saveCacheMock).toHaveBeenCalledWith(
|
expect(saveCacheMock).toHaveBeenCalledWith(
|
||||||
[inputPath],
|
[inputPath],
|
||||||
primaryKey,
|
primaryKey,
|
||||||
expect.anything()
|
expect.anything(),
|
||||||
|
false
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
||||||
|
@ -297,7 +312,8 @@ test("save with reserve cache failure outputs warning", async () => {
|
||||||
expect(saveCacheMock).toHaveBeenCalledWith(
|
expect(saveCacheMock).toHaveBeenCalledWith(
|
||||||
[inputPath],
|
[inputPath],
|
||||||
primaryKey,
|
primaryKey,
|
||||||
expect.anything()
|
expect.anything(),
|
||||||
|
false
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(logWarningMock).toHaveBeenCalledWith(
|
expect(logWarningMock).toHaveBeenCalledWith(
|
||||||
|
@ -339,7 +355,8 @@ test("save with server error outputs warning", async () => {
|
||||||
expect(saveCacheMock).toHaveBeenCalledWith(
|
expect(saveCacheMock).toHaveBeenCalledWith(
|
||||||
[inputPath],
|
[inputPath],
|
||||||
primaryKey,
|
primaryKey,
|
||||||
expect.anything()
|
expect.anything(),
|
||||||
|
false
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
||||||
|
@ -378,9 +395,14 @@ test("save with valid inputs uploads a cache", async () => {
|
||||||
await run(new StateProvider());
|
await run(new StateProvider());
|
||||||
|
|
||||||
expect(saveCacheMock).toHaveBeenCalledTimes(1);
|
expect(saveCacheMock).toHaveBeenCalledTimes(1);
|
||||||
expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, {
|
expect(saveCacheMock).toHaveBeenCalledWith(
|
||||||
uploadChunkSize: 4000000
|
[inputPath],
|
||||||
});
|
primaryKey,
|
||||||
|
{
|
||||||
|
uploadChunkSize: 4000000
|
||||||
|
},
|
||||||
|
false
|
||||||
|
);
|
||||||
|
|
||||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||||
});
|
});
|
||||||
|
|
|
@ -35,6 +35,14 @@ beforeAll(() => {
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
|
||||||
|
jest.spyOn(actionUtils, "getInputAsBool").mockImplementation(
|
||||||
|
(name, options) => {
|
||||||
|
return jest
|
||||||
|
.requireActual("../src/utils/actionUtils")
|
||||||
|
.getInputAsBool(name, options);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
|
jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
|
||||||
(key, cacheResult) => {
|
(key, cacheResult) => {
|
||||||
return jest
|
return jest
|
||||||
|
@ -85,9 +93,14 @@ test("save with valid inputs uploads a cache", async () => {
|
||||||
await run();
|
await run();
|
||||||
|
|
||||||
expect(saveCacheMock).toHaveBeenCalledTimes(1);
|
expect(saveCacheMock).toHaveBeenCalledTimes(1);
|
||||||
expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, {
|
expect(saveCacheMock).toHaveBeenCalledWith(
|
||||||
uploadChunkSize: 4000000
|
[inputPath],
|
||||||
});
|
primaryKey,
|
||||||
|
{
|
||||||
|
uploadChunkSize: 4000000
|
||||||
|
},
|
||||||
|
false
|
||||||
|
);
|
||||||
|
|
||||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||||
});
|
});
|
||||||
|
@ -112,9 +125,14 @@ test("save failing logs the warning message", async () => {
|
||||||
await run();
|
await run();
|
||||||
|
|
||||||
expect(saveCacheMock).toHaveBeenCalledTimes(1);
|
expect(saveCacheMock).toHaveBeenCalledTimes(1);
|
||||||
expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, {
|
expect(saveCacheMock).toHaveBeenCalledWith(
|
||||||
uploadChunkSize: 4000000
|
[inputPath],
|
||||||
});
|
primaryKey,
|
||||||
|
{
|
||||||
|
uploadChunkSize: 4000000
|
||||||
|
},
|
||||||
|
false
|
||||||
|
);
|
||||||
|
|
||||||
expect(warningMock).toHaveBeenCalledTimes(1);
|
expect(warningMock).toHaveBeenCalledTimes(1);
|
||||||
expect(warningMock).toHaveBeenCalledWith("Cache save failed.");
|
expect(warningMock).toHaveBeenCalledWith("Cache save failed.");
|
||||||
|
|
|
@@ -14,6 +14,10 @@ inputs:
   upload-chunk-size:
     description: 'The chunk size used to split up large files during upload, in bytes'
     required: false
+  enableCrossOsArchive:
+    description: 'An optional boolean when enabled, allows windows runners to save or restore caches that can be restored or saved respectively on other platforms'
+    default: 'false'
+    required: false
 outputs:
   cache-hit:
     description: 'A boolean value to indicate an exact match was found for the primary key'
335 dist/restore-only/index.js (vendored)
@@ -1177,10 +1177,6 @@ function getVersion(app) {
 // Use zstandard if possible to maximize cache performance
 function getCompressionMethod() {
     return __awaiter(this, void 0, void 0, function* () {
-        if (process.platform === 'win32' && !(yield isGnuTarInstalled())) {
-            // Disable zstd due to bug https://github.com/actions/cache/issues/301
-            return constants_1.CompressionMethod.Gzip;
-        }
         const versionOutput = yield getVersion('zstd');
         const version = semver.clean(versionOutput);
         if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
@@ -1204,13 +1200,16 @@ function getCacheFileName(compressionMethod) {
         : constants_1.CacheFilename.Zstd;
 }
 exports.getCacheFileName = getCacheFileName;
-function isGnuTarInstalled() {
+function getGnuTarPathOnWindows() {
     return __awaiter(this, void 0, void 0, function* () {
+        if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {
+            return constants_1.GnuTarPathOnWindows;
+        }
         const versionOutput = yield getVersion('tar');
-        return versionOutput.toLowerCase().includes('gnu tar');
+        return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
     });
 }
-exports.isGnuTarInstalled = isGnuTarInstalled;
+exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;
 function assertDefined(name, value) {
     if (value === undefined) {
         throw Error(`Expected ${name} but value was undefiend`);
@@ -3384,7 +3383,6 @@ const crypto = __importStar(__webpack_require__(417));
 const fs = __importStar(__webpack_require__(747));
 const url_1 = __webpack_require__(414);
 const utils = __importStar(__webpack_require__(15));
-const constants_1 = __webpack_require__(931);
 const downloadUtils_1 = __webpack_require__(251);
 const options_1 = __webpack_require__(538);
 const requestUtils_1 = __webpack_require__(899);
@@ -3414,10 +3412,17 @@ function createHttpClient() {
     const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
     return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
 }
-function getCacheVersion(paths, compressionMethod) {
-    const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip
-        ? []
-        : [compressionMethod]);
+function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
+    const components = paths;
+    // Add compression method to cache version to restore
+    // compressed cache as per compression method
+    if (compressionMethod) {
+        components.push(compressionMethod);
+    }
+    // Only check for windows platforms if enableCrossOsArchive is false
+    if (process.platform === 'win32' && !enableCrossOsArchive) {
+        components.push('windows-only');
+    }
     // Add salt to cache version to support breaking changes in cache entry
     components.push(versionSalt);
     return crypto
@@ -3429,9 +3434,10 @@ exports.getCacheVersion = getCacheVersion;
 function getCacheEntry(keys, paths, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
-        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
+        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
+        // Cache not found
         if (response.statusCode === 204) {
             // List cache for primary key only if cache miss occurs
             if (core.isDebug()) {
@@ -3445,6 +3451,7 @@ function getCacheEntry(keys, paths, options) {
         const cacheResult = response.result;
         const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
         if (!cacheDownloadUrl) {
+            // Cache achiveLocation not found. This should never happen, and hence bail out.
             throw new Error('Cache not found.');
         }
         core.setSecret(cacheDownloadUrl);
@@ -3490,7 +3497,7 @@ exports.downloadCache = downloadCache;
 function reserveCache(key, paths, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
-        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
+        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
         const reserveCacheRequest = {
             key,
             version,
@@ -4970,7 +4977,8 @@ var Inputs;
     Inputs["Key"] = "key";
     Inputs["Path"] = "path";
     Inputs["RestoreKeys"] = "restore-keys";
-    Inputs["UploadChunkSize"] = "upload-chunk-size"; // Input for cache, save action
+    Inputs["UploadChunkSize"] = "upload-chunk-size";
+    Inputs["EnableCrossOsArchive"] = "enableCrossOsArchive"; // Input for cache, restore, save action
 })(Inputs = exports.Inputs || (exports.Inputs = {}));
 var Outputs;
 (function (Outputs) {
@@ -10066,7 +10074,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
+exports.isCacheFeatureAvailable = exports.getInputAsBool = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
 const cache = __importStar(__webpack_require__(692));
 const core = __importStar(__webpack_require__(470));
 const constants_1 = __webpack_require__(196);
@@ -10109,6 +10117,11 @@ function getInputAsInt(name, options) {
     return value;
 }
 exports.getInputAsInt = getInputAsInt;
+function getInputAsBool(name, options) {
+    const result = core.getInput(name, options);
+    return result.toLowerCase() === "true";
+}
+exports.getInputAsBool = getInputAsBool;
 function isCacheFeatureAvailable() {
     if (cache.isFeatureAvailable()) {
         return true;
@ -38216,27 +38229,27 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const exec_1 = __webpack_require__(986);
|
const exec_1 = __webpack_require__(986);
|
||||||
|
const core_1 = __webpack_require__(470);
|
||||||
const io = __importStar(__webpack_require__(1));
|
const io = __importStar(__webpack_require__(1));
|
||||||
const fs_1 = __webpack_require__(747);
|
const fs_1 = __webpack_require__(747);
|
||||||
const path = __importStar(__webpack_require__(622));
|
const path = __importStar(__webpack_require__(622));
|
||||||
const utils = __importStar(__webpack_require__(15));
|
const utils = __importStar(__webpack_require__(15));
|
||||||
const constants_1 = __webpack_require__(931);
|
const constants_1 = __webpack_require__(931);
|
||||||
const IS_WINDOWS = process.platform === 'win32';
|
const IS_WINDOWS = process.platform === 'win32';
|
||||||
function getTarPath(args, compressionMethod) {
|
core_1.exportVariable('MSYS', 'winsymlinks:nativestrict');
|
||||||
|
// Returns tar path and type: BSD or GNU
|
||||||
|
function getTarPath() {
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
switch (process.platform) {
|
switch (process.platform) {
|
||||||
case 'win32': {
|
case 'win32': {
|
||||||
const systemTar = `${process.env['windir']}\\System32\\tar.exe`;
|
const gnuTar = yield utils.getGnuTarPathOnWindows();
|
||||||
if (compressionMethod !== constants_1.CompressionMethod.Gzip) {
|
const systemTar = constants_1.SystemTarPathOnWindows;
|
||||||
// We only use zstandard compression on windows when gnu tar is installed due to
|
if (gnuTar) {
|
||||||
// a bug with compressing large files with bsdtar + zstd
|
// Use GNUtar as default on windows
|
||||||
args.push('--force-local');
|
return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
|
||||||
}
|
}
|
||||||
else if (fs_1.existsSync(systemTar)) {
|
else if (fs_1.existsSync(systemTar)) {
|
||||||
return systemTar;
|
return { path: systemTar, type: constants_1.ArchiveToolType.BSD };
|
||||||
}
|
|
||||||
else if (yield utils.isGnuTarInstalled()) {
|
|
||||||
args.push('--force-local');
|
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
@ -38244,25 +38257,92 @@ function getTarPath(args, compressionMethod) {
|
||||||
const gnuTar = yield io.which('gtar', false);
|
const gnuTar = yield io.which('gtar', false);
|
||||||
if (gnuTar) {
|
if (gnuTar) {
|
||||||
// fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
|
// fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
|
||||||
args.push('--delay-directory-restore');
|
return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
|
||||||
return gnuTar;
|
}
|
||||||
|
else {
|
||||||
|
return {
|
||||||
|
path: yield io.which('tar', true),
|
||||||
|
type: constants_1.ArchiveToolType.BSD
|
||||||
|
};
|
||||||
}
|
}
|
||||||
break;
|
|
||||||
}
|
}
|
||||||
default:
|
default:
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
return yield io.which('tar', true);
|
// Default assumption is GNU tar is present in path
|
||||||
|
return {
|
||||||
|
path: yield io.which('tar', true),
|
||||||
|
type: constants_1.ArchiveToolType.GNU
|
||||||
|
};
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
function execTar(args, compressionMethod, cwd) {
|
// Return arguments for tar as per tarPath, compressionMethod, method type and os
|
||||||
|
function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
try {
|
const args = [`"${tarPath.path}"`];
|
||||||
yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd });
|
const cacheFileName = utils.getCacheFileName(compressionMethod);
|
||||||
|
const tarFile = 'cache.tar';
|
||||||
|
const workingDirectory = getWorkingDirectory();
|
||||||
|
// Speficic args for BSD tar on windows for workaround
|
||||||
|
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
|
||||||
|
compressionMethod !== constants_1.CompressionMethod.Gzip &&
|
||||||
|
IS_WINDOWS;
|
||||||
|
// Method specific args
|
||||||
|
switch (type) {
|
||||||
|
case 'create':
|
||||||
|
args.push('--posix', '-cf', BSD_TAR_ZSTD
|
||||||
|
? tarFile
|
||||||
|
: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD
|
||||||
|
? tarFile
|
||||||
|
: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename);
|
||||||
|
break;
|
||||||
|
case 'extract':
|
||||||
|
args.push('-xf', BSD_TAR_ZSTD
|
||||||
|
? tarFile
|
||||||
|
: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'));
|
||||||
|
break;
|
||||||
|
case 'list':
|
||||||
|
args.push('-tf', BSD_TAR_ZSTD
|
||||||
|
? tarFile
|
||||||
|
: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P');
|
||||||
|
break;
|
||||||
}
|
}
|
||||||
catch (error) {
|
// Platform specific args
|
||||||
throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
|
if (tarPath.type === constants_1.ArchiveToolType.GNU) {
|
||||||
|
switch (process.platform) {
|
||||||
|
case 'win32':
|
||||||
|
args.push('--force-local');
|
||||||
|
break;
|
||||||
|
case 'darwin':
|
||||||
|
args.push('--delay-directory-restore');
|
||||||
|
break;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
return args;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
// Returns commands to run tar and compression program
|
||||||
|
function getCommands(compressionMethod, type, archivePath = '') {
|
||||||
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
|
let args;
|
||||||
|
const tarPath = yield getTarPath();
|
||||||
|
const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
|
||||||
|
const compressionArgs = type !== 'create'
|
||||||
|
? yield getDecompressionProgram(tarPath, compressionMethod, archivePath)
|
||||||
|
: yield getCompressionProgram(tarPath, compressionMethod);
|
||||||
|
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
|
||||||
|
compressionMethod !== constants_1.CompressionMethod.Gzip &&
|
||||||
|
IS_WINDOWS;
|
||||||
|
if (BSD_TAR_ZSTD && type !== 'create') {
|
||||||
|
args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
|
||||||
|
}
|
||||||
|
if (BSD_TAR_ZSTD) {
|
||||||
|
return args;
|
||||||
|
}
|
||||||
|
return [args.join(' ')];
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
function getWorkingDirectory() {
|
function getWorkingDirectory() {
|
||||||
|
@ -38270,91 +38350,116 @@ function getWorkingDirectory() {
|
||||||
return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
|
return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
|
||||||
}
|
}
|
||||||
// Common function for extractTar and listTar to get the compression method
|
// Common function for extractTar and listTar to get the compression method
|
||||||
function getCompressionProgram(compressionMethod) {
|
function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
|
||||||
// -d: Decompress.
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
// unzstd is equivalent to 'zstd -d'
|
// -d: Decompress.
|
||||||
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
|
// unzstd is equivalent to 'zstd -d'
|
||||||
// Using 30 here because we also support 32-bit self-hosted runners.
|
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
|
||||||
switch (compressionMethod) {
|
// Using 30 here because we also support 32-bit self-hosted runners.
|
||||||
case constants_1.CompressionMethod.Zstd:
|
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
|
||||||
return [
|
compressionMethod !== constants_1.CompressionMethod.Gzip &&
|
||||||
'--use-compress-program',
|
IS_WINDOWS;
|
||||||
IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'
|
switch (compressionMethod) {
|
||||||
];
|
case constants_1.CompressionMethod.Zstd:
|
||||||
case constants_1.CompressionMethod.ZstdWithoutLong:
|
return BSD_TAR_ZSTD
|
||||||
return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'];
|
? [
|
||||||
default:
|
'zstd -d --long=30 --force -o',
|
||||||
return ['-z'];
|
constants_1.TarFilename,
|
||||||
}
|
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
|
||||||
|
]
|
||||||
|
: [
|
||||||
|
'--use-compress-program',
|
||||||
|
IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
|
||||||
|
];
|
||||||
|
case constants_1.CompressionMethod.ZstdWithoutLong:
|
||||||
|
return BSD_TAR_ZSTD
|
||||||
|
? [
|
||||||
|
'zstd -d --force -o',
|
||||||
|
constants_1.TarFilename,
|
||||||
|
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
|
||||||
|
]
|
||||||
|
: ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
|
||||||
|
default:
|
||||||
|
return ['-z'];
|
||||||
|
}
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
// Used for creating the archive
|
||||||
|
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
|
||||||
|
// zstdmt is equivalent to 'zstd -T0'
|
||||||
|
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
|
||||||
|
// Using 30 here because we also support 32-bit self-hosted runners.
|
||||||
|
// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
|
||||||
|
function getCompressionProgram(tarPath, compressionMethod) {
|
||||||
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
|
const cacheFileName = utils.getCacheFileName(compressionMethod);
|
||||||
|
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
|
||||||
|
compressionMethod !== constants_1.CompressionMethod.Gzip &&
|
||||||
|
IS_WINDOWS;
|
||||||
|
switch (compressionMethod) {
|
||||||
|
case constants_1.CompressionMethod.Zstd:
|
||||||
|
return BSD_TAR_ZSTD
|
||||||
|
? [
|
||||||
|
'zstd -T0 --long=30 --force -o',
|
||||||
|
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
|
||||||
|
constants_1.TarFilename
|
||||||
|
]
|
||||||
|
: [
|
||||||
|
'--use-compress-program',
|
||||||
|
IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
|
||||||
|
];
|
||||||
|
case constants_1.CompressionMethod.ZstdWithoutLong:
|
||||||
|
return BSD_TAR_ZSTD
|
||||||
|
? [
|
||||||
|
'zstd -T0 --force -o',
|
||||||
|
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
|
||||||
|
constants_1.TarFilename
|
||||||
|
]
|
||||||
|
: ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt'];
|
||||||
|
default:
|
||||||
|
return ['-z'];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
// Executes all commands as separate processes
|
||||||
|
function execCommands(commands, cwd) {
|
||||||
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
|
for (const command of commands) {
|
||||||
|
try {
|
||||||
|
yield exec_1.exec(command, undefined, { cwd });
|
||||||
|
}
|
||||||
|
catch (error) {
|
||||||
|
throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
// List the contents of a tar
|
||||||
function listTar(archivePath, compressionMethod) {
|
function listTar(archivePath, compressionMethod) {
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
const args = [
|
const commands = yield getCommands(compressionMethod, 'list', archivePath);
|
||||||
-            ...getCompressionProgram(compressionMethod),
-            '-tf',
-            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P'
-        ];
-        yield execTar(args, compressionMethod);
+        yield execCommands(commands);
     });
 }
 exports.listTar = listTar;
+// Extract a tar
 function extractTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
         const workingDirectory = getWorkingDirectory();
         yield io.mkdirP(workingDirectory);
-        const args = [
-            ...getCompressionProgram(compressionMethod),
-            '-xf',
-            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P',
-            '-C',
-            workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
-        ];
-        yield execTar(args, compressionMethod);
+        const commands = yield getCommands(compressionMethod, 'extract', archivePath);
+        yield execCommands(commands);
     });
 }
 exports.extractTar = extractTar;
+// Create a tar
 function createTar(archiveFolder, sourceDirectories, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Write source directories to manifest.txt to avoid command length limits
-        const manifestFilename = 'manifest.txt';
-        const cacheFileName = utils.getCacheFileName(compressionMethod);
-        fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n'));
-        const workingDirectory = getWorkingDirectory();
-        // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
-        // zstdmt is equivalent to 'zstd -T0'
-        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-        // Using 30 here because we also support 32-bit self-hosted runners.
-        // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
-        function getCompressionProgram() {
-            switch (compressionMethod) {
-                case constants_1.CompressionMethod.Zstd:
-                    return [
-                        '--use-compress-program',
-                        IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
-                    ];
-                case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
-                default:
-                    return ['-z'];
-            }
-        }
-        const args = [
-            '--posix',
-            ...getCompressionProgram(),
-            '-cf',
-            cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '--exclude',
-            cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P',
-            '-C',
-            workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '--files-from',
-            manifestFilename
-        ];
-        yield execTar(args, compressionMethod, archiveFolder);
+        fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
+        const commands = yield getCommands(compressionMethod, 'create');
+        yield execCommands(commands, archiveFolder);
     });
 }
 exports.createTar = createTar;
@@ -47150,9 +47255,10 @@ exports.isFeatureAvailable = isFeatureAvailable;
 * @param primaryKey an explicit key for restoring the cache
 * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
 * @param downloadOptions cache download options
+ * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
 * @returns string returns the key for the cache hit, otherwise returns undefined
 */
-function restoreCache(paths, primaryKey, restoreKeys, options) {
+function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
     return __awaiter(this, void 0, void 0, function* () {
         checkPaths(paths);
         restoreKeys = restoreKeys || [];
@@ -47170,7 +47276,8 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
         try {
             // path are needed to compute version
             const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
-                compressionMethod
+                compressionMethod,
+                enableCrossOsArchive
             });
             if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
                 // Cache not found
@@ -47217,10 +47324,11 @@ exports.restoreCache = restoreCache;
 *
 * @param paths a list of file paths to be cached
 * @param key an explicit key for restoring the cache
+ * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
 * @param options cache upload options
 * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
 */
-function saveCache(paths, key, options) {
+function saveCache(paths, key, options, enableCrossOsArchive = false) {
     var _a, _b, _c, _d, _e;
     return __awaiter(this, void 0, void 0, function* () {
         checkPaths(paths);
@@ -47251,6 +47359,7 @@ function saveCache(paths, key, options) {
             core.debug('Reserving Cache');
             const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
                 compressionMethod,
+                enableCrossOsArchive,
                 cacheSize: archiveFileSize
             });
             if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) {
@@ -50385,7 +50494,8 @@ function restoreImpl(stateProvider) {
         const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
             required: true
         });
-        const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys);
+        const enableCrossOsArchive = utils.getInputAsBool(constants_1.Inputs.EnableCrossOsArchive);
+        const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys, {}, enableCrossOsArchive);
         if (!cacheKey) {
             core.info(`Cache not found for input keys: ${[
                 primaryKey,
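The hunks above widen the library entry points with a trailing enableCrossOsArchive argument that defaults to false. A minimal sketch of how a caller could thread the flag through, assuming the @actions/cache 3.1.2 typings; the wrapper function names are illustrative only:

    import * as cache from "@actions/cache";

    // Restore: the fifth positional argument is the new enableCrossOsArchive flag.
    async function restoreCrossOs(paths: string[], key: string): Promise<string | undefined> {
        return cache.restoreCache(paths, key, [], {}, true);
    }

    // Save: the fourth positional argument carries the same flag.
    async function saveCrossOs(paths: string[], key: string): Promise<number> {
        return cache.saveCache(paths, key, {}, true);
    }

Leaving the flag at its default keeps the previous behaviour, where Windows caches are versioned separately from Linux and macOS ones.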
@@ -53255,6 +53365,11 @@ var CompressionMethod;
     CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
     CompressionMethod["Zstd"] = "zstd";
 })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
+var ArchiveToolType;
+(function (ArchiveToolType) {
+    ArchiveToolType["GNU"] = "gnu";
+    ArchiveToolType["BSD"] = "bsd";
+})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
 // The default number of retry attempts.
 exports.DefaultRetryAttempts = 2;
 // The default delay in milliseconds between retry attempts.
@@ -53263,6 +53378,12 @@ exports.DefaultRetryDelay = 5000;
 // over the socket during this period, the socket is destroyed and the download
 // is aborted.
 exports.SocketTimeout = 5000;
+// The default path of GNUtar on hosted Windows runners
+exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;
+// The default path of BSDtar on hosted Windows runners
+exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
+exports.TarFilename = 'cache.tar';
+exports.ManifestFilename = 'manifest.txt';
 //# sourceMappingURL=constants.js.map


 /***/ }),
335  dist/restore/index.js (vendored)
@@ -1177,10 +1177,6 @@ function getVersion(app) {
 // Use zstandard if possible to maximize cache performance
 function getCompressionMethod() {
     return __awaiter(this, void 0, void 0, function* () {
-        if (process.platform === 'win32' && !(yield isGnuTarInstalled())) {
-            // Disable zstd due to bug https://github.com/actions/cache/issues/301
-            return constants_1.CompressionMethod.Gzip;
-        }
         const versionOutput = yield getVersion('zstd');
         const version = semver.clean(versionOutput);
         if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
@@ -1204,13 +1200,16 @@ function getCacheFileName(compressionMethod) {
         : constants_1.CacheFilename.Zstd;
 }
 exports.getCacheFileName = getCacheFileName;
-function isGnuTarInstalled() {
+function getGnuTarPathOnWindows() {
     return __awaiter(this, void 0, void 0, function* () {
+        if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {
+            return constants_1.GnuTarPathOnWindows;
+        }
         const versionOutput = yield getVersion('tar');
-        return versionOutput.toLowerCase().includes('gnu tar');
+        return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
     });
 }
-exports.isGnuTarInstalled = isGnuTarInstalled;
+exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;
 function assertDefined(name, value) {
     if (value === undefined) {
         throw Error(`Expected ${name} but value was undefiend`);
@@ -3384,7 +3383,6 @@ const crypto = __importStar(__webpack_require__(417));
 const fs = __importStar(__webpack_require__(747));
 const url_1 = __webpack_require__(414);
 const utils = __importStar(__webpack_require__(15));
-const constants_1 = __webpack_require__(931);
 const downloadUtils_1 = __webpack_require__(251);
 const options_1 = __webpack_require__(538);
 const requestUtils_1 = __webpack_require__(899);
@@ -3414,10 +3412,17 @@ function createHttpClient() {
     const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
     return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
 }
-function getCacheVersion(paths, compressionMethod) {
-    const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip
-        ? []
-        : [compressionMethod]);
+function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
+    const components = paths;
+    // Add compression method to cache version to restore
+    // compressed cache as per compression method
+    if (compressionMethod) {
+        components.push(compressionMethod);
+    }
+    // Only check for windows platforms if enableCrossOsArchive is false
+    if (process.platform === 'win32' && !enableCrossOsArchive) {
+        components.push('windows-only');
+    }
     // Add salt to cache version to support breaking changes in cache entry
     components.push(versionSalt);
     return crypto
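The rewritten getCacheVersion above is the heart of the opt-in: the compression method is now always folded into the cache version, and a 'windows-only' component is appended unless the caller enabled cross-OS archives. A standalone sketch of the same computation; the salt value and the join separator are simplified assumptions here, not the library's exact internals:

    import * as crypto from "crypto";

    function cacheVersion(paths: string[], compressionMethod?: string, enableCrossOsArchive = false): string {
        const components = [...paths];
        if (compressionMethod) {
            components.push(compressionMethod);
        }
        // Windows caches stay platform-scoped unless cross-OS archives are opted in.
        if (process.platform === "win32" && !enableCrossOsArchive) {
            components.push("windows-only");
        }
        components.push("salt"); // placeholder for the library's versionSalt constant
        return crypto.createHash("sha256").update(components.join("|")).digest("hex");
    }

Because the version string participates in cache lookups, a cache saved on Linux can only match on Windows when both sides omit the 'windows-only' component, i.e. when both opted in.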
@@ -3429,9 +3434,10 @@ exports.getCacheVersion = getCacheVersion;
 function getCacheEntry(keys, paths, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
-        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
+        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
+        // Cache not found
         if (response.statusCode === 204) {
             // List cache for primary key only if cache miss occurs
             if (core.isDebug()) {
@@ -3445,6 +3451,7 @@ function getCacheEntry(keys, paths, options) {
         const cacheResult = response.result;
         const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
         if (!cacheDownloadUrl) {
+            // Cache achiveLocation not found. This should never happen, and hence bail out.
             throw new Error('Cache not found.');
         }
         core.setSecret(cacheDownloadUrl);
@@ -3490,7 +3497,7 @@ exports.downloadCache = downloadCache;
 function reserveCache(key, paths, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
-        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
+        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
         const reserveCacheRequest = {
             key,
             version,
@@ -4970,7 +4977,8 @@ var Inputs;
     Inputs["Key"] = "key";
     Inputs["Path"] = "path";
     Inputs["RestoreKeys"] = "restore-keys";
-    Inputs["UploadChunkSize"] = "upload-chunk-size"; // Input for cache, save action
+    Inputs["UploadChunkSize"] = "upload-chunk-size";
+    Inputs["EnableCrossOsArchive"] = "enableCrossOsArchive"; // Input for cache, restore, save action
 })(Inputs = exports.Inputs || (exports.Inputs = {}));
 var Outputs;
 (function (Outputs) {
@@ -38129,27 +38137,27 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const exec_1 = __webpack_require__(986);
+const core_1 = __webpack_require__(470);
 const io = __importStar(__webpack_require__(1));
 const fs_1 = __webpack_require__(747);
 const path = __importStar(__webpack_require__(622));
 const utils = __importStar(__webpack_require__(15));
 const constants_1 = __webpack_require__(931);
 const IS_WINDOWS = process.platform === 'win32';
-function getTarPath(args, compressionMethod) {
+core_1.exportVariable('MSYS', 'winsymlinks:nativestrict');
+// Returns tar path and type: BSD or GNU
+function getTarPath() {
     return __awaiter(this, void 0, void 0, function* () {
         switch (process.platform) {
             case 'win32': {
-                const systemTar = `${process.env['windir']}\\System32\\tar.exe`;
-                if (compressionMethod !== constants_1.CompressionMethod.Gzip) {
-                    // We only use zstandard compression on windows when gnu tar is installed due to
-                    // a bug with compressing large files with bsdtar + zstd
-                    args.push('--force-local');
+                const gnuTar = yield utils.getGnuTarPathOnWindows();
+                const systemTar = constants_1.SystemTarPathOnWindows;
+                if (gnuTar) {
+                    // Use GNUtar as default on windows
+                    return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
                 }
                 else if (fs_1.existsSync(systemTar)) {
-                    return systemTar;
-                }
-                else if (yield utils.isGnuTarInstalled()) {
-                    args.push('--force-local');
+                    return { path: systemTar, type: constants_1.ArchiveToolType.BSD };
                 }
                 break;
             }
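The reworked getTarPath above encodes a simple preference order on Windows: GNU tar shipped with Git for Windows first, then the BSD tar in System32. A compact sketch of that order, assuming the two candidate paths are passed in; the real code reads them from the constants introduced in this commit:

    import { existsSync } from "fs";

    type ArchiveTool = { path: string; type: "gnu" | "bsd" };

    function resolveWindowsTar(gnuTarPath: string, systemTarPath: string): ArchiveTool | undefined {
        if (existsSync(gnuTarPath)) {
            return { path: gnuTarPath, type: "gnu" };   // prefer GNU tar from Git for Windows
        }
        if (existsSync(systemTarPath)) {
            return { path: systemTarPath, type: "bsd" }; // fall back to the bundled BSD tar
        }
        return undefined; // the library then falls back to whatever `tar` is on PATH
    }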
@@ -38157,25 +38165,92 @@ function getTarPath(args, compressionMethod) {
             const gnuTar = yield io.which('gtar', false);
             if (gnuTar) {
                 // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
-                args.push('--delay-directory-restore');
-                return gnuTar;
+                return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
+            }
+            else {
+                return {
+                    path: yield io.which('tar', true),
+                    type: constants_1.ArchiveToolType.BSD
+                };
             }
-            break;
         }
         default:
             break;
     }
-    return yield io.which('tar', true);
+    // Default assumption is GNU tar is present in path
+    return {
+        path: yield io.which('tar', true),
+        type: constants_1.ArchiveToolType.GNU
+    };
     });
 }
-function execTar(args, compressionMethod, cwd) {
+// Return arguments for tar as per tarPath, compressionMethod, method type and os
+function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
     return __awaiter(this, void 0, void 0, function* () {
-        try {
-            yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd });
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-        }
+        const args = [`"${tarPath.path}"`];
+        const cacheFileName = utils.getCacheFileName(compressionMethod);
+        const tarFile = 'cache.tar';
+        const workingDirectory = getWorkingDirectory();
+        // Speficic args for BSD tar on windows for workaround
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
+        // Method specific args
+        switch (type) {
+            case 'create':
+                args.push('--posix', '-cf', BSD_TAR_ZSTD
+                    ? tarFile
+                    : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD
+                    ? tarFile
+                    : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename);
+                break;
+            case 'extract':
+                args.push('-xf', BSD_TAR_ZSTD
+                    ? tarFile
+                    : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'));
+                break;
+            case 'list':
+                args.push('-tf', BSD_TAR_ZSTD
+                    ? tarFile
+                    : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P');
+                break;
+        }
+        // Platform specific args
+        if (tarPath.type === constants_1.ArchiveToolType.GNU) {
+            switch (process.platform) {
+                case 'win32':
+                    args.push('--force-local');
+                    break;
+                case 'darwin':
+                    args.push('--delay-directory-restore');
+                    break;
+            }
+        }
+        return args;
+    });
+}
+// Returns commands to run tar and compression program
+function getCommands(compressionMethod, type, archivePath = '') {
+    return __awaiter(this, void 0, void 0, function* () {
+        let args;
+        const tarPath = yield getTarPath();
+        const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
+        const compressionArgs = type !== 'create'
+            ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath)
+            : yield getCompressionProgram(tarPath, compressionMethod);
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
+        if (BSD_TAR_ZSTD && type !== 'create') {
+            args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
+        }
+        else {
+            args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
+        }
+        if (BSD_TAR_ZSTD) {
+            return args;
+        }
+        return [args.join(' ')];
     });
 }
 function getWorkingDirectory() {
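getCommands above decides whether tar and zstd run as one piped process or as two separate ones. With BSD tar plus zstd on Windows the archive goes through the intermediate cache.tar file, so two commands come back; everywhere else a single combined command is returned. A small sketch of just that ordering rule, with hypothetical parameter names:

    function orderCommands(tarCmd: string, compressCmd: string, bsdTarZstdOnWindows: boolean, creating: boolean): string[] {
        if (bsdTarZstdOnWindows) {
            // create: tar writes cache.tar, then zstd compresses it;
            // extract/list: zstd decompresses to cache.tar, then tar reads it.
            return creating ? [tarCmd, compressCmd] : [compressCmd, tarCmd];
        }
        // Single process: tar drives the compressor via --use-compress-program.
        return [[tarCmd, compressCmd].join(" ")];
    }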
@@ -38183,91 +38258,116 @@ function getWorkingDirectory() {
     return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
 }
 // Common function for extractTar and listTar to get the compression method
-function getCompressionProgram(compressionMethod) {
-    // -d: Decompress.
-    // unzstd is equivalent to 'zstd -d'
-    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-    // Using 30 here because we also support 32-bit self-hosted runners.
-    switch (compressionMethod) {
-        case constants_1.CompressionMethod.Zstd:
-            return [
-                '--use-compress-program',
-                IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'
-            ];
-        case constants_1.CompressionMethod.ZstdWithoutLong:
-            return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'];
-        default:
-            return ['-z'];
-    }
+function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
+    return __awaiter(this, void 0, void 0, function* () {
+        // -d: Decompress.
+        // unzstd is equivalent to 'zstd -d'
+        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+        // Using 30 here because we also support 32-bit self-hosted runners.
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
+        switch (compressionMethod) {
+            case constants_1.CompressionMethod.Zstd:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -d --long=30 --force -o',
+                        constants_1.TarFilename,
+                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+                    ]
+                    : [
+                        '--use-compress-program',
+                        IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
+                    ];
+            case constants_1.CompressionMethod.ZstdWithoutLong:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -d --force -o',
+                        constants_1.TarFilename,
+                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+                    ]
+                    : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
+            default:
+                return ['-z'];
+        }
+    });
 }
+// Used for creating the archive
+// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
+// zstdmt is equivalent to 'zstd -T0'
+// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+// Using 30 here because we also support 32-bit self-hosted runners.
+// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
+function getCompressionProgram(tarPath, compressionMethod) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const cacheFileName = utils.getCacheFileName(compressionMethod);
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
+        switch (compressionMethod) {
+            case constants_1.CompressionMethod.Zstd:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -T0 --long=30 --force -o',
+                        cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+                        constants_1.TarFilename
+                    ]
+                    : [
+                        '--use-compress-program',
+                        IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
+                    ];
+            case constants_1.CompressionMethod.ZstdWithoutLong:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -T0 --force -o',
+                        cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+                        constants_1.TarFilename
+                    ]
+                    : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt'];
+            default:
+                return ['-z'];
+        }
+    });
+}
+// Executes all commands as separate processes
+function execCommands(commands, cwd) {
+    return __awaiter(this, void 0, void 0, function* () {
+        for (const command of commands) {
+            try {
+                yield exec_1.exec(command, undefined, { cwd });
+            }
+            catch (error) {
+                throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+            }
+        }
+    });
+}
+// List the contents of a tar
 function listTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
-        const args = [
-            ...getCompressionProgram(compressionMethod),
-            '-tf',
-            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P'
-        ];
-        yield execTar(args, compressionMethod);
+        const commands = yield getCommands(compressionMethod, 'list', archivePath);
+        yield execCommands(commands);
     });
 }
 exports.listTar = listTar;
+// Extract a tar
 function extractTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
         const workingDirectory = getWorkingDirectory();
         yield io.mkdirP(workingDirectory);
-        const args = [
-            ...getCompressionProgram(compressionMethod),
-            '-xf',
-            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P',
-            '-C',
-            workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
-        ];
-        yield execTar(args, compressionMethod);
+        const commands = yield getCommands(compressionMethod, 'extract', archivePath);
+        yield execCommands(commands);
     });
 }
 exports.extractTar = extractTar;
+// Create a tar
 function createTar(archiveFolder, sourceDirectories, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Write source directories to manifest.txt to avoid command length limits
-        const manifestFilename = 'manifest.txt';
-        const cacheFileName = utils.getCacheFileName(compressionMethod);
-        fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n'));
-        const workingDirectory = getWorkingDirectory();
-        // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
-        // zstdmt is equivalent to 'zstd -T0'
-        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-        // Using 30 here because we also support 32-bit self-hosted runners.
-        // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
-        function getCompressionProgram() {
-            switch (compressionMethod) {
-                case constants_1.CompressionMethod.Zstd:
-                    return [
-                        '--use-compress-program',
-                        IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
-                    ];
-                case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
-                default:
-                    return ['-z'];
-            }
-        }
-        const args = [
-            '--posix',
-            ...getCompressionProgram(),
-            '-cf',
-            cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '--exclude',
-            cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P',
-            '-C',
-            workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '--files-from',
-            manifestFilename
-        ];
-        yield execTar(args, compressionMethod, archiveFolder);
+        fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
+        const commands = yield getCommands(compressionMethod, 'create');
+        yield execCommands(commands, archiveFolder);
     });
 }
 exports.createTar = createTar;
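execCommands above replaces the old execTar wrapper: each command returned by getCommands runs as its own process, and failures report which program failed. A rough standalone equivalent, assuming @actions/exec; the helper name is made up for the sketch:

    import { exec } from "@actions/exec";

    async function runAll(commands: string[], cwd?: string): Promise<void> {
        for (const command of commands) {
            try {
                await exec(command, undefined, { cwd });
            } catch (error) {
                // Surface which program (tar or zstd) actually failed.
                throw new Error(`${command.split(" ")[0]} failed with error: ${(error as Error)?.message}`);
            }
        }
    }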
@@ -38502,7 +38602,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
+exports.isCacheFeatureAvailable = exports.getInputAsBool = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
 const cache = __importStar(__webpack_require__(692));
 const core = __importStar(__webpack_require__(470));
 const constants_1 = __webpack_require__(196);
@@ -38545,6 +38645,11 @@ function getInputAsInt(name, options) {
     return value;
 }
 exports.getInputAsInt = getInputAsInt;
+function getInputAsBool(name, options) {
+    const result = core.getInput(name, options);
+    return result.toLowerCase() === "true";
+}
+exports.getInputAsBool = getInputAsBool;
 function isCacheFeatureAvailable() {
     if (cache.isFeatureAvailable()) {
         return true;
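getInputAsBool above is intentionally strict: only the literal string "true", case-insensitively, reads as true, so an unset or malformed enableCrossOsArchive input falls back to the old Windows-scoped behaviour. A sketch of the same semantics, assuming @actions/core:

    import * as core from "@actions/core";

    function readBoolInput(name: string, options?: core.InputOptions): boolean {
        return core.getInput(name, options).toLowerCase() === "true";
    }

    // readBoolInput("enableCrossOsArchive") is false unless the workflow sets the input to "true".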
@@ -47121,9 +47226,10 @@ exports.isFeatureAvailable = isFeatureAvailable;
 * @param primaryKey an explicit key for restoring the cache
 * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
 * @param downloadOptions cache download options
+ * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
 * @returns string returns the key for the cache hit, otherwise returns undefined
 */
-function restoreCache(paths, primaryKey, restoreKeys, options) {
+function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
     return __awaiter(this, void 0, void 0, function* () {
         checkPaths(paths);
         restoreKeys = restoreKeys || [];
@@ -47141,7 +47247,8 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
         try {
             // path are needed to compute version
             const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
-                compressionMethod
+                compressionMethod,
+                enableCrossOsArchive
             });
             if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
                 // Cache not found
@@ -47188,10 +47295,11 @@ exports.restoreCache = restoreCache;
 *
 * @param paths a list of file paths to be cached
 * @param key an explicit key for restoring the cache
+ * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
 * @param options cache upload options
 * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
 */
-function saveCache(paths, key, options) {
+function saveCache(paths, key, options, enableCrossOsArchive = false) {
     var _a, _b, _c, _d, _e;
     return __awaiter(this, void 0, void 0, function* () {
         checkPaths(paths);
@@ -47222,6 +47330,7 @@ function saveCache(paths, key, options) {
             core.debug('Reserving Cache');
             const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
                 compressionMethod,
+                enableCrossOsArchive,
                 cacheSize: archiveFileSize
             });
             if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) {
@@ -50385,7 +50494,8 @@ function restoreImpl(stateProvider) {
         const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
             required: true
         });
-        const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys);
+        const enableCrossOsArchive = utils.getInputAsBool(constants_1.Inputs.EnableCrossOsArchive);
+        const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys, {}, enableCrossOsArchive);
         if (!cacheKey) {
             core.info(`Cache not found for input keys: ${[
                 primaryKey,
@@ -53255,6 +53365,11 @@ var CompressionMethod;
     CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
     CompressionMethod["Zstd"] = "zstd";
 })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
+var ArchiveToolType;
+(function (ArchiveToolType) {
+    ArchiveToolType["GNU"] = "gnu";
+    ArchiveToolType["BSD"] = "bsd";
+})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
 // The default number of retry attempts.
 exports.DefaultRetryAttempts = 2;
 // The default delay in milliseconds between retry attempts.
@@ -53263,6 +53378,12 @@ exports.DefaultRetryDelay = 5000;
 // over the socket during this period, the socket is destroyed and the download
 // is aborted.
 exports.SocketTimeout = 5000;
+// The default path of GNUtar on hosted Windows runners
+exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;
+// The default path of BSDtar on hosted Windows runners
+exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
+exports.TarFilename = 'cache.tar';
+exports.ManifestFilename = 'manifest.txt';
 //# sourceMappingURL=constants.js.map


 /***/ }),
337
dist/save-only/index.js
vendored
337
dist/save-only/index.js
vendored
|
@ -1233,10 +1233,6 @@ function getVersion(app) {
|
||||||
// Use zstandard if possible to maximize cache performance
|
// Use zstandard if possible to maximize cache performance
|
||||||
function getCompressionMethod() {
|
function getCompressionMethod() {
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
if (process.platform === 'win32' && !(yield isGnuTarInstalled())) {
|
|
||||||
// Disable zstd due to bug https://github.com/actions/cache/issues/301
|
|
||||||
return constants_1.CompressionMethod.Gzip;
|
|
||||||
}
|
|
||||||
const versionOutput = yield getVersion('zstd');
|
const versionOutput = yield getVersion('zstd');
|
||||||
const version = semver.clean(versionOutput);
|
const version = semver.clean(versionOutput);
|
||||||
if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
|
if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
|
||||||
|
@ -1260,13 +1256,16 @@ function getCacheFileName(compressionMethod) {
|
||||||
: constants_1.CacheFilename.Zstd;
|
: constants_1.CacheFilename.Zstd;
|
||||||
}
|
}
|
||||||
exports.getCacheFileName = getCacheFileName;
|
exports.getCacheFileName = getCacheFileName;
|
||||||
function isGnuTarInstalled() {
|
function getGnuTarPathOnWindows() {
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
|
if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {
|
||||||
|
return constants_1.GnuTarPathOnWindows;
|
||||||
|
}
|
||||||
const versionOutput = yield getVersion('tar');
|
const versionOutput = yield getVersion('tar');
|
||||||
return versionOutput.toLowerCase().includes('gnu tar');
|
return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
exports.isGnuTarInstalled = isGnuTarInstalled;
|
exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;
|
||||||
function assertDefined(name, value) {
|
function assertDefined(name, value) {
|
||||||
if (value === undefined) {
|
if (value === undefined) {
|
||||||
throw Error(`Expected ${name} but value was undefiend`);
|
throw Error(`Expected ${name} but value was undefiend`);
|
||||||
|
@ -3440,7 +3439,6 @@ const crypto = __importStar(__webpack_require__(417));
|
||||||
const fs = __importStar(__webpack_require__(747));
|
const fs = __importStar(__webpack_require__(747));
|
||||||
const url_1 = __webpack_require__(835);
|
const url_1 = __webpack_require__(835);
|
||||||
const utils = __importStar(__webpack_require__(15));
|
const utils = __importStar(__webpack_require__(15));
|
||||||
const constants_1 = __webpack_require__(931);
|
|
||||||
const downloadUtils_1 = __webpack_require__(251);
|
const downloadUtils_1 = __webpack_require__(251);
|
||||||
const options_1 = __webpack_require__(538);
|
const options_1 = __webpack_require__(538);
|
||||||
const requestUtils_1 = __webpack_require__(899);
|
const requestUtils_1 = __webpack_require__(899);
|
||||||
|
@ -3470,10 +3468,17 @@ function createHttpClient() {
|
||||||
const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
|
const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
|
||||||
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
|
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
|
||||||
}
|
}
|
||||||
function getCacheVersion(paths, compressionMethod) {
|
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
|
||||||
const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip
|
const components = paths;
|
||||||
? []
|
// Add compression method to cache version to restore
|
||||||
: [compressionMethod]);
|
// compressed cache as per compression method
|
||||||
|
if (compressionMethod) {
|
||||||
|
components.push(compressionMethod);
|
||||||
|
}
|
||||||
|
// Only check for windows platforms if enableCrossOsArchive is false
|
||||||
|
if (process.platform === 'win32' && !enableCrossOsArchive) {
|
||||||
|
components.push('windows-only');
|
||||||
|
}
|
||||||
// Add salt to cache version to support breaking changes in cache entry
|
// Add salt to cache version to support breaking changes in cache entry
|
||||||
components.push(versionSalt);
|
components.push(versionSalt);
|
||||||
return crypto
|
return crypto
|
||||||
|
@ -3485,9 +3490,10 @@ exports.getCacheVersion = getCacheVersion;
|
||||||
function getCacheEntry(keys, paths, options) {
|
function getCacheEntry(keys, paths, options) {
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
const httpClient = createHttpClient();
|
const httpClient = createHttpClient();
|
||||||
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
|
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
|
||||||
const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
|
const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
|
||||||
const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
|
const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
|
||||||
|
// Cache not found
|
||||||
if (response.statusCode === 204) {
|
if (response.statusCode === 204) {
|
||||||
// List cache for primary key only if cache miss occurs
|
// List cache for primary key only if cache miss occurs
|
||||||
if (core.isDebug()) {
|
if (core.isDebug()) {
|
||||||
|
@ -3501,6 +3507,7 @@ function getCacheEntry(keys, paths, options) {
|
||||||
const cacheResult = response.result;
|
const cacheResult = response.result;
|
||||||
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
|
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
|
||||||
if (!cacheDownloadUrl) {
|
if (!cacheDownloadUrl) {
|
||||||
|
// Cache achiveLocation not found. This should never happen, and hence bail out.
|
||||||
throw new Error('Cache not found.');
|
throw new Error('Cache not found.');
|
||||||
}
|
}
|
||||||
core.setSecret(cacheDownloadUrl);
|
core.setSecret(cacheDownloadUrl);
|
||||||
|
@ -3546,7 +3553,7 @@ exports.downloadCache = downloadCache;
|
||||||
function reserveCache(key, paths, options) {
|
function reserveCache(key, paths, options) {
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
const httpClient = createHttpClient();
|
const httpClient = createHttpClient();
|
||||||
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
|
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
|
||||||
const reserveCacheRequest = {
|
const reserveCacheRequest = {
|
||||||
key,
|
key,
|
||||||
version,
|
version,
|
||||||
|
@ -5026,7 +5033,8 @@ var Inputs;
|
||||||
Inputs["Key"] = "key";
|
Inputs["Key"] = "key";
|
||||||
Inputs["Path"] = "path";
|
Inputs["Path"] = "path";
|
||||||
Inputs["RestoreKeys"] = "restore-keys";
|
Inputs["RestoreKeys"] = "restore-keys";
|
||||||
Inputs["UploadChunkSize"] = "upload-chunk-size"; // Input for cache, save action
|
Inputs["UploadChunkSize"] = "upload-chunk-size";
|
||||||
|
Inputs["EnableCrossOsArchive"] = "enableCrossOsArchive"; // Input for cache, restore, save action
|
||||||
})(Inputs = exports.Inputs || (exports.Inputs = {}));
|
})(Inputs = exports.Inputs || (exports.Inputs = {}));
|
||||||
var Outputs;
|
var Outputs;
|
||||||
(function (Outputs) {
|
(function (Outputs) {
|
||||||
|
@ -38180,27 +38188,27 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const exec_1 = __webpack_require__(986);
|
const exec_1 = __webpack_require__(986);
|
||||||
|
const core_1 = __webpack_require__(470);
|
||||||
const io = __importStar(__webpack_require__(1));
|
const io = __importStar(__webpack_require__(1));
|
||||||
const fs_1 = __webpack_require__(747);
|
const fs_1 = __webpack_require__(747);
|
||||||
const path = __importStar(__webpack_require__(622));
|
const path = __importStar(__webpack_require__(622));
|
||||||
const utils = __importStar(__webpack_require__(15));
|
const utils = __importStar(__webpack_require__(15));
|
||||||
const constants_1 = __webpack_require__(931);
|
const constants_1 = __webpack_require__(931);
|
||||||
const IS_WINDOWS = process.platform === 'win32';
|
const IS_WINDOWS = process.platform === 'win32';
|
||||||
function getTarPath(args, compressionMethod) {
|
core_1.exportVariable('MSYS', 'winsymlinks:nativestrict');
|
||||||
|
// Returns tar path and type: BSD or GNU
|
||||||
|
function getTarPath() {
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
switch (process.platform) {
|
switch (process.platform) {
|
||||||
case 'win32': {
|
case 'win32': {
|
||||||
const systemTar = `${process.env['windir']}\\System32\\tar.exe`;
|
const gnuTar = yield utils.getGnuTarPathOnWindows();
|
||||||
if (compressionMethod !== constants_1.CompressionMethod.Gzip) {
|
const systemTar = constants_1.SystemTarPathOnWindows;
|
||||||
// We only use zstandard compression on windows when gnu tar is installed due to
|
if (gnuTar) {
|
||||||
// a bug with compressing large files with bsdtar + zstd
|
// Use GNUtar as default on windows
|
||||||
args.push('--force-local');
|
return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
|
||||||
}
|
}
|
||||||
else if (fs_1.existsSync(systemTar)) {
|
else if (fs_1.existsSync(systemTar)) {
|
||||||
return systemTar;
|
return { path: systemTar, type: constants_1.ArchiveToolType.BSD };
|
||||||
}
|
|
||||||
else if (yield utils.isGnuTarInstalled()) {
|
|
||||||
args.push('--force-local');
|
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
@ -38208,25 +38216,92 @@ function getTarPath(args, compressionMethod) {
|
||||||
const gnuTar = yield io.which('gtar', false);
|
const gnuTar = yield io.which('gtar', false);
|
||||||
if (gnuTar) {
|
if (gnuTar) {
|
||||||
// fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
|
// fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
|
||||||
args.push('--delay-directory-restore');
|
return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
|
||||||
return gnuTar;
|
}
|
||||||
|
else {
|
||||||
|
return {
|
||||||
|
path: yield io.which('tar', true),
|
||||||
|
type: constants_1.ArchiveToolType.BSD
|
||||||
|
};
|
||||||
}
|
}
|
||||||
break;
|
|
||||||
}
|
}
|
||||||
default:
|
default:
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
return yield io.which('tar', true);
|
// Default assumption is GNU tar is present in path
|
||||||
|
return {
|
||||||
|
path: yield io.which('tar', true),
|
||||||
|
type: constants_1.ArchiveToolType.GNU
|
||||||
|
};
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
function execTar(args, compressionMethod, cwd) {
|
// Return arguments for tar as per tarPath, compressionMethod, method type and os
|
||||||
|
function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
try {
|
const args = [`"${tarPath.path}"`];
|
||||||
yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd });
|
const cacheFileName = utils.getCacheFileName(compressionMethod);
|
||||||
|
const tarFile = 'cache.tar';
|
||||||
|
const workingDirectory = getWorkingDirectory();
|
||||||
|
// Speficic args for BSD tar on windows for workaround
|
||||||
|
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
|
||||||
|
compressionMethod !== constants_1.CompressionMethod.Gzip &&
|
||||||
|
IS_WINDOWS;
|
||||||
|
// Method specific args
|
||||||
|
switch (type) {
|
||||||
|
case 'create':
|
||||||
|
args.push('--posix', '-cf', BSD_TAR_ZSTD
|
||||||
|
? tarFile
|
||||||
|
: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD
|
||||||
|
? tarFile
|
||||||
|
: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename);
|
||||||
|
break;
|
||||||
|
case 'extract':
|
||||||
|
args.push('-xf', BSD_TAR_ZSTD
|
||||||
|
? tarFile
|
||||||
|
: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'));
|
||||||
|
break;
|
||||||
|
case 'list':
|
||||||
|
args.push('-tf', BSD_TAR_ZSTD
|
||||||
|
? tarFile
|
||||||
|
: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P');
|
||||||
|
break;
|
||||||
}
|
}
|
||||||
catch (error) {
|
// Platform specific args
|
||||||
throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
|
if (tarPath.type === constants_1.ArchiveToolType.GNU) {
|
||||||
|
switch (process.platform) {
|
||||||
|
case 'win32':
|
||||||
|
args.push('--force-local');
|
||||||
|
break;
|
||||||
|
case 'darwin':
|
||||||
|
args.push('--delay-directory-restore');
|
||||||
|
break;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
return args;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
// Returns commands to run tar and compression program
|
||||||
|
function getCommands(compressionMethod, type, archivePath = '') {
|
||||||
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
|
let args;
|
||||||
|
const tarPath = yield getTarPath();
|
||||||
|
const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
|
||||||
|
const compressionArgs = type !== 'create'
|
||||||
|
? yield getDecompressionProgram(tarPath, compressionMethod, archivePath)
|
||||||
|
: yield getCompressionProgram(tarPath, compressionMethod);
|
||||||
|
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
|
||||||
|
compressionMethod !== constants_1.CompressionMethod.Gzip &&
|
||||||
|
IS_WINDOWS;
|
||||||
|
if (BSD_TAR_ZSTD && type !== 'create') {
|
||||||
|
args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
|
||||||
|
}
|
||||||
|
if (BSD_TAR_ZSTD) {
|
||||||
|
return args;
|
||||||
|
}
|
||||||
|
return [args.join(' ')];
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
function getWorkingDirectory() {
|
function getWorkingDirectory() {
|
||||||
|
@@ -38234,91 +38309,116 @@ function getWorkingDirectory() {
     return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
 }
 // Common function for extractTar and listTar to get the compression method
-function getCompressionProgram(compressionMethod) {
-    // -d: Decompress.
-    // unzstd is equivalent to 'zstd -d'
-    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-    // Using 30 here because we also support 32-bit self-hosted runners.
-    switch (compressionMethod) {
-        case constants_1.CompressionMethod.Zstd:
-            return [
-                '--use-compress-program',
-                IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'
-            ];
-        case constants_1.CompressionMethod.ZstdWithoutLong:
-            return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'];
-        default:
-            return ['-z'];
-    }
+function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
+    return __awaiter(this, void 0, void 0, function* () {
+        // -d: Decompress.
+        // unzstd is equivalent to 'zstd -d'
+        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+        // Using 30 here because we also support 32-bit self-hosted runners.
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
+        switch (compressionMethod) {
+            case constants_1.CompressionMethod.Zstd:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -d --long=30 --force -o',
+                        constants_1.TarFilename,
+                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+                    ]
+                    : [
+                        '--use-compress-program',
+                        IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
+                    ];
+            case constants_1.CompressionMethod.ZstdWithoutLong:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -d --force -o',
+                        constants_1.TarFilename,
+                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+                    ]
+                    : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
+            default:
+                return ['-z'];
+        }
+    });
 }
+// Used for creating the archive
+// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
+// zstdmt is equivalent to 'zstd -T0'
+// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+// Using 30 here because we also support 32-bit self-hosted runners.
+// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
+function getCompressionProgram(tarPath, compressionMethod) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const cacheFileName = utils.getCacheFileName(compressionMethod);
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
+        switch (compressionMethod) {
+            case constants_1.CompressionMethod.Zstd:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -T0 --long=30 --force -o',
+                        cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+                        constants_1.TarFilename
+                    ]
+                    : [
+                        '--use-compress-program',
+                        IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
+                    ];
+            case constants_1.CompressionMethod.ZstdWithoutLong:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -T0 --force -o',
+                        cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+                        constants_1.TarFilename
+                    ]
+                    : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt'];
+            default:
+                return ['-z'];
+        }
+    });
+}
+// Executes all commands as separate processes
+function execCommands(commands, cwd) {
+    return __awaiter(this, void 0, void 0, function* () {
+        for (const command of commands) {
+            try {
+                yield exec_1.exec(command, undefined, { cwd });
+            }
+            catch (error) {
+                throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+            }
+        }
+    });
+}
+// List the contents of a tar
 function listTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
-        const args = [
-            ...getCompressionProgram(compressionMethod),
-            '-tf',
-            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P'
-        ];
-        yield execTar(args, compressionMethod);
+        const commands = yield getCommands(compressionMethod, 'list', archivePath);
+        yield execCommands(commands);
     });
 }
 exports.listTar = listTar;
+// Extract a tar
 function extractTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
         const workingDirectory = getWorkingDirectory();
         yield io.mkdirP(workingDirectory);
-        const args = [
-            ...getCompressionProgram(compressionMethod),
-            '-xf',
-            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P',
-            '-C',
-            workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
-        ];
-        yield execTar(args, compressionMethod);
+        const commands = yield getCommands(compressionMethod, 'extract', archivePath);
+        yield execCommands(commands);
     });
 }
 exports.extractTar = extractTar;
+// Create a tar
 function createTar(archiveFolder, sourceDirectories, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Write source directories to manifest.txt to avoid command length limits
-        const manifestFilename = 'manifest.txt';
-        const cacheFileName = utils.getCacheFileName(compressionMethod);
-        fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n'));
-        const workingDirectory = getWorkingDirectory();
-        // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
-        // zstdmt is equivalent to 'zstd -T0'
-        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-        // Using 30 here because we also support 32-bit self-hosted runners.
-        // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
-        function getCompressionProgram() {
-            switch (compressionMethod) {
-                case constants_1.CompressionMethod.Zstd:
-                    return [
-                        '--use-compress-program',
-                        IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
-                    ];
-                case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
-                default:
-                    return ['-z'];
-            }
-        }
-        const args = [
-            '--posix',
-            ...getCompressionProgram(),
-            '-cf',
-            cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '--exclude',
-            cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P',
-            '-C',
-            workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '--files-from',
-            manifestFilename
-        ];
-        yield execTar(args, compressionMethod, archiveFolder);
+        fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
+        const commands = yield getCommands(compressionMethod, 'create');
+        yield execCommands(commands, archiveFolder);
     });
 }
 exports.createTar = createTar;
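For readers skimming the vendored bundle, here is a minimal sketch of the execution model introduced above: with BSD tar plus zstd on Windows, the compression step and the tar step run as two separate processes, each command line handed whole to @actions/exec, mirroring execCommands(). The helper name and the example command lines are illustrative only, not taken from this commit.

import { exec } from "@actions/exec";

// Sketch: run the split command list sequentially, as execCommands() does.
async function runSequentially(commands: string[], cwd?: string): Promise<void> {
    for (const command of commands) {
        // hypothetical entries, e.g.
        //   'zstd -d --long=30 --force -o cache.tar D:/a/_temp/archive.tzst'
        //   '"C:\\Windows\\System32\\tar.exe" -xf cache.tar -P -C D:/a/repo'
        await exec(command, undefined, { cwd });
    }
}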
@@ -38553,7 +38653,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
+exports.isCacheFeatureAvailable = exports.getInputAsBool = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
 const cache = __importStar(__webpack_require__(692));
 const core = __importStar(__webpack_require__(470));
 const constants_1 = __webpack_require__(196);
@@ -38596,6 +38696,11 @@ function getInputAsInt(name, options) {
     return value;
 }
 exports.getInputAsInt = getInputAsInt;
+function getInputAsBool(name, options) {
+    const result = core.getInput(name, options);
+    return result.toLowerCase() === "true";
+}
+exports.getInputAsBool = getInputAsBool;
 function isCacheFeatureAvailable() {
     if (cache.isFeatureAvailable()) {
         return true;
@@ -41075,9 +41180,8 @@ function saveImpl(stateProvider) {
         const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
             required: true
         });
-        cacheId = yield cache.saveCache(cachePaths, primaryKey, {
-            uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize)
-        });
+        const enableCrossOsArchive = utils.getInputAsBool(constants_1.Inputs.EnableCrossOsArchive);
+        cacheId = yield cache.saveCache(cachePaths, primaryKey, { uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize) }, enableCrossOsArchive);
         if (cacheId != -1) {
             core.info(`Cache saved with key: ${primaryKey}`);
         }
@@ -47263,9 +47367,10 @@ exports.isFeatureAvailable = isFeatureAvailable;
 * @param primaryKey an explicit key for restoring the cache
 * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
 * @param downloadOptions cache download options
+* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
 * @returns string returns the key for the cache hit, otherwise returns undefined
 */
-function restoreCache(paths, primaryKey, restoreKeys, options) {
+function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
     return __awaiter(this, void 0, void 0, function* () {
         checkPaths(paths);
         restoreKeys = restoreKeys || [];
@@ -47283,7 +47388,8 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
         try {
             // path are needed to compute version
             const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
-                compressionMethod
+                compressionMethod,
+                enableCrossOsArchive
             });
             if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
                 // Cache not found
@@ -47330,10 +47436,11 @@ exports.restoreCache = restoreCache;
 *
 * @param paths a list of file paths to be cached
 * @param key an explicit key for restoring the cache
+* @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
 * @param options cache upload options
 * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
 */
-function saveCache(paths, key, options) {
+function saveCache(paths, key, options, enableCrossOsArchive = false) {
     var _a, _b, _c, _d, _e;
     return __awaiter(this, void 0, void 0, function* () {
         checkPaths(paths);
@@ -47364,6 +47471,7 @@ function saveCache(paths, key, options) {
         core.debug('Reserving Cache');
         const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
             compressionMethod,
+            enableCrossOsArchive,
             cacheSize: archiveFileSize
         });
         if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) {
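As a hedged usage sketch of the extended saveCache/restoreCache signatures shown in the hunks above: the new boolean rides along as an extra trailing argument, while the options object is passed as before. The paths, key, and function name below are placeholder values, not taken from this commit.

import * as cache from "@actions/cache";

// Illustrative round trip with the opt-in cross-OS flag enabled.
async function roundTrip(): Promise<void> {
    const paths = ["node_modules"];
    const key = "deps-v1";
    const restoredKey = await cache.restoreCache(paths, key, [], {}, true);
    if (!restoredKey) {
        await cache.saveCache(paths, key, {}, true);
    }
}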
@@ -53290,6 +53398,11 @@ var CompressionMethod;
     CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
     CompressionMethod["Zstd"] = "zstd";
 })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
+var ArchiveToolType;
+(function (ArchiveToolType) {
+    ArchiveToolType["GNU"] = "gnu";
+    ArchiveToolType["BSD"] = "bsd";
+})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
 // The default number of retry attempts.
 exports.DefaultRetryAttempts = 2;
 // The default delay in milliseconds between retry attempts.
@@ -53298,6 +53411,12 @@ exports.DefaultRetryDelay = 5000;
 // over the socket during this period, the socket is destroyed and the download
 // is aborted.
 exports.SocketTimeout = 5000;
+// The default path of GNUtar on hosted Windows runners
+exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;
+// The default path of BSDtar on hosted Windows runners
+exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
+exports.TarFilename = 'cache.tar';
+exports.ManifestFilename = 'manifest.txt';
 //# sourceMappingURL=constants.js.map

 /***/ }),
337 dist/save/index.js (vendored)
@@ -1177,10 +1177,6 @@ function getVersion(app) {
 // Use zstandard if possible to maximize cache performance
 function getCompressionMethod() {
     return __awaiter(this, void 0, void 0, function* () {
-        if (process.platform === 'win32' && !(yield isGnuTarInstalled())) {
-            // Disable zstd due to bug https://github.com/actions/cache/issues/301
-            return constants_1.CompressionMethod.Gzip;
-        }
         const versionOutput = yield getVersion('zstd');
         const version = semver.clean(versionOutput);
         if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
@@ -1204,13 +1200,16 @@ function getCacheFileName(compressionMethod) {
         : constants_1.CacheFilename.Zstd;
 }
 exports.getCacheFileName = getCacheFileName;
-function isGnuTarInstalled() {
+function getGnuTarPathOnWindows() {
     return __awaiter(this, void 0, void 0, function* () {
+        if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {
+            return constants_1.GnuTarPathOnWindows;
+        }
         const versionOutput = yield getVersion('tar');
-        return versionOutput.toLowerCase().includes('gnu tar');
+        return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
     });
 }
-exports.isGnuTarInstalled = isGnuTarInstalled;
+exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;
 function assertDefined(name, value) {
     if (value === undefined) {
         throw Error(`Expected ${name} but value was undefiend`);
@@ -3384,7 +3383,6 @@ const crypto = __importStar(__webpack_require__(417));
 const fs = __importStar(__webpack_require__(747));
 const url_1 = __webpack_require__(835);
 const utils = __importStar(__webpack_require__(15));
-const constants_1 = __webpack_require__(931);
 const downloadUtils_1 = __webpack_require__(251);
 const options_1 = __webpack_require__(538);
 const requestUtils_1 = __webpack_require__(899);
@@ -3414,10 +3412,17 @@ function createHttpClient() {
     const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
     return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
 }
-function getCacheVersion(paths, compressionMethod) {
-    const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip
-        ? []
-        : [compressionMethod]);
+function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
+    const components = paths;
+    // Add compression method to cache version to restore
+    // compressed cache as per compression method
+    if (compressionMethod) {
+        components.push(compressionMethod);
+    }
+    // Only check for windows platforms if enableCrossOsArchive is false
+    if (process.platform === 'win32' && !enableCrossOsArchive) {
+        components.push('windows-only');
+    }
     // Add salt to cache version to support breaking changes in cache entry
     components.push(versionSalt);
     return crypto
@@ -3429,9 +3434,10 @@ exports.getCacheVersion = getCacheVersion;
 function getCacheEntry(keys, paths, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
-        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
+        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
+        // Cache not found
         if (response.statusCode === 204) {
             // List cache for primary key only if cache miss occurs
             if (core.isDebug()) {
@@ -3445,6 +3451,7 @@ function getCacheEntry(keys, paths, options) {
         const cacheResult = response.result;
         const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
         if (!cacheDownloadUrl) {
+            // Cache achiveLocation not found. This should never happen, and hence bail out.
             throw new Error('Cache not found.');
         }
         core.setSecret(cacheDownloadUrl);
@@ -3490,7 +3497,7 @@ exports.downloadCache = downloadCache;
 function reserveCache(key, paths, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
-        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
+        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
         const reserveCacheRequest = {
             key,
             version,
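As a rough illustration of the getCacheVersion change above: the compression method is always folded into the version, and a 'windows-only' component is added on Windows unless cross-OS archiving is enabled, which is what keeps old Windows caches from colliding with cross-platform ones. The sketch below only assumes a SHA-256 over the joined components; the real salt value and exact join/digest details live in the library and are not copied from this commit.

import * as crypto from "crypto";

// Illustrative only: "<versionSalt>" is a placeholder.
function sketchCacheVersion(paths: string[], compressionMethod?: string, enableCrossOsArchive = false): string {
    const components = [...paths];
    if (compressionMethod) {
        components.push(compressionMethod);
    }
    if (process.platform === "win32" && !enableCrossOsArchive) {
        components.push("windows-only");
    }
    components.push("<versionSalt>"); // placeholder for the library's salt
    return crypto.createHash("sha256").update(components.join("|")).digest("hex");
}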
@@ -4970,7 +4977,8 @@ var Inputs;
     Inputs["Key"] = "key";
     Inputs["Path"] = "path";
     Inputs["RestoreKeys"] = "restore-keys";
-    Inputs["UploadChunkSize"] = "upload-chunk-size"; // Input for cache, save action
+    Inputs["UploadChunkSize"] = "upload-chunk-size";
+    Inputs["EnableCrossOsArchive"] = "enableCrossOsArchive"; // Input for cache, restore, save action
 })(Inputs = exports.Inputs || (exports.Inputs = {}));
 var Outputs;
 (function (Outputs) {
@@ -38124,27 +38132,27 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const exec_1 = __webpack_require__(986);
+const core_1 = __webpack_require__(470);
 const io = __importStar(__webpack_require__(1));
 const fs_1 = __webpack_require__(747);
 const path = __importStar(__webpack_require__(622));
 const utils = __importStar(__webpack_require__(15));
 const constants_1 = __webpack_require__(931);
 const IS_WINDOWS = process.platform === 'win32';
-function getTarPath(args, compressionMethod) {
+core_1.exportVariable('MSYS', 'winsymlinks:nativestrict');
+// Returns tar path and type: BSD or GNU
+function getTarPath() {
     return __awaiter(this, void 0, void 0, function* () {
         switch (process.platform) {
             case 'win32': {
-                const systemTar = `${process.env['windir']}\\System32\\tar.exe`;
-                if (compressionMethod !== constants_1.CompressionMethod.Gzip) {
-                    // We only use zstandard compression on windows when gnu tar is installed due to
-                    // a bug with compressing large files with bsdtar + zstd
-                    args.push('--force-local');
+                const gnuTar = yield utils.getGnuTarPathOnWindows();
+                const systemTar = constants_1.SystemTarPathOnWindows;
+                if (gnuTar) {
+                    // Use GNUtar as default on windows
+                    return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
                 }
                 else if (fs_1.existsSync(systemTar)) {
-                    return systemTar;
-                }
-                else if (yield utils.isGnuTarInstalled()) {
-                    args.push('--force-local');
+                    return { path: systemTar, type: constants_1.ArchiveToolType.BSD };
                 }
                 break;
             }
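A short sketch of the Windows lookup order implied by the hunk above: prefer the GNU tar bundled with Git for Windows, then the system BSD tar, then whatever tar is on PATH. The two constant paths mirror GnuTarPathOnWindows and SystemTarPathOnWindows from this commit; the helper name itself is illustrative and not part of the change.

import * as fs from "fs";
import * as io from "@actions/io";

// Illustrative helper, not part of the commit.
async function resolveWindowsTar(): Promise<string> {
    const gnuTar = `${process.env["PROGRAMFILES"]}\\Git\\usr\\bin\\tar.exe`;
    const bsdTar = `${process.env["SYSTEMDRIVE"]}\\Windows\\System32\\tar.exe`;
    if (fs.existsSync(gnuTar)) {
        return gnuTar; // GNU tar from Git for Windows
    }
    if (fs.existsSync(bsdTar)) {
        return bsdTar; // BSD tar shipped with Windows
    }
    return io.which("tar", true); // fall back to whatever is on PATH
}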
@@ -38152,25 +38160,92 @@ function getTarPath(args, compressionMethod) {
                 const gnuTar = yield io.which('gtar', false);
                 if (gnuTar) {
                     // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
-                    args.push('--delay-directory-restore');
-                    return gnuTar;
+                    return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
+                }
+                else {
+                    return {
+                        path: yield io.which('tar', true),
+                        type: constants_1.ArchiveToolType.BSD
+                    };
                 }
-                break;
             }
             default:
                 break;
         }
-        return yield io.which('tar', true);
+        // Default assumption is GNU tar is present in path
+        return {
+            path: yield io.which('tar', true),
+            type: constants_1.ArchiveToolType.GNU
+        };
     });
 }
-function execTar(args, compressionMethod, cwd) {
+// Return arguments for tar as per tarPath, compressionMethod, method type and os
+function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
     return __awaiter(this, void 0, void 0, function* () {
-        try {
-            yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd });
+        const args = [`"${tarPath.path}"`];
+        const cacheFileName = utils.getCacheFileName(compressionMethod);
+        const tarFile = 'cache.tar';
+        const workingDirectory = getWorkingDirectory();
+        // Speficic args for BSD tar on windows for workaround
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
+        // Method specific args
+        switch (type) {
+            case 'create':
+                args.push('--posix', '-cf', BSD_TAR_ZSTD
+                    ? tarFile
+                    : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD
+                    ? tarFile
+                    : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename);
+                break;
+            case 'extract':
+                args.push('-xf', BSD_TAR_ZSTD
+                    ? tarFile
+                    : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'));
+                break;
+            case 'list':
+                args.push('-tf', BSD_TAR_ZSTD
+                    ? tarFile
+                    : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P');
+                break;
         }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+        // Platform specific args
+        if (tarPath.type === constants_1.ArchiveToolType.GNU) {
+            switch (process.platform) {
+                case 'win32':
+                    args.push('--force-local');
+                    break;
+                case 'darwin':
+                    args.push('--delay-directory-restore');
+                    break;
+            }
         }
+        return args;
+    });
+}
+// Returns commands to run tar and compression program
+function getCommands(compressionMethod, type, archivePath = '') {
+    return __awaiter(this, void 0, void 0, function* () {
+        let args;
+        const tarPath = yield getTarPath();
+        const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
+        const compressionArgs = type !== 'create'
+            ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath)
+            : yield getCompressionProgram(tarPath, compressionMethod);
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
+        if (BSD_TAR_ZSTD && type !== 'create') {
+            args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
+        }
+        else {
+            args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
+        }
+        if (BSD_TAR_ZSTD) {
+            return args;
+        }
+        return [args.join(' ')];
     });
 }
 function getWorkingDirectory() {
@@ -38178,91 +38253,116 @@ function getWorkingDirectory() {
     return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
 }
 // Common function for extractTar and listTar to get the compression method
-function getCompressionProgram(compressionMethod) {
-    // -d: Decompress.
-    // unzstd is equivalent to 'zstd -d'
-    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-    // Using 30 here because we also support 32-bit self-hosted runners.
-    switch (compressionMethod) {
-        case constants_1.CompressionMethod.Zstd:
-            return [
-                '--use-compress-program',
-                IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'
-            ];
-        case constants_1.CompressionMethod.ZstdWithoutLong:
-            return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'];
-        default:
-            return ['-z'];
-    }
+function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
+    return __awaiter(this, void 0, void 0, function* () {
+        // -d: Decompress.
+        // unzstd is equivalent to 'zstd -d'
+        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+        // Using 30 here because we also support 32-bit self-hosted runners.
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
+        switch (compressionMethod) {
+            case constants_1.CompressionMethod.Zstd:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -d --long=30 --force -o',
+                        constants_1.TarFilename,
+                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+                    ]
+                    : [
+                        '--use-compress-program',
+                        IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
+                    ];
+            case constants_1.CompressionMethod.ZstdWithoutLong:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -d --force -o',
+                        constants_1.TarFilename,
+                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+                    ]
+                    : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
+            default:
+                return ['-z'];
+        }
+    });
 }
+// Used for creating the archive
+// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
+// zstdmt is equivalent to 'zstd -T0'
+// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+// Using 30 here because we also support 32-bit self-hosted runners.
+// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
+function getCompressionProgram(tarPath, compressionMethod) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const cacheFileName = utils.getCacheFileName(compressionMethod);
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
+        switch (compressionMethod) {
+            case constants_1.CompressionMethod.Zstd:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -T0 --long=30 --force -o',
+                        cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+                        constants_1.TarFilename
+                    ]
+                    : [
+                        '--use-compress-program',
+                        IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
+                    ];
+            case constants_1.CompressionMethod.ZstdWithoutLong:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -T0 --force -o',
+                        cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+                        constants_1.TarFilename
+                    ]
+                    : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt'];
+            default:
+                return ['-z'];
+        }
+    });
+}
+// Executes all commands as separate processes
+function execCommands(commands, cwd) {
+    return __awaiter(this, void 0, void 0, function* () {
+        for (const command of commands) {
+            try {
+                yield exec_1.exec(command, undefined, { cwd });
+            }
+            catch (error) {
+                throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+            }
+        }
+    });
+}
+// List the contents of a tar
 function listTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
-        const args = [
-            ...getCompressionProgram(compressionMethod),
-            '-tf',
-            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P'
-        ];
-        yield execTar(args, compressionMethod);
+        const commands = yield getCommands(compressionMethod, 'list', archivePath);
+        yield execCommands(commands);
     });
 }
 exports.listTar = listTar;
+// Extract a tar
 function extractTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
         const workingDirectory = getWorkingDirectory();
         yield io.mkdirP(workingDirectory);
-        const args = [
-            ...getCompressionProgram(compressionMethod),
-            '-xf',
-            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P',
-            '-C',
-            workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
-        ];
-        yield execTar(args, compressionMethod);
+        const commands = yield getCommands(compressionMethod, 'extract', archivePath);
+        yield execCommands(commands);
     });
 }
 exports.extractTar = extractTar;
+// Create a tar
 function createTar(archiveFolder, sourceDirectories, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Write source directories to manifest.txt to avoid command length limits
-        const manifestFilename = 'manifest.txt';
-        const cacheFileName = utils.getCacheFileName(compressionMethod);
-        fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n'));
-        const workingDirectory = getWorkingDirectory();
-        // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
-        // zstdmt is equivalent to 'zstd -T0'
-        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-        // Using 30 here because we also support 32-bit self-hosted runners.
-        // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
-        function getCompressionProgram() {
-            switch (compressionMethod) {
-                case constants_1.CompressionMethod.Zstd:
-                    return [
-                        '--use-compress-program',
-                        IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
-                    ];
-                case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
-                default:
-                    return ['-z'];
-            }
-        }
-        const args = [
-            '--posix',
-            ...getCompressionProgram(),
-            '-cf',
-            cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '--exclude',
-            cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P',
-            '-C',
-            workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '--files-from',
-            manifestFilename
-        ];
-        yield execTar(args, compressionMethod, archiveFolder);
+        fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
+        const commands = yield getCommands(compressionMethod, 'create');
+        yield execCommands(commands, archiveFolder);
     });
 }
 exports.createTar = createTar;
@@ -38497,7 +38597,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
+exports.isCacheFeatureAvailable = exports.getInputAsBool = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
 const cache = __importStar(__webpack_require__(692));
 const core = __importStar(__webpack_require__(470));
 const constants_1 = __webpack_require__(196);
@@ -38540,6 +38640,11 @@ function getInputAsInt(name, options) {
     return value;
 }
 exports.getInputAsInt = getInputAsInt;
+function getInputAsBool(name, options) {
+    const result = core.getInput(name, options);
+    return result.toLowerCase() === "true";
+}
+exports.getInputAsBool = getInputAsBool;
 function isCacheFeatureAvailable() {
     if (cache.isFeatureAvailable()) {
         return true;
@@ -41019,9 +41124,8 @@ function saveImpl(stateProvider) {
         const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
             required: true
         });
-        cacheId = yield cache.saveCache(cachePaths, primaryKey, {
-            uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize)
-        });
+        const enableCrossOsArchive = utils.getInputAsBool(constants_1.Inputs.EnableCrossOsArchive);
+        cacheId = yield cache.saveCache(cachePaths, primaryKey, { uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize) }, enableCrossOsArchive);
         if (cacheId != -1) {
            core.info(`Cache saved with key: ${primaryKey}`);
        }
@@ -47236,9 +47340,10 @@ exports.isFeatureAvailable = isFeatureAvailable;
 * @param primaryKey an explicit key for restoring the cache
 * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
 * @param downloadOptions cache download options
+* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
 * @returns string returns the key for the cache hit, otherwise returns undefined
 */
-function restoreCache(paths, primaryKey, restoreKeys, options) {
+function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
     return __awaiter(this, void 0, void 0, function* () {
         checkPaths(paths);
         restoreKeys = restoreKeys || [];
@@ -47256,7 +47361,8 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
         try {
             // path are needed to compute version
             const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
-                compressionMethod
+                compressionMethod,
+                enableCrossOsArchive
             });
             if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
                 // Cache not found
@@ -47303,10 +47409,11 @@ exports.restoreCache = restoreCache;
 *
 * @param paths a list of file paths to be cached
 * @param key an explicit key for restoring the cache
+* @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
 * @param options cache upload options
 * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
 */
-function saveCache(paths, key, options) {
+function saveCache(paths, key, options, enableCrossOsArchive = false) {
     var _a, _b, _c, _d, _e;
     return __awaiter(this, void 0, void 0, function* () {
         checkPaths(paths);
@@ -47337,6 +47444,7 @@ function saveCache(paths, key, options) {
         core.debug('Reserving Cache');
         const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
             compressionMethod,
+            enableCrossOsArchive,
             cacheSize: archiveFileSize
         });
         if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) {
@@ -53263,6 +53371,11 @@ var CompressionMethod;
     CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
     CompressionMethod["Zstd"] = "zstd";
 })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
+var ArchiveToolType;
+(function (ArchiveToolType) {
+    ArchiveToolType["GNU"] = "gnu";
+    ArchiveToolType["BSD"] = "bsd";
+})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
 // The default number of retry attempts.
 exports.DefaultRetryAttempts = 2;
 // The default delay in milliseconds between retry attempts.
@@ -53271,6 +53384,12 @@ exports.DefaultRetryDelay = 5000;
 // over the socket during this period, the socket is destroyed and the download
 // is aborted.
 exports.SocketTimeout = 5000;
+// The default path of GNUtar on hosted Windows runners
+exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;
+// The default path of BSDtar on hosted Windows runners
+exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
+exports.TarFilename = 'cache.tar';
+exports.ManifestFilename = 'manifest.txt';
 //# sourceMappingURL=constants.js.map

 /***/ }),
14 package-lock.json (generated)
@@ -9,7 +9,7 @@
       "version": "3.2.2",
       "license": "MIT",
       "dependencies": {
-        "@actions/cache": "^3.1.1",
+        "@actions/cache": "^3.1.2",
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.1.1",
         "@actions/io": "^1.1.2"
@@ -36,9 +36,9 @@
       }
     },
     "node_modules/@actions/cache": {
-      "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.1.tgz",
-      "integrity": "sha512-gOUdNap8FvlpoQAMYWiNPi9Ltt7jKWv9RuUVKg9cp/vQA9qTXoKiBkTioUAgIejh/qf7jrojYn3lCyIRIsoSeQ==",
+      "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.2.tgz",
+      "integrity": "sha512-3XeKcXIonfIbqvW7gPm/VLOhv1RHQ1dtTgSBCH6OUhCgSTii9bEVgu0PIms7UbLnXeMCKFzECfpbud8fJEvBbQ==",
       "dependencies": {
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.0.1",
@@ -9722,9 +9722,9 @@
     },
     "dependencies": {
       "@actions/cache": {
-        "version": "3.1.1",
-        "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.1.tgz",
-        "integrity": "sha512-gOUdNap8FvlpoQAMYWiNPi9Ltt7jKWv9RuUVKg9cp/vQA9qTXoKiBkTioUAgIejh/qf7jrojYn3lCyIRIsoSeQ==",
+        "version": "3.1.2",
+        "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.2.tgz",
+        "integrity": "sha512-3XeKcXIonfIbqvW7gPm/VLOhv1RHQ1dtTgSBCH6OUhCgSTii9bEVgu0PIms7UbLnXeMCKFzECfpbud8fJEvBbQ==",
        "requires": {
          "@actions/core": "^1.10.0",
          "@actions/exec": "^1.0.1",
@@ -23,7 +23,7 @@
   "author": "GitHub",
   "license": "MIT",
   "dependencies": {
-    "@actions/cache": "^3.1.1",
+    "@actions/cache": "^3.1.2",
     "@actions/core": "^1.10.0",
     "@actions/exec": "^1.1.1",
     "@actions/io": "^1.1.2"
@@ -11,6 +11,10 @@ inputs:
   restore-keys:
     description: 'An ordered list of keys to use for restoring stale cache if no cache hit occurred for key. Note `cache-hit` returns false in this case.'
     required: false
+  enableCrossOsArchive:
+    description: 'An optional boolean when enabled, allows windows runners to restore caches that were saved on other platforms'
+    default: 'false'
+    required: false
 outputs:
   cache-hit:
     description: 'A boolean value to indicate an exact match was found for the primary key'
@@ -11,6 +11,10 @@ inputs:
   upload-chunk-size:
     description: 'The chunk size used to split up large files during upload, in bytes'
     required: false
+  enableCrossOsArchive:
+    description: 'An optional boolean when enabled, allows windows runners to save caches that can be restored on other platforms'
+    default: 'false'
+    required: false
 runs:
   using: 'node16'
   main: '../dist/save-only/index.js'
@@ -2,7 +2,8 @@ export enum Inputs {
     Key = "key", // Input for cache, restore, save action
     Path = "path", // Input for cache, restore, save action
     RestoreKeys = "restore-keys", // Input for cache, restore action
-    UploadChunkSize = "upload-chunk-size" // Input for cache, save action
+    UploadChunkSize = "upload-chunk-size", // Input for cache, save action
+    EnableCrossOsArchive = "enableCrossOsArchive" // Input for cache, restore, save action
 }

 export enum Outputs {
@@ -31,11 +31,16 @@ async function restoreImpl(
         const cachePaths = utils.getInputAsArray(Inputs.Path, {
             required: true
         });
+        const enableCrossOsArchive = utils.getInputAsBool(
+            Inputs.EnableCrossOsArchive
+        );

         const cacheKey = await cache.restoreCache(
             cachePaths,
             primaryKey,
-            restoreKeys
+            restoreKeys,
+            {},
+            enableCrossOsArchive
         );

         if (!cacheKey) {
@@ -52,9 +52,16 @@ async function saveImpl(stateProvider: IStateProvider): Promise<number | void> {
             required: true
         });

-        cacheId = await cache.saveCache(cachePaths, primaryKey, {
-            uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
-        });
+        const enableCrossOsArchive = utils.getInputAsBool(
+            Inputs.EnableCrossOsArchive
+        );
+
+        cacheId = await cache.saveCache(
+            cachePaths,
+            primaryKey,
+            { uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize) },
+            enableCrossOsArchive
+        );

         if (cacheId != -1) {
             core.info(`Cache saved with key: ${primaryKey}`);
@@ -52,6 +52,14 @@ export function getInputAsInt(
     return value;
 }

+export function getInputAsBool(
+    name: string,
+    options?: core.InputOptions
+): boolean {
+    const result = core.getInput(name, options);
+    return result.toLowerCase() === "true";
+}
+
 export function isCacheFeatureAvailable(): boolean {
     if (cache.isFeatureAvailable()) {
         return true;
@@ -13,6 +13,7 @@ interface CacheInput {
     path: string;
     key: string;
     restoreKeys?: string[];
+    enableCrossOsArchive?: boolean;
 }

 export function setInputs(input: CacheInput): void {
@@ -20,6 +21,11 @@ export function setInputs(input: CacheInput): void {
     setInput(Inputs.Key, input.key);
     input.restoreKeys &&
         setInput(Inputs.RestoreKeys, input.restoreKeys.join("\n"));
+    input.enableCrossOsArchive !== undefined &&
+        setInput(
+            Inputs.EnableCrossOsArchive,
+            input.enableCrossOsArchive.toString()
+        );
 }

 export function clearInputs(): void {
@@ -27,4 +33,5 @@ export function clearInputs(): void {
     delete process.env[getInputName(Inputs.Key)];
     delete process.env[getInputName(Inputs.RestoreKeys)];
     delete process.env[getInputName(Inputs.UploadChunkSize)];
+    delete process.env[getInputName(Inputs.EnableCrossOsArchive)];
 }