Mirror of https://code.forgejo.org/actions/cache.git (synced 2024-11-30 23:59:16 +01:00)

Revert compression changes related to windows due to symlink issues

parent d1507cccba
commit c9cbdaf0ee

7 changed files with 404 additions and 876 deletions

.licenses/npm/@actions/cache.dep.yml (generated, 2 changes)

@@ -1,6 +1,6 @@
 ---
 name: "@actions/cache"
-version: 3.1.0
+version: 3.1.1
 type: npm
 summary:
 homepage:

dist/restore-only/index.js (vendored, 314 changes)

@@ -1177,6 +1177,10 @@ function getVersion(app) {
 // Use zstandard if possible to maximize cache performance
 function getCompressionMethod() {
 return __awaiter(this, void 0, void 0, function* () {
+if (process.platform === 'win32' && !(yield isGnuTarInstalled())) {
+// Disable zstd due to bug https://github.com/actions/cache/issues/301
+return constants_1.CompressionMethod.Gzip;
+}
 const versionOutput = yield getVersion('zstd');
 const version = semver.clean(versionOutput);
 if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
@@ -1200,16 +1204,13 @@ function getCacheFileName(compressionMethod) {
 : constants_1.CacheFilename.Zstd;
 }
 exports.getCacheFileName = getCacheFileName;
-function getGnuTarPathOnWindows() {
+function isGnuTarInstalled() {
 return __awaiter(this, void 0, void 0, function* () {
-if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {
-return constants_1.GnuTarPathOnWindows;
-}
 const versionOutput = yield getVersion('tar');
-return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
+return versionOutput.toLowerCase().includes('gnu tar');
 });
 }
-exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;
+exports.isGnuTarInstalled = isGnuTarInstalled;
 function assertDefined(name, value) {
 if (value === undefined) {
 throw Error(`Expected ${name} but value was undefiend`);
@@ -3431,7 +3432,6 @@ function getCacheEntry(keys, paths, options) {
 const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
 const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
 const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
-// Cache not found
 if (response.statusCode === 204) {
 // List cache for primary key only if cache miss occurs
 if (core.isDebug()) {
@@ -3445,7 +3445,6 @@ function getCacheEntry(keys, paths, options) {
 const cacheResult = response.result;
 const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
 if (!cacheDownloadUrl) {
-// Cache achiveLocation not found. This should never happen, and hence bail out.
 throw new Error('Cache not found.');
 }
 core.setSecret(cacheDownloadUrl);
@@ -38223,19 +38222,21 @@ const path = __importStar(__webpack_require__(622));
 const utils = __importStar(__webpack_require__(15));
 const constants_1 = __webpack_require__(931);
 const IS_WINDOWS = process.platform === 'win32';
-// Returns tar path and type: BSD or GNU
-function getTarPath() {
+function getTarPath(args, compressionMethod) {
 return __awaiter(this, void 0, void 0, function* () {
 switch (process.platform) {
 case 'win32': {
-const gnuTar = yield utils.getGnuTarPathOnWindows();
-const systemTar = constants_1.SystemTarPathOnWindows;
-if (gnuTar) {
-// Use GNUtar as default on windows
-return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
+const systemTar = `${process.env['windir']}\\System32\\tar.exe`;
+if (compressionMethod !== constants_1.CompressionMethod.Gzip) {
+// We only use zstandard compression on windows when gnu tar is installed due to
+// a bug with compressing large files with bsdtar + zstd
+args.push('--force-local');
 }
 else if (fs_1.existsSync(systemTar)) {
-return { path: systemTar, type: constants_1.ArchiveToolType.BSD };
+return systemTar;
+}
+else if (yield utils.isGnuTarInstalled()) {
+args.push('--force-local');
 }
 break;
 }
@@ -38243,92 +38244,25 @@ function getTarPath() {
 const gnuTar = yield io.which('gtar', false);
 if (gnuTar) {
 // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
-return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
-}
-else {
-return {
-path: yield io.which('tar', true),
-type: constants_1.ArchiveToolType.BSD
-};
+args.push('--delay-directory-restore');
+return gnuTar;
 }
+break;
 }
 default:
 break;
 }
-// Default assumption is GNU tar is present in path
-return {
-path: yield io.which('tar', true),
-type: constants_1.ArchiveToolType.GNU
-};
+return yield io.which('tar', true);
 });
 }
-// Return arguments for tar as per tarPath, compressionMethod, method type and os
-function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
+function execTar(args, compressionMethod, cwd) {
 return __awaiter(this, void 0, void 0, function* () {
-const args = [`"${tarPath.path}"`];
-const cacheFileName = utils.getCacheFileName(compressionMethod);
-const tarFile = 'cache.tar';
-const workingDirectory = getWorkingDirectory();
-// Speficic args for BSD tar on windows for workaround
-const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-compressionMethod !== constants_1.CompressionMethod.Gzip &&
-IS_WINDOWS;
-// Method specific args
-switch (type) {
-case 'create':
-args.push('--posix', '-cf', BSD_TAR_ZSTD
-? tarFile
-: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD
-? tarFile
-: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename);
-break;
-case 'extract':
-args.push('-xf', BSD_TAR_ZSTD
-? tarFile
-: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'));
-break;
-case 'list':
-args.push('-tf', BSD_TAR_ZSTD
-? tarFile
-: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P');
-break;
+try {
+yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd });
 }
-// Platform specific args
-if (tarPath.type === constants_1.ArchiveToolType.GNU) {
-switch (process.platform) {
-case 'win32':
-args.push('--force-local');
-break;
-case 'darwin':
-args.push('--delay-directory-restore');
-break;
-}
+catch (error) {
+throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
 }
-return args;
-});
-}
-// Returns commands to run tar and compression program
-function getCommands(compressionMethod, type, archivePath = '') {
-return __awaiter(this, void 0, void 0, function* () {
-let args;
-const tarPath = yield getTarPath();
-const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
-const compressionArgs = type !== 'create'
-? yield getDecompressionProgram(tarPath, compressionMethod, archivePath)
-: yield getCompressionProgram(tarPath, compressionMethod);
-const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-compressionMethod !== constants_1.CompressionMethod.Gzip &&
-IS_WINDOWS;
-if (BSD_TAR_ZSTD && type !== 'create') {
-args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
-}
-else {
-args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
-}
-if (BSD_TAR_ZSTD) {
-return args;
-}
-return [args.join(' ')];
 });
 }
 function getWorkingDirectory() {
@@ -38336,116 +38270,91 @@ function getWorkingDirectory() {
 return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
 }
 // Common function for extractTar and listTar to get the compression method
-function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
-return __awaiter(this, void 0, void 0, function* () {
-// -d: Decompress.
-// unzstd is equivalent to 'zstd -d'
-// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-// Using 30 here because we also support 32-bit self-hosted runners.
-const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-compressionMethod !== constants_1.CompressionMethod.Gzip &&
-IS_WINDOWS;
-switch (compressionMethod) {
-case constants_1.CompressionMethod.Zstd:
-return BSD_TAR_ZSTD
-? [
-'zstd -d --long=30 --force -o',
-constants_1.TarFilename,
-archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
-]
-: [
-'--use-compress-program',
-IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
-];
-case constants_1.CompressionMethod.ZstdWithoutLong:
-return BSD_TAR_ZSTD
-? [
-'zstd -d --force -o',
-constants_1.TarFilename,
-archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
-]
-: ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
-default:
-return ['-z'];
-}
-});
+function getCompressionProgram(compressionMethod) {
+// -d: Decompress.
+// unzstd is equivalent to 'zstd -d'
+// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+// Using 30 here because we also support 32-bit self-hosted runners.
+switch (compressionMethod) {
+case constants_1.CompressionMethod.Zstd:
+return [
+'--use-compress-program',
+IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'
+];
+case constants_1.CompressionMethod.ZstdWithoutLong:
+return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'];
+default:
+return ['-z'];
+}
 }
-// Used for creating the archive
-// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
-// zstdmt is equivalent to 'zstd -T0'
-// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-// Using 30 here because we also support 32-bit self-hosted runners.
-// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
-function getCompressionProgram(tarPath, compressionMethod) {
-return __awaiter(this, void 0, void 0, function* () {
-const cacheFileName = utils.getCacheFileName(compressionMethod);
-const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-compressionMethod !== constants_1.CompressionMethod.Gzip &&
-IS_WINDOWS;
-switch (compressionMethod) {
-case constants_1.CompressionMethod.Zstd:
-return BSD_TAR_ZSTD
-? [
-'zstd -T0 --long=30 --force -o',
-cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-constants_1.TarFilename
-]
-: [
-'--use-compress-program',
-IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
-];
-case constants_1.CompressionMethod.ZstdWithoutLong:
-return BSD_TAR_ZSTD
-? [
-'zstd -T0 --force -o',
-cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-constants_1.TarFilename
-]
-: ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt'];
-default:
-return ['-z'];
-}
-});
-}
-// Executes all commands as separate processes
-function execCommands(commands, cwd) {
-return __awaiter(this, void 0, void 0, function* () {
-for (const command of commands) {
-try {
-yield exec_1.exec(command, undefined, { cwd });
-}
-catch (error) {
-throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-}
-}
-});
-}
-// List the contents of a tar
 function listTar(archivePath, compressionMethod) {
 return __awaiter(this, void 0, void 0, function* () {
-const commands = yield getCommands(compressionMethod, 'list', archivePath);
-yield execCommands(commands);
+const args = [
+...getCompressionProgram(compressionMethod),
+'-tf',
+archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+'-P'
+];
+yield execTar(args, compressionMethod);
 });
 }
 exports.listTar = listTar;
-// Extract a tar
 function extractTar(archivePath, compressionMethod) {
 return __awaiter(this, void 0, void 0, function* () {
 // Create directory to extract tar into
 const workingDirectory = getWorkingDirectory();
 yield io.mkdirP(workingDirectory);
-const commands = yield getCommands(compressionMethod, 'extract', archivePath);
-yield execCommands(commands);
+const args = [
+...getCompressionProgram(compressionMethod),
+'-xf',
+archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+'-P',
+'-C',
+workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+];
+yield execTar(args, compressionMethod);
 });
 }
 exports.extractTar = extractTar;
-// Create a tar
 function createTar(archiveFolder, sourceDirectories, compressionMethod) {
 return __awaiter(this, void 0, void 0, function* () {
 // Write source directories to manifest.txt to avoid command length limits
-fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
-const commands = yield getCommands(compressionMethod, 'create');
-yield execCommands(commands, archiveFolder);
+const manifestFilename = 'manifest.txt';
+const cacheFileName = utils.getCacheFileName(compressionMethod);
+fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n'));
+const workingDirectory = getWorkingDirectory();
+// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
+// zstdmt is equivalent to 'zstd -T0'
+// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+// Using 30 here because we also support 32-bit self-hosted runners.
+// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
+function getCompressionProgram() {
+switch (compressionMethod) {
+case constants_1.CompressionMethod.Zstd:
+return [
+'--use-compress-program',
+IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
+];
+case constants_1.CompressionMethod.ZstdWithoutLong:
+return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
+default:
+return ['-z'];
+}
+}
+const args = [
+'--posix',
+...getCompressionProgram(),
+'-cf',
+cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+'--exclude',
+cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+'-P',
+'-C',
+workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+'--files-from',
+manifestFilename
+];
+yield execTar(args, compressionMethod, archiveFolder);
 });
 }
 exports.createTar = createTar;
@@ -47195,7 +47104,6 @@ const path = __importStar(__webpack_require__(622));
 const utils = __importStar(__webpack_require__(15));
 const cacheHttpClient = __importStar(__webpack_require__(114));
 const tar_1 = __webpack_require__(434);
-const constants_1 = __webpack_require__(931);
 class ValidationError extends Error {
 constructor(message) {
 super(message);
@@ -47257,31 +47165,16 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
 for (const key of keys) {
 checkKey(key);
 }
-let cacheEntry;
-let compressionMethod = yield utils.getCompressionMethod();
+const compressionMethod = yield utils.getCompressionMethod();
 let archivePath = '';
 try {
 // path are needed to compute version
-cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
+const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
 compressionMethod
 });
 if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-// This is to support the old cache entry created by gzip on windows.
-if (process.platform === 'win32' &&
-compressionMethod !== constants_1.CompressionMethod.Gzip) {
-compressionMethod = constants_1.CompressionMethod.Gzip;
-cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
-compressionMethod
-});
-if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-return undefined;
-}
-core.info("Couldn't find cache entry with zstd compression, falling back to gzip compression.");
-}
-else {
-// Cache not found
-return undefined;
-}
+// Cache not found
+return undefined;
 }
 archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
 core.debug(`Archive Path: ${archivePath}`);
@@ -53362,11 +53255,6 @@ var CompressionMethod;
 CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
 CompressionMethod["Zstd"] = "zstd";
 })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
-var ArchiveToolType;
-(function (ArchiveToolType) {
-ArchiveToolType["GNU"] = "gnu";
-ArchiveToolType["BSD"] = "bsd";
-})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
 // The default number of retry attempts.
 exports.DefaultRetryAttempts = 2;
 // The default delay in milliseconds between retry attempts.
@@ -53375,12 +53263,6 @@ exports.DefaultRetryDelay = 5000;
 // over the socket during this period, the socket is destroyed and the download
 // is aborted.
 exports.SocketTimeout = 5000;
-// The default path of GNUtar on hosted Windows runners
-exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;
-// The default path of BSDtar on hosted Windows runners
-exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
-exports.TarFilename = 'cache.tar';
-exports.ManifestFilename = 'manifest.txt';
 //# sourceMappingURL=constants.js.map
 
 /***/ }),

dist/restore/index.js (vendored, 314 changes)

@@ -1177,6 +1177,10 @@ function getVersion(app) {
 // Use zstandard if possible to maximize cache performance
 function getCompressionMethod() {
 return __awaiter(this, void 0, void 0, function* () {
+if (process.platform === 'win32' && !(yield isGnuTarInstalled())) {
+// Disable zstd due to bug https://github.com/actions/cache/issues/301
+return constants_1.CompressionMethod.Gzip;
+}
 const versionOutput = yield getVersion('zstd');
 const version = semver.clean(versionOutput);
 if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
@@ -1200,16 +1204,13 @@ function getCacheFileName(compressionMethod) {
 : constants_1.CacheFilename.Zstd;
 }
 exports.getCacheFileName = getCacheFileName;
-function getGnuTarPathOnWindows() {
+function isGnuTarInstalled() {
 return __awaiter(this, void 0, void 0, function* () {
-if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {
-return constants_1.GnuTarPathOnWindows;
-}
 const versionOutput = yield getVersion('tar');
-return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
+return versionOutput.toLowerCase().includes('gnu tar');
 });
 }
-exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;
+exports.isGnuTarInstalled = isGnuTarInstalled;
 function assertDefined(name, value) {
 if (value === undefined) {
 throw Error(`Expected ${name} but value was undefiend`);
@@ -3431,7 +3432,6 @@ function getCacheEntry(keys, paths, options) {
 const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
 const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
 const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
-// Cache not found
 if (response.statusCode === 204) {
 // List cache for primary key only if cache miss occurs
 if (core.isDebug()) {
@@ -3445,7 +3445,6 @@ function getCacheEntry(keys, paths, options) {
 const cacheResult = response.result;
 const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
 if (!cacheDownloadUrl) {
-// Cache achiveLocation not found. This should never happen, and hence bail out.
 throw new Error('Cache not found.');
 }
 core.setSecret(cacheDownloadUrl);
@@ -38136,19 +38135,21 @@ const path = __importStar(__webpack_require__(622));
 const utils = __importStar(__webpack_require__(15));
 const constants_1 = __webpack_require__(931);
 const IS_WINDOWS = process.platform === 'win32';
-// Returns tar path and type: BSD or GNU
-function getTarPath() {
+function getTarPath(args, compressionMethod) {
 return __awaiter(this, void 0, void 0, function* () {
 switch (process.platform) {
 case 'win32': {
-const gnuTar = yield utils.getGnuTarPathOnWindows();
-const systemTar = constants_1.SystemTarPathOnWindows;
-if (gnuTar) {
-// Use GNUtar as default on windows
-return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
+const systemTar = `${process.env['windir']}\\System32\\tar.exe`;
+if (compressionMethod !== constants_1.CompressionMethod.Gzip) {
+// We only use zstandard compression on windows when gnu tar is installed due to
+// a bug with compressing large files with bsdtar + zstd
+args.push('--force-local');
 }
 else if (fs_1.existsSync(systemTar)) {
-return { path: systemTar, type: constants_1.ArchiveToolType.BSD };
+return systemTar;
+}
+else if (yield utils.isGnuTarInstalled()) {
+args.push('--force-local');
 }
 break;
 }
@@ -38156,92 +38157,25 @@ function getTarPath() {
 const gnuTar = yield io.which('gtar', false);
 if (gnuTar) {
 // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
-return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
-}
-else {
-return {
-path: yield io.which('tar', true),
-type: constants_1.ArchiveToolType.BSD
-};
+args.push('--delay-directory-restore');
+return gnuTar;
 }
+break;
 }
 default:
 break;
 }
-// Default assumption is GNU tar is present in path
-return {
-path: yield io.which('tar', true),
-type: constants_1.ArchiveToolType.GNU
-};
+return yield io.which('tar', true);
 });
 }
-// Return arguments for tar as per tarPath, compressionMethod, method type and os
-function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
+function execTar(args, compressionMethod, cwd) {
 return __awaiter(this, void 0, void 0, function* () {
-const args = [`"${tarPath.path}"`];
-const cacheFileName = utils.getCacheFileName(compressionMethod);
-const tarFile = 'cache.tar';
-const workingDirectory = getWorkingDirectory();
-// Speficic args for BSD tar on windows for workaround
-const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-compressionMethod !== constants_1.CompressionMethod.Gzip &&
-IS_WINDOWS;
-// Method specific args
-switch (type) {
-case 'create':
-args.push('--posix', '-cf', BSD_TAR_ZSTD
-? tarFile
-: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD
-? tarFile
-: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename);
-break;
-case 'extract':
-args.push('-xf', BSD_TAR_ZSTD
-? tarFile
-: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'));
-break;
-case 'list':
-args.push('-tf', BSD_TAR_ZSTD
-? tarFile
-: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P');
-break;
+try {
+yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd });
 }
-// Platform specific args
-if (tarPath.type === constants_1.ArchiveToolType.GNU) {
-switch (process.platform) {
-case 'win32':
-args.push('--force-local');
-break;
-case 'darwin':
-args.push('--delay-directory-restore');
-break;
-}
+catch (error) {
+throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
 }
-return args;
-});
-}
-// Returns commands to run tar and compression program
-function getCommands(compressionMethod, type, archivePath = '') {
-return __awaiter(this, void 0, void 0, function* () {
-let args;
-const tarPath = yield getTarPath();
-const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
-const compressionArgs = type !== 'create'
-? yield getDecompressionProgram(tarPath, compressionMethod, archivePath)
-: yield getCompressionProgram(tarPath, compressionMethod);
-const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-compressionMethod !== constants_1.CompressionMethod.Gzip &&
-IS_WINDOWS;
-if (BSD_TAR_ZSTD && type !== 'create') {
-args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
-}
-else {
-args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
-}
-if (BSD_TAR_ZSTD) {
-return args;
-}
-return [args.join(' ')];
 });
 }
 function getWorkingDirectory() {
@@ -38249,116 +38183,91 @@ function getWorkingDirectory() {
 return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
 }
 // Common function for extractTar and listTar to get the compression method
-function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
-return __awaiter(this, void 0, void 0, function* () {
-// -d: Decompress.
-// unzstd is equivalent to 'zstd -d'
-// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-// Using 30 here because we also support 32-bit self-hosted runners.
-const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-compressionMethod !== constants_1.CompressionMethod.Gzip &&
-IS_WINDOWS;
-switch (compressionMethod) {
-case constants_1.CompressionMethod.Zstd:
-return BSD_TAR_ZSTD
-? [
-'zstd -d --long=30 --force -o',
-constants_1.TarFilename,
-archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
-]
-: [
-'--use-compress-program',
-IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
-];
-case constants_1.CompressionMethod.ZstdWithoutLong:
-return BSD_TAR_ZSTD
-? [
-'zstd -d --force -o',
-constants_1.TarFilename,
-archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
-]
-: ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
-default:
-return ['-z'];
-}
-});
+function getCompressionProgram(compressionMethod) {
+// -d: Decompress.
+// unzstd is equivalent to 'zstd -d'
+// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+// Using 30 here because we also support 32-bit self-hosted runners.
+switch (compressionMethod) {
+case constants_1.CompressionMethod.Zstd:
+return [
+'--use-compress-program',
+IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'
+];
+case constants_1.CompressionMethod.ZstdWithoutLong:
+return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'];
+default:
+return ['-z'];
+}
 }
-// Used for creating the archive
-// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
-// zstdmt is equivalent to 'zstd -T0'
-// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-// Using 30 here because we also support 32-bit self-hosted runners.
-// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
-function getCompressionProgram(tarPath, compressionMethod) {
-return __awaiter(this, void 0, void 0, function* () {
-const cacheFileName = utils.getCacheFileName(compressionMethod);
-const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-compressionMethod !== constants_1.CompressionMethod.Gzip &&
-IS_WINDOWS;
-switch (compressionMethod) {
-case constants_1.CompressionMethod.Zstd:
-return BSD_TAR_ZSTD
-? [
-'zstd -T0 --long=30 --force -o',
-cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-constants_1.TarFilename
-]
-: [
-'--use-compress-program',
-IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
-];
-case constants_1.CompressionMethod.ZstdWithoutLong:
-return BSD_TAR_ZSTD
-? [
-'zstd -T0 --force -o',
-cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-constants_1.TarFilename
-]
-: ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt'];
-default:
-return ['-z'];
-}
-});
-}
-// Executes all commands as separate processes
-function execCommands(commands, cwd) {
-return __awaiter(this, void 0, void 0, function* () {
-for (const command of commands) {
-try {
-yield exec_1.exec(command, undefined, { cwd });
-}
-catch (error) {
-throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-}
-}
-});
-}
-// List the contents of a tar
 function listTar(archivePath, compressionMethod) {
 return __awaiter(this, void 0, void 0, function* () {
-const commands = yield getCommands(compressionMethod, 'list', archivePath);
-yield execCommands(commands);
+const args = [
+...getCompressionProgram(compressionMethod),
+'-tf',
+archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+'-P'
+];
+yield execTar(args, compressionMethod);
 });
 }
 exports.listTar = listTar;
-// Extract a tar
 function extractTar(archivePath, compressionMethod) {
 return __awaiter(this, void 0, void 0, function* () {
 // Create directory to extract tar into
 const workingDirectory = getWorkingDirectory();
 yield io.mkdirP(workingDirectory);
-const commands = yield getCommands(compressionMethod, 'extract', archivePath);
-yield execCommands(commands);
+const args = [
+...getCompressionProgram(compressionMethod),
+'-xf',
+archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+'-P',
+'-C',
+workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+];
+yield execTar(args, compressionMethod);
 });
 }
 exports.extractTar = extractTar;
-// Create a tar
 function createTar(archiveFolder, sourceDirectories, compressionMethod) {
 return __awaiter(this, void 0, void 0, function* () {
 // Write source directories to manifest.txt to avoid command length limits
-fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
-const commands = yield getCommands(compressionMethod, 'create');
-yield execCommands(commands, archiveFolder);
+const manifestFilename = 'manifest.txt';
+const cacheFileName = utils.getCacheFileName(compressionMethod);
+fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n'));
+const workingDirectory = getWorkingDirectory();
+// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
+// zstdmt is equivalent to 'zstd -T0'
+// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+// Using 30 here because we also support 32-bit self-hosted runners.
+// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
+function getCompressionProgram() {
+switch (compressionMethod) {
+case constants_1.CompressionMethod.Zstd:
+return [
+'--use-compress-program',
+IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
+];
+case constants_1.CompressionMethod.ZstdWithoutLong:
+return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
+default:
+return ['-z'];
+}
+}
+const args = [
+'--posix',
+...getCompressionProgram(),
+'-cf',
+cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+'--exclude',
+cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+'-P',
+'-C',
+workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+'--files-from',
+manifestFilename
+];
+yield execTar(args, compressionMethod, archiveFolder);
 });
 }
 exports.createTar = createTar;
@@ -47166,7 +47075,6 @@ const path = __importStar(__webpack_require__(622));
 const utils = __importStar(__webpack_require__(15));
 const cacheHttpClient = __importStar(__webpack_require__(114));
 const tar_1 = __webpack_require__(434);
-const constants_1 = __webpack_require__(931);
 class ValidationError extends Error {
 constructor(message) {
 super(message);
@@ -47228,31 +47136,16 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
 for (const key of keys) {
 checkKey(key);
 }
-let cacheEntry;
-let compressionMethod = yield utils.getCompressionMethod();
+const compressionMethod = yield utils.getCompressionMethod();
 let archivePath = '';
 try {
 // path are needed to compute version
-cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
+const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
 compressionMethod
 });
 if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-// This is to support the old cache entry created by gzip on windows.
-if (process.platform === 'win32' &&
-compressionMethod !== constants_1.CompressionMethod.Gzip) {
-compressionMethod = constants_1.CompressionMethod.Gzip;
-cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
-compressionMethod
-});
-if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-return undefined;
-}
-core.info("Couldn't find cache entry with zstd compression, falling back to gzip compression.");
-}
-else {
-// Cache not found
-return undefined;
-}
+// Cache not found
+return undefined;
 }
 archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
 core.debug(`Archive Path: ${archivePath}`);
@@ -53362,11 +53255,6 @@ var CompressionMethod;
 CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
 CompressionMethod["Zstd"] = "zstd";
 })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
-var ArchiveToolType;
-(function (ArchiveToolType) {
-ArchiveToolType["GNU"] = "gnu";
-ArchiveToolType["BSD"] = "bsd";
-})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
 // The default number of retry attempts.
 exports.DefaultRetryAttempts = 2;
 // The default delay in milliseconds between retry attempts.
@@ -53375,12 +53263,6 @@ exports.DefaultRetryDelay = 5000;
 // over the socket during this period, the socket is destroyed and the download
 // is aborted.
 exports.SocketTimeout = 5000;
-// The default path of GNUtar on hosted Windows runners
-exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;
-// The default path of BSDtar on hosted Windows runners
-exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
-exports.TarFilename = 'cache.tar';
-exports.ManifestFilename = 'manifest.txt';
 //# sourceMappingURL=constants.js.map
 
 /***/ }),
314
dist/save-only/index.js
vendored
314
dist/save-only/index.js
vendored
|
@ -1233,6 +1233,10 @@ function getVersion(app) {
|
||||||
// Use zstandard if possible to maximize cache performance
|
// Use zstandard if possible to maximize cache performance
|
||||||
function getCompressionMethod() {
|
function getCompressionMethod() {
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
|
if (process.platform === 'win32' && !(yield isGnuTarInstalled())) {
|
||||||
|
// Disable zstd due to bug https://github.com/actions/cache/issues/301
|
||||||
|
return constants_1.CompressionMethod.Gzip;
|
||||||
|
}
|
||||||
const versionOutput = yield getVersion('zstd');
|
const versionOutput = yield getVersion('zstd');
|
||||||
const version = semver.clean(versionOutput);
|
const version = semver.clean(versionOutput);
|
||||||
if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
|
if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
|
||||||
|
@ -1256,16 +1260,13 @@ function getCacheFileName(compressionMethod) {
|
||||||
: constants_1.CacheFilename.Zstd;
|
: constants_1.CacheFilename.Zstd;
|
||||||
}
|
}
|
||||||
exports.getCacheFileName = getCacheFileName;
|
exports.getCacheFileName = getCacheFileName;
|
||||||
function getGnuTarPathOnWindows() {
|
function isGnuTarInstalled() {
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {
|
|
||||||
return constants_1.GnuTarPathOnWindows;
|
|
||||||
}
|
|
||||||
const versionOutput = yield getVersion('tar');
|
const versionOutput = yield getVersion('tar');
|
||||||
return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
|
return versionOutput.toLowerCase().includes('gnu tar');
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;
|
exports.isGnuTarInstalled = isGnuTarInstalled;
|
||||||
function assertDefined(name, value) {
|
function assertDefined(name, value) {
|
||||||
if (value === undefined) {
|
if (value === undefined) {
|
||||||
throw Error(`Expected ${name} but value was undefiend`);
|
throw Error(`Expected ${name} but value was undefiend`);
|
||||||
|
@ -3487,7 +3488,6 @@ function getCacheEntry(keys, paths, options) {
|
||||||
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
|
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
|
||||||
const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
|
const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
|
||||||
const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
|
const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
|
||||||
// Cache not found
|
|
||||||
if (response.statusCode === 204) {
|
if (response.statusCode === 204) {
|
||||||
// List cache for primary key only if cache miss occurs
|
// List cache for primary key only if cache miss occurs
|
||||||
if (core.isDebug()) {
|
if (core.isDebug()) {
|
||||||
|
@ -3501,7 +3501,6 @@ function getCacheEntry(keys, paths, options) {
|
||||||
const cacheResult = response.result;
|
const cacheResult = response.result;
|
||||||
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
|
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
|
||||||
if (!cacheDownloadUrl) {
|
if (!cacheDownloadUrl) {
|
||||||
// Cache achiveLocation not found. This should never happen, and hence bail out.
|
|
||||||
throw new Error('Cache not found.');
|
throw new Error('Cache not found.');
|
||||||
}
|
}
|
||||||
core.setSecret(cacheDownloadUrl);
|
core.setSecret(cacheDownloadUrl);
|
||||||
|
@ -38187,19 +38186,21 @@ const path = __importStar(__webpack_require__(622));
|
||||||
const utils = __importStar(__webpack_require__(15));
|
const utils = __importStar(__webpack_require__(15));
|
||||||
const constants_1 = __webpack_require__(931);
|
const constants_1 = __webpack_require__(931);
|
||||||
const IS_WINDOWS = process.platform === 'win32';
|
const IS_WINDOWS = process.platform === 'win32';
|
||||||
// Returns tar path and type: BSD or GNU
|
function getTarPath(args, compressionMethod) {
|
||||||
function getTarPath() {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
switch (process.platform) {
|
switch (process.platform) {
|
||||||
case 'win32': {
|
case 'win32': {
|
||||||
const gnuTar = yield utils.getGnuTarPathOnWindows();
|
const systemTar = `${process.env['windir']}\\System32\\tar.exe`;
|
||||||
const systemTar = constants_1.SystemTarPathOnWindows;
|
if (compressionMethod !== constants_1.CompressionMethod.Gzip) {
|
||||||
if (gnuTar) {
|
// We only use zstandard compression on windows when gnu tar is installed due to
|
||||||
// Use GNUtar as default on windows
|
// a bug with compressing large files with bsdtar + zstd
|
||||||
return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
|
args.push('--force-local');
|
||||||
}
|
}
|
||||||
else if (fs_1.existsSync(systemTar)) {
|
else if (fs_1.existsSync(systemTar)) {
|
||||||
return { path: systemTar, type: constants_1.ArchiveToolType.BSD };
|
return systemTar;
|
||||||
|
}
|
||||||
|
else if (yield utils.isGnuTarInstalled()) {
|
||||||
|
args.push('--force-local');
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
@@ -38207,92 +38208,25 @@ function getTarPath() {
                const gnuTar = yield io.which('gtar', false);
                if (gnuTar) {
                    // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
-                    return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
+                    args.push('--delay-directory-restore');
-                }
+                    return gnuTar;
-                else {
-                    return {
-                        path: yield io.which('tar', true),
-                        type: constants_1.ArchiveToolType.BSD
-                    };
                }
+                break;
            }
            default:
                break;
        }
-        // Default assumption is GNU tar is present in path
+        return yield io.which('tar', true);
-        return {
-            path: yield io.which('tar', true),
-            type: constants_1.ArchiveToolType.GNU
-        };
    });
}
-// Return arguments for tar as per tarPath, compressionMethod, method type and os
+function execTar(args, compressionMethod, cwd) {
-function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
    return __awaiter(this, void 0, void 0, function* () {
-        const args = [`"${tarPath.path}"`];
+        try {
-        const cacheFileName = utils.getCacheFileName(compressionMethod);
+            yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd });
-        const tarFile = 'cache.tar';
-        const workingDirectory = getWorkingDirectory();
-        // Speficic args for BSD tar on windows for workaround
-        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-            compressionMethod !== constants_1.CompressionMethod.Gzip &&
-            IS_WINDOWS;
-        // Method specific args
-        switch (type) {
-            case 'create':
-                args.push('--posix', '-cf', BSD_TAR_ZSTD
-                    ? tarFile
-                    : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD
-                    ? tarFile
-                    : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename);
-                break;
-            case 'extract':
-                args.push('-xf', BSD_TAR_ZSTD
-                    ? tarFile
-                    : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'));
-                break;
-            case 'list':
-                args.push('-tf', BSD_TAR_ZSTD
-                    ? tarFile
-                    : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P');
-                break;
        }
-        // Platform specific args
+        catch (error) {
-        if (tarPath.type === constants_1.ArchiveToolType.GNU) {
+            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-            switch (process.platform) {
-                case 'win32':
-                    args.push('--force-local');
-                    break;
-                case 'darwin':
-                    args.push('--delay-directory-restore');
-                    break;
-            }
        }
-        return args;
-    });
-}
-// Returns commands to run tar and compression program
-function getCommands(compressionMethod, type, archivePath = '') {
-    return __awaiter(this, void 0, void 0, function* () {
-        let args;
-        const tarPath = yield getTarPath();
-        const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
-        const compressionArgs = type !== 'create'
-            ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath)
-            : yield getCompressionProgram(tarPath, compressionMethod);
-        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-            compressionMethod !== constants_1.CompressionMethod.Gzip &&
-            IS_WINDOWS;
-        if (BSD_TAR_ZSTD && type !== 'create') {
-            args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
-        }
-        else {
-            args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
-        }
-        if (BSD_TAR_ZSTD) {
-            return args;
-        }
-        return [args.join(' ')];
    });
}
function getWorkingDirectory() {
@@ -38300,116 +38234,91 @@ function getWorkingDirectory() {
    return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
}
// Common function for extractTar and listTar to get the compression method
-function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
+function getCompressionProgram(compressionMethod) {
-    return __awaiter(this, void 0, void 0, function* () {
+    // -d: Decompress.
-        // -d: Decompress.
+    // unzstd is equivalent to 'zstd -d'
-        // unzstd is equivalent to 'zstd -d'
+    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+    // Using 30 here because we also support 32-bit self-hosted runners.
-        // Using 30 here because we also support 32-bit self-hosted runners.
+    switch (compressionMethod) {
-        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+        case constants_1.CompressionMethod.Zstd:
-            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            return [
-            IS_WINDOWS;
+                '--use-compress-program',
-        switch (compressionMethod) {
+                IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'
-            case constants_1.CompressionMethod.Zstd:
+            ];
-                return BSD_TAR_ZSTD
+        case constants_1.CompressionMethod.ZstdWithoutLong:
-                    ? [
+            return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'];
-                        'zstd -d --long=30 --force -o',
+        default:
-                        constants_1.TarFilename,
+            return ['-z'];
-                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+    }
-                    ]
-                    : [
-                        '--use-compress-program',
-                        IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
-                    ];
-            case constants_1.CompressionMethod.ZstdWithoutLong:
-                return BSD_TAR_ZSTD
-                    ? [
-                        'zstd -d --force -o',
-                        constants_1.TarFilename,
-                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
-                    ]
-                    : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
-            default:
-                return ['-z'];
-        }
-    });
}
-// Used for creating the archive
-// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
-// zstdmt is equivalent to 'zstd -T0'
-// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-// Using 30 here because we also support 32-bit self-hosted runners.
-// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
-function getCompressionProgram(tarPath, compressionMethod) {
-    return __awaiter(this, void 0, void 0, function* () {
-        const cacheFileName = utils.getCacheFileName(compressionMethod);
-        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-            compressionMethod !== constants_1.CompressionMethod.Gzip &&
-            IS_WINDOWS;
-        switch (compressionMethod) {
-            case constants_1.CompressionMethod.Zstd:
-                return BSD_TAR_ZSTD
-                    ? [
-                        'zstd -T0 --long=30 --force -o',
-                        cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                        constants_1.TarFilename
-                    ]
-                    : [
-                        '--use-compress-program',
-                        IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
-                    ];
-            case constants_1.CompressionMethod.ZstdWithoutLong:
-                return BSD_TAR_ZSTD
-                    ? [
-                        'zstd -T0 --force -o',
-                        cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                        constants_1.TarFilename
-                    ]
-                    : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt'];
-            default:
-                return ['-z'];
-        }
-    });
-}
-// Executes all commands as separate processes
-function execCommands(commands, cwd) {
-    return __awaiter(this, void 0, void 0, function* () {
-        for (const command of commands) {
-            try {
-                yield exec_1.exec(command, undefined, { cwd });
-            }
-            catch (error) {
-                throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-            }
-        }
-    });
-}
-// List the contents of a tar
function listTar(archivePath, compressionMethod) {
    return __awaiter(this, void 0, void 0, function* () {
-        const commands = yield getCommands(compressionMethod, 'list', archivePath);
+        const args = [
-        yield execCommands(commands);
+            ...getCompressionProgram(compressionMethod),
+            '-tf',
+            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '-P'
+        ];
+        yield execTar(args, compressionMethod);
    });
}
exports.listTar = listTar;
-// Extract a tar
function extractTar(archivePath, compressionMethod) {
    return __awaiter(this, void 0, void 0, function* () {
        // Create directory to extract tar into
        const workingDirectory = getWorkingDirectory();
        yield io.mkdirP(workingDirectory);
-        const commands = yield getCommands(compressionMethod, 'extract', archivePath);
+        const args = [
-        yield execCommands(commands);
+            ...getCompressionProgram(compressionMethod),
+            '-xf',
+            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '-P',
+            '-C',
+            workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+        ];
+        yield execTar(args, compressionMethod);
    });
}
exports.extractTar = extractTar;
-// Create a tar
function createTar(archiveFolder, sourceDirectories, compressionMethod) {
    return __awaiter(this, void 0, void 0, function* () {
        // Write source directories to manifest.txt to avoid command length limits
-        fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
+        const manifestFilename = 'manifest.txt';
-        const commands = yield getCommands(compressionMethod, 'create');
+        const cacheFileName = utils.getCacheFileName(compressionMethod);
-        yield execCommands(commands, archiveFolder);
+        fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n'));
+        const workingDirectory = getWorkingDirectory();
+        // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
+        // zstdmt is equivalent to 'zstd -T0'
+        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+        // Using 30 here because we also support 32-bit self-hosted runners.
+        // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
+        function getCompressionProgram() {
+            switch (compressionMethod) {
+                case constants_1.CompressionMethod.Zstd:
+                    return [
+                        '--use-compress-program',
+                        IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
+                    ];
+                case constants_1.CompressionMethod.ZstdWithoutLong:
+                    return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
+                default:
+                    return ['-z'];
+            }
+        }
+        const args = [
+            '--posix',
+            ...getCompressionProgram(),
+            '-cf',
+            cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '--exclude',
+            cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '-P',
+            '-C',
+            workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '--files-from',
+            manifestFilename
+        ];
+        yield execTar(args, compressionMethod, archiveFolder);
    });
}
exports.createTar = createTar;
@@ -47308,7 +47217,6 @@ const path = __importStar(__webpack_require__(622));
const utils = __importStar(__webpack_require__(15));
const cacheHttpClient = __importStar(__webpack_require__(114));
const tar_1 = __webpack_require__(434);
-const constants_1 = __webpack_require__(931);
class ValidationError extends Error {
    constructor(message) {
        super(message);
@@ -47370,31 +47278,16 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
        for (const key of keys) {
            checkKey(key);
        }
-        let cacheEntry;
+        const compressionMethod = yield utils.getCompressionMethod();
-        let compressionMethod = yield utils.getCompressionMethod();
        let archivePath = '';
        try {
            // path are needed to compute version
-            cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
+            const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
                compressionMethod
            });
            if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-                // This is to support the old cache entry created by gzip on windows.
+                // Cache not found
-                if (process.platform === 'win32' &&
+                return undefined;
-                    compressionMethod !== constants_1.CompressionMethod.Gzip) {
-                    compressionMethod = constants_1.CompressionMethod.Gzip;
-                    cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
-                        compressionMethod
-                    });
-                    if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-                        return undefined;
-                    }
-                    core.info("Couldn't find cache entry with zstd compression, falling back to gzip compression.");
-                }
-                else {
-                    // Cache not found
-                    return undefined;
-                }
            }
            archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
            core.debug(`Archive Path: ${archivePath}`);
@@ -53397,11 +53290,6 @@ var CompressionMethod;
    CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
    CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
-var ArchiveToolType;
-(function (ArchiveToolType) {
-    ArchiveToolType["GNU"] = "gnu";
-    ArchiveToolType["BSD"] = "bsd";
-})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
// The default number of retry attempts.
exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts.
@@ -53410,12 +53298,6 @@ exports.DefaultRetryDelay = 5000;
// over the socket during this period, the socket is destroyed and the download
// is aborted.
exports.SocketTimeout = 5000;
-// The default path of GNUtar on hosted Windows runners
-exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;
-// The default path of BSDtar on hosted Windows runners
-exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
-exports.TarFilename = 'cache.tar';
-exports.ManifestFilename = 'manifest.txt';
//# sourceMappingURL=constants.js.map

/***/ }),
314 dist/save/index.js vendored
@@ -1177,6 +1177,10 @@ function getVersion(app) {
// Use zstandard if possible to maximize cache performance
function getCompressionMethod() {
    return __awaiter(this, void 0, void 0, function* () {
+        if (process.platform === 'win32' && !(yield isGnuTarInstalled())) {
+            // Disable zstd due to bug https://github.com/actions/cache/issues/301
+            return constants_1.CompressionMethod.Gzip;
+        }
        const versionOutput = yield getVersion('zstd');
        const version = semver.clean(versionOutput);
        if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
@@ -1200,16 +1204,13 @@ function getCacheFileName(compressionMethod) {
        : constants_1.CacheFilename.Zstd;
}
exports.getCacheFileName = getCacheFileName;
-function getGnuTarPathOnWindows() {
+function isGnuTarInstalled() {
    return __awaiter(this, void 0, void 0, function* () {
-        if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {
-            return constants_1.GnuTarPathOnWindows;
-        }
        const versionOutput = yield getVersion('tar');
-        return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
+        return versionOutput.toLowerCase().includes('gnu tar');
    });
}
-exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;
+exports.isGnuTarInstalled = isGnuTarInstalled;
function assertDefined(name, value) {
    if (value === undefined) {
        throw Error(`Expected ${name} but value was undefiend`);
@@ -3431,7 +3432,6 @@ function getCacheEntry(keys, paths, options) {
        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
        const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
        const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
-        // Cache not found
        if (response.statusCode === 204) {
            // List cache for primary key only if cache miss occurs
            if (core.isDebug()) {
@@ -3445,7 +3445,6 @@ function getCacheEntry(keys, paths, options) {
        const cacheResult = response.result;
        const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
        if (!cacheDownloadUrl) {
-            // Cache achiveLocation not found. This should never happen, and hence bail out.
            throw new Error('Cache not found.');
        }
        core.setSecret(cacheDownloadUrl);
@@ -38131,19 +38130,21 @@ const path = __importStar(__webpack_require__(622));
const utils = __importStar(__webpack_require__(15));
const constants_1 = __webpack_require__(931);
const IS_WINDOWS = process.platform === 'win32';
-// Returns tar path and type: BSD or GNU
+function getTarPath(args, compressionMethod) {
-function getTarPath() {
    return __awaiter(this, void 0, void 0, function* () {
        switch (process.platform) {
            case 'win32': {
-                const gnuTar = yield utils.getGnuTarPathOnWindows();
+                const systemTar = `${process.env['windir']}\\System32\\tar.exe`;
-                const systemTar = constants_1.SystemTarPathOnWindows;
+                if (compressionMethod !== constants_1.CompressionMethod.Gzip) {
-                if (gnuTar) {
+                    // We only use zstandard compression on windows when gnu tar is installed due to
-                    // Use GNUtar as default on windows
+                    // a bug with compressing large files with bsdtar + zstd
-                    return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
+                    args.push('--force-local');
                }
                else if (fs_1.existsSync(systemTar)) {
-                    return { path: systemTar, type: constants_1.ArchiveToolType.BSD };
+                    return systemTar;
+                }
+                else if (yield utils.isGnuTarInstalled()) {
+                    args.push('--force-local');
                }
                break;
            }
@@ -38151,92 +38152,25 @@ function getTarPath() {
                const gnuTar = yield io.which('gtar', false);
                if (gnuTar) {
                    // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
-                    return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
+                    args.push('--delay-directory-restore');
-                }
+                    return gnuTar;
-                else {
-                    return {
-                        path: yield io.which('tar', true),
-                        type: constants_1.ArchiveToolType.BSD
-                    };
                }
+                break;
            }
            default:
                break;
        }
-        // Default assumption is GNU tar is present in path
+        return yield io.which('tar', true);
-        return {
-            path: yield io.which('tar', true),
-            type: constants_1.ArchiveToolType.GNU
-        };
    });
}
-// Return arguments for tar as per tarPath, compressionMethod, method type and os
+function execTar(args, compressionMethod, cwd) {
-function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
    return __awaiter(this, void 0, void 0, function* () {
-        const args = [`"${tarPath.path}"`];
+        try {
-        const cacheFileName = utils.getCacheFileName(compressionMethod);
+            yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd });
-        const tarFile = 'cache.tar';
-        const workingDirectory = getWorkingDirectory();
-        // Speficic args for BSD tar on windows for workaround
-        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-            compressionMethod !== constants_1.CompressionMethod.Gzip &&
-            IS_WINDOWS;
-        // Method specific args
-        switch (type) {
-            case 'create':
-                args.push('--posix', '-cf', BSD_TAR_ZSTD
-                    ? tarFile
-                    : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD
-                    ? tarFile
-                    : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename);
-                break;
-            case 'extract':
-                args.push('-xf', BSD_TAR_ZSTD
-                    ? tarFile
-                    : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'));
-                break;
-            case 'list':
-                args.push('-tf', BSD_TAR_ZSTD
-                    ? tarFile
-                    : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P');
-                break;
        }
-        // Platform specific args
+        catch (error) {
-        if (tarPath.type === constants_1.ArchiveToolType.GNU) {
+            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-            switch (process.platform) {
-                case 'win32':
-                    args.push('--force-local');
-                    break;
-                case 'darwin':
-                    args.push('--delay-directory-restore');
-                    break;
-            }
        }
-        return args;
-    });
-}
-// Returns commands to run tar and compression program
-function getCommands(compressionMethod, type, archivePath = '') {
-    return __awaiter(this, void 0, void 0, function* () {
-        let args;
-        const tarPath = yield getTarPath();
-        const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
-        const compressionArgs = type !== 'create'
-            ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath)
-            : yield getCompressionProgram(tarPath, compressionMethod);
-        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-            compressionMethod !== constants_1.CompressionMethod.Gzip &&
-            IS_WINDOWS;
-        if (BSD_TAR_ZSTD && type !== 'create') {
-            args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
-        }
-        else {
-            args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
-        }
-        if (BSD_TAR_ZSTD) {
-            return args;
-        }
-        return [args.join(' ')];
    });
}
function getWorkingDirectory() {
@@ -38244,116 +38178,91 @@ function getWorkingDirectory() {
    return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
}
// Common function for extractTar and listTar to get the compression method
-function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
+function getCompressionProgram(compressionMethod) {
-    return __awaiter(this, void 0, void 0, function* () {
+    // -d: Decompress.
-        // -d: Decompress.
+    // unzstd is equivalent to 'zstd -d'
-        // unzstd is equivalent to 'zstd -d'
+    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+    // Using 30 here because we also support 32-bit self-hosted runners.
-        // Using 30 here because we also support 32-bit self-hosted runners.
+    switch (compressionMethod) {
-        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+        case constants_1.CompressionMethod.Zstd:
-            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            return [
-            IS_WINDOWS;
+                '--use-compress-program',
-        switch (compressionMethod) {
+                IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'
-            case constants_1.CompressionMethod.Zstd:
+            ];
-                return BSD_TAR_ZSTD
+        case constants_1.CompressionMethod.ZstdWithoutLong:
-                    ? [
+            return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'];
-                        'zstd -d --long=30 --force -o',
+        default:
-                        constants_1.TarFilename,
+            return ['-z'];
-                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+    }
-                    ]
-                    : [
-                        '--use-compress-program',
-                        IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
-                    ];
-            case constants_1.CompressionMethod.ZstdWithoutLong:
-                return BSD_TAR_ZSTD
-                    ? [
-                        'zstd -d --force -o',
-                        constants_1.TarFilename,
-                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
-                    ]
-                    : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
-            default:
-                return ['-z'];
-        }
-    });
}
-// Used for creating the archive
-// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
-// zstdmt is equivalent to 'zstd -T0'
-// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-// Using 30 here because we also support 32-bit self-hosted runners.
-// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
-function getCompressionProgram(tarPath, compressionMethod) {
-    return __awaiter(this, void 0, void 0, function* () {
-        const cacheFileName = utils.getCacheFileName(compressionMethod);
-        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-            compressionMethod !== constants_1.CompressionMethod.Gzip &&
-            IS_WINDOWS;
-        switch (compressionMethod) {
-            case constants_1.CompressionMethod.Zstd:
-                return BSD_TAR_ZSTD
-                    ? [
-                        'zstd -T0 --long=30 --force -o',
-                        cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                        constants_1.TarFilename
-                    ]
-                    : [
-                        '--use-compress-program',
-                        IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
-                    ];
-            case constants_1.CompressionMethod.ZstdWithoutLong:
-                return BSD_TAR_ZSTD
-                    ? [
-                        'zstd -T0 --force -o',
-                        cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                        constants_1.TarFilename
-                    ]
-                    : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt'];
-            default:
-                return ['-z'];
-        }
-    });
-}
-// Executes all commands as separate processes
-function execCommands(commands, cwd) {
-    return __awaiter(this, void 0, void 0, function* () {
-        for (const command of commands) {
-            try {
-                yield exec_1.exec(command, undefined, { cwd });
-            }
-            catch (error) {
-                throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-            }
-        }
-    });
-}
-// List the contents of a tar
function listTar(archivePath, compressionMethod) {
    return __awaiter(this, void 0, void 0, function* () {
-        const commands = yield getCommands(compressionMethod, 'list', archivePath);
+        const args = [
-        yield execCommands(commands);
+            ...getCompressionProgram(compressionMethod),
+            '-tf',
+            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '-P'
+        ];
+        yield execTar(args, compressionMethod);
    });
}
exports.listTar = listTar;
-// Extract a tar
function extractTar(archivePath, compressionMethod) {
    return __awaiter(this, void 0, void 0, function* () {
        // Create directory to extract tar into
        const workingDirectory = getWorkingDirectory();
        yield io.mkdirP(workingDirectory);
-        const commands = yield getCommands(compressionMethod, 'extract', archivePath);
+        const args = [
-        yield execCommands(commands);
+            ...getCompressionProgram(compressionMethod),
+            '-xf',
+            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '-P',
+            '-C',
+            workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+        ];
+        yield execTar(args, compressionMethod);
    });
}
exports.extractTar = extractTar;
-// Create a tar
function createTar(archiveFolder, sourceDirectories, compressionMethod) {
    return __awaiter(this, void 0, void 0, function* () {
        // Write source directories to manifest.txt to avoid command length limits
-        fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
+        const manifestFilename = 'manifest.txt';
-        const commands = yield getCommands(compressionMethod, 'create');
+        const cacheFileName = utils.getCacheFileName(compressionMethod);
-        yield execCommands(commands, archiveFolder);
+        fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n'));
+        const workingDirectory = getWorkingDirectory();
+        // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
+        // zstdmt is equivalent to 'zstd -T0'
+        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+        // Using 30 here because we also support 32-bit self-hosted runners.
+        // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
+        function getCompressionProgram() {
+            switch (compressionMethod) {
+                case constants_1.CompressionMethod.Zstd:
+                    return [
+                        '--use-compress-program',
+                        IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
+                    ];
+                case constants_1.CompressionMethod.ZstdWithoutLong:
+                    return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
+                default:
+                    return ['-z'];
+            }
+        }
+        const args = [
+            '--posix',
+            ...getCompressionProgram(),
+            '-cf',
+            cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '--exclude',
+            cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '-P',
+            '-C',
+            workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '--files-from',
+            manifestFilename
+        ];
+        yield execTar(args, compressionMethod, archiveFolder);
    });
}
exports.createTar = createTar;
@@ -47281,7 +47190,6 @@ const path = __importStar(__webpack_require__(622));
const utils = __importStar(__webpack_require__(15));
const cacheHttpClient = __importStar(__webpack_require__(114));
const tar_1 = __webpack_require__(434);
-const constants_1 = __webpack_require__(931);
class ValidationError extends Error {
    constructor(message) {
        super(message);
@@ -47343,31 +47251,16 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
        for (const key of keys) {
            checkKey(key);
        }
-        let cacheEntry;
+        const compressionMethod = yield utils.getCompressionMethod();
-        let compressionMethod = yield utils.getCompressionMethod();
        let archivePath = '';
        try {
            // path are needed to compute version
-            cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
+            const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
                compressionMethod
            });
            if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-                // This is to support the old cache entry created by gzip on windows.
+                // Cache not found
-                if (process.platform === 'win32' &&
+                return undefined;
-                    compressionMethod !== constants_1.CompressionMethod.Gzip) {
-                    compressionMethod = constants_1.CompressionMethod.Gzip;
-                    cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
-                        compressionMethod
-                    });
-                    if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-                        return undefined;
-                    }
-                    core.info("Couldn't find cache entry with zstd compression, falling back to gzip compression.");
-                }
-                else {
-                    // Cache not found
-                    return undefined;
-                }
            }
            archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
            core.debug(`Archive Path: ${archivePath}`);
@@ -53370,11 +53263,6 @@ var CompressionMethod;
    CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
    CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
-var ArchiveToolType;
-(function (ArchiveToolType) {
-    ArchiveToolType["GNU"] = "gnu";
-    ArchiveToolType["BSD"] = "bsd";
-})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
// The default number of retry attempts.
exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts.
@@ -53383,12 +53271,6 @@ exports.DefaultRetryDelay = 5000;
// over the socket during this period, the socket is destroyed and the download
// is aborted.
exports.SocketTimeout = 5000;
-// The default path of GNUtar on hosted Windows runners
-exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;
-// The default path of BSDtar on hosted Windows runners
-exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
-exports.TarFilename = 'cache.tar';
-exports.ManifestFilename = 'manifest.txt';
//# sourceMappingURL=constants.js.map

/***/ }),
18 package-lock.json generated
@@ -1,15 +1,15 @@
{
  "name": "cache",
-  "version": "3.2.1",
+  "version": "3.2.2",
  "lockfileVersion": 2,
  "requires": true,
  "packages": {
    "": {
      "name": "cache",
-      "version": "3.2.1",
+      "version": "3.2.2",
      "license": "MIT",
      "dependencies": {
-        "@actions/cache": "^3.1.0",
+        "@actions/cache": "^3.1.1",
        "@actions/core": "^1.10.0",
        "@actions/exec": "^1.1.1",
        "@actions/io": "^1.1.2"
@@ -36,9 +36,9 @@
      }
    },
    "node_modules/@actions/cache": {
-      "version": "3.1.0",
+      "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0.tgz",
+      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.1.tgz",
-      "integrity": "sha512-wKGJkpK3uFTgwy+KA0fxz0H3/ZPymdi0IlyhMmyoMeWd+CIv8xVPWdGlrPDDdN9bFgve2yvEPZVaKRb43Uwtyg==",
+      "integrity": "sha512-gOUdNap8FvlpoQAMYWiNPi9Ltt7jKWv9RuUVKg9cp/vQA9qTXoKiBkTioUAgIejh/qf7jrojYn3lCyIRIsoSeQ==",
      "dependencies": {
        "@actions/core": "^1.10.0",
        "@actions/exec": "^1.0.1",
@@ -9722,9 +9722,9 @@
    },
    "dependencies": {
      "@actions/cache": {
-        "version": "3.1.0",
+        "version": "3.1.1",
-        "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0.tgz",
+        "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.1.tgz",
-        "integrity": "sha512-wKGJkpK3uFTgwy+KA0fxz0H3/ZPymdi0IlyhMmyoMeWd+CIv8xVPWdGlrPDDdN9bFgve2yvEPZVaKRb43Uwtyg==",
+        "integrity": "sha512-gOUdNap8FvlpoQAMYWiNPi9Ltt7jKWv9RuUVKg9cp/vQA9qTXoKiBkTioUAgIejh/qf7jrojYn3lCyIRIsoSeQ==",
        "requires": {
          "@actions/core": "^1.10.0",
          "@actions/exec": "^1.0.1",
package.json
@@ -1,6 +1,6 @@
{
  "name": "cache",
-  "version": "3.2.1",
+  "version": "3.2.2",
  "private": true,
  "description": "Cache dependencies and build outputs",
  "main": "dist/restore/index.js",
@@ -23,7 +23,7 @@
  "author": "GitHub",
  "license": "MIT",
  "dependencies": {
-    "@actions/cache": "^3.1.0",
+    "@actions/cache": "^3.1.1",
    "@actions/core": "^1.10.0",
    "@actions/exec": "^1.1.1",
    "@actions/io": "^1.1.2"