
Update to @actions/artifact 0.3.1

Konrad Pabjan 2020-04-23 11:50:01 +02:00
parent 85bdb8f4a9
commit f42ee54997
3 changed files with 53 additions and 68 deletions

dist/index.js vendored (113 changes)

@@ -2493,7 +2493,7 @@ class StatusReporter {
             for (const value of Array.from(this.largeFiles.values())) {
                 core_1.info(value);
             }
-            // delete all entires in the map after displaying the information so it will not be displayed again unless explicitly added
+            // delete all entries in the map after displaying the information so it will not be displayed again unless explicitly added
             this.largeFiles.clear();
         }, 1000);
     }
@@ -6658,61 +6658,55 @@ class UploadHttpClient
            }
            else {
                // the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the
-                // npm tmp-promise package and this file gets used during compression for the GZip file that gets created
-                return tmp
-                    .file()
-                    .then((tmpFile) => __awaiter(this, void 0, void 0, function* () {
-                    // create a GZip file of the original file being uploaded, the original file should not be modified in any way
-                    uploadFileSize = yield upload_gzip_1.createGZipFileOnDisk(parameters.file, tmpFile.path);
-                    let uploadFilePath = tmpFile.path;
-                    // compression did not help with size reduction, use the original file for upload and delete the temp GZip file
-                    if (totalFileSize < uploadFileSize) {
-                        uploadFileSize = totalFileSize;
-                        uploadFilePath = parameters.file;
-                        isGzip = false;
-                        tmpFile.cleanup();
-                    }
-                    let abortFileUpload = false;
-                    // upload only a single chunk at a time
-                    while (offset < uploadFileSize) {
-                        const chunkSize = Math.min(uploadFileSize - offset, parameters.maxChunkSize);
-                        // if an individual file is greater than 100MB (1024*1024*100) in size, display extra information about the upload status
-                        if (uploadFileSize > 104857600) {
-                            this.statusReporter.updateLargeFileStatus(parameters.file, offset, uploadFileSize);
-                        }
-                        const start = offset;
-                        const end = offset + chunkSize - 1;
-                        offset += parameters.maxChunkSize;
-                        if (abortFileUpload) {
-                            // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed
-                            failedChunkSizes += chunkSize;
-                            continue;
-                        }
-                        const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, fs.createReadStream(uploadFilePath, {
-                            start,
-                            end,
-                            autoClose: false
-                        }), start, end, uploadFileSize, isGzip, totalFileSize);
-                        if (!result) {
-                            // Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was
-                            // successfully uploaded so the server may report a different size for what was uploaded
-                            isUploadSuccessful = false;
-                            failedChunkSizes += chunkSize;
-                            core.warning(`Aborting upload for ${parameters.file} due to failure`);
-                            abortFileUpload = true;
-                        }
-                    }
-                }))
-                    .then(() => __awaiter(this, void 0, void 0, function* () {
-                    // only after the file upload is complete and the temporary file is deleted, return the UploadResult
-                    return new Promise(resolve => {
-                        resolve({
-                            isSuccess: isUploadSuccessful,
-                            successfulUploadSize: uploadFileSize - failedChunkSizes,
-                            totalSize: totalFileSize
-                        });
-                    });
-                }));
+                // npm tmp-promise package and this file gets used to create a GZipped file
+                const tempFile = yield tmp.file();
+                // create a GZip file of the original file being uploaded, the original file should not be modified in any way
+                uploadFileSize = yield upload_gzip_1.createGZipFileOnDisk(parameters.file, tempFile.path);
+                let uploadFilePath = tempFile.path;
+                // compression did not help with size reduction, use the original file for upload and delete the temp GZip file
+                if (totalFileSize < uploadFileSize) {
+                    uploadFileSize = totalFileSize;
+                    uploadFilePath = parameters.file;
+                    isGzip = false;
+                }
+                let abortFileUpload = false;
+                // upload only a single chunk at a time
+                while (offset < uploadFileSize) {
+                    const chunkSize = Math.min(uploadFileSize - offset, parameters.maxChunkSize);
+                    // if an individual file is greater than 100MB (1024*1024*100) in size, display extra information about the upload status
+                    if (uploadFileSize > 104857600) {
+                        this.statusReporter.updateLargeFileStatus(parameters.file, offset, uploadFileSize);
+                    }
+                    const start = offset;
+                    const end = offset + chunkSize - 1;
+                    offset += parameters.maxChunkSize;
+                    if (abortFileUpload) {
+                        // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed
+                        failedChunkSizes += chunkSize;
+                        continue;
+                    }
+                    const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, fs.createReadStream(uploadFilePath, {
+                        start,
+                        end,
+                        autoClose: false
+                    }), start, end, uploadFileSize, isGzip, totalFileSize);
+                    if (!result) {
+                        // Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was
+                        // successfully uploaded so the server may report a different size for what was uploaded
+                        isUploadSuccessful = false;
+                        failedChunkSizes += chunkSize;
+                        core.warning(`Aborting upload for ${parameters.file} due to failure`);
+                        abortFileUpload = true;
+                    }
+                }
+                // Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by
+                // calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about
+                yield tempFile.cleanup();
+                return {
+                    isSuccess: isUploadSuccessful,
+                    successfulUploadSize: uploadFileSize - failedChunkSizes,
+                    totalSize: totalFileSize
+                };
            }
        });
    }
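The substance of this hunk is a refactor inside @actions/artifact's upload client: the tmp-promise call chain (tmp.file().then(...).then(...)) becomes a plain yield, and tempFile.cleanup() is now called after the chunk loop whether or not the GZip copy ended up being used, instead of only in the branch where compression did not help. The TypeScript sketch below is not the library's code; it is a minimal illustration of the same pattern, with gzipToDisk and uploadChunks as hypothetical stand-ins for createGZipFileOnDisk and the chunked-upload loop.

import * as tmp from 'tmp-promise';
import * as fs from 'fs';
import * as zlib from 'zlib';
import { promisify } from 'util';

const gzip = promisify(zlib.gzip);

// Hypothetical stand-in for createGZipFileOnDisk: gzip src into dest, return the compressed size.
async function gzipToDisk(src: string, dest: string): Promise<number> {
  const compressed = await gzip(await fs.promises.readFile(src));
  await fs.promises.writeFile(dest, compressed);
  return compressed.byteLength;
}

// Hypothetical stand-in for the real chunked HTTP upload loop.
async function uploadChunks(path: string, size: number, isGzip: boolean): Promise<boolean> {
  console.log(`uploading ${size} bytes from ${path} (gzip: ${isGzip})`);
  return true;
}

// After the refactor: create the temp file with a plain await, pick the smaller of the
// gzipped and original payloads, upload, then clean up the temp file.
async function uploadFile(file: string, originalSize: number): Promise<boolean> {
  const tempFile = await tmp.file(); // resolves to { path, cleanup } in tmp-promise
  let uploadSize = await gzipToDisk(file, tempFile.path);
  let uploadPath = tempFile.path;
  let isGzip = true;
  if (originalSize < uploadSize) {
    // compression did not help, fall back to the original file
    uploadSize = originalSize;
    uploadPath = file;
    isGzip = false;
  }
  const ok = await uploadChunks(uploadPath, uploadSize, isGzip);
  // cleanup now runs whether or not the gzipped file was used; previously it was
  // only called in the "compression did not help" branch of the promise chain
  await tempFile.cleanup();
  return ok;
}

If cleanup is never reached, tmp-promise still removes the temporary file when the node process exits, which is what the new comment in the diff points to.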
@@ -7913,16 +7907,7 @@ exports.displayHttpDiagnostics = displayHttpDiagnostics;
 *
 * FilePaths can include characters such as \ and / which are not permitted in the artifact name alone
 */
-const invalidArtifactFilePathCharacters = [
-    '"',
-    ':',
-    '<',
-    '>',
-    '|',
-    '*',
-    '?',
-    ' '
-];
+const invalidArtifactFilePathCharacters = ['"', ':', '<', '>', '|', '*', '?'];
 const invalidArtifactNameCharacters = [
     ...invalidArtifactFilePathCharacters,
     '\\',
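This hunk drops the space character from invalidArtifactFilePathCharacters, so file paths that contain spaces no longer fail validation; because invalidArtifactNameCharacters spreads that array, the same relaxation applies to artifact names. The sketch below shows how such a character list is typically applied; checkPath and its error message are hypothetical, only the constant comes from the diff.

// Hypothetical validator applying the character list from the diff.
const invalidArtifactFilePathCharacters = ['"', ':', '<', '>', '|', '*', '?'];

function checkPath(filePath: string): void {
  for (const char of invalidArtifactFilePathCharacters) {
    if (filePath.includes(char)) {
      throw new Error(
        `The path ${filePath} contains the invalid character ${char}. ` +
          'Update the path so the artifact can be uploaded.'
      );
    }
  }
}

// With ' ' removed from the list, a path like 'my folder/results.txt' now passes.
checkPath('my folder/results.txt');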

package-lock.json generated (6 changes)

@@ -5,9 +5,9 @@
   "requires": true,
   "dependencies": {
     "@actions/artifact": {
-      "version": "0.3.0",
-      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-0.3.0.tgz",
-      "integrity": "sha512-t35sO6q2nVEb0Y/4GOKK7XlBo1qqnKlapMry46OFjFelsGmAgHMGMBq16s4Q0XCJrvToAhiRUcf3wZ8xyByDuw==",
+      "version": "0.3.1",
+      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-0.3.1.tgz",
+      "integrity": "sha512-czRvOioOpuvmF/qDevfVVpZeBt7pjYlrnmM1+tRuCpKJxjWFYgi5MIW7TfscyupXPvtJz9jIxMjvxy9Eug1QEA==",
       "dev": true,
       "requires": {
         "@actions/core": "^1.2.1",

package.json (2 changes)

@@ -29,7 +29,7 @@
   },
   "homepage": "https://github.com/actions/upload-artifact#readme",
   "devDependencies": {
-    "@actions/artifact": "^0.3.0",
+    "@actions/artifact": "^0.3.1",
     "@actions/core": "^1.2.3",
     "@actions/glob": "^0.1.0",
     "@actions/io": "^1.0.2",