Mirror of https://github.com/actions/upload-artifact (synced 2024-12-22 06:22:45 +00:00)
add tests for merge artifact
parent fd1ae7b288
commit d5febab9d6
2 changed files with 245 additions and 78 deletions
__tests__/merge.test.ts (new file, 175 lines)
@@ -0,0 +1,175 @@
import * as core from '@actions/core'
import artifact from '@actions/artifact'
import {run} from '../src/merge/merge-artifact'
import {Inputs} from '../src/merge/constants'
import * as search from '../src/shared/search'

const fixtures = {
  artifactName: 'my-merged-artifact',
  tmpDirectory: '/tmp/merge-artifact',
  filesToUpload: [
    '/some/artifact/path/file-a.txt',
    '/some/artifact/path/file-b.txt',
    '/some/artifact/path/file-c.txt'
  ],
  artifacts: [
    {
      name: 'my-artifact-a',
      id: 1,
      size: 100,
      createdAt: new Date('2024-01-01T00:00:00Z')
    },
    {
      name: 'my-artifact-b',
      id: 2,
      size: 100,
      createdAt: new Date('2024-01-01T00:00:00Z')
    },
    {
      name: 'my-artifact-c',
      id: 3,
      size: 100,
      createdAt: new Date('2024-01-01T00:00:00Z')
    }
  ]
}

jest.mock('@actions/github', () => ({
  context: {
    repo: {
      owner: 'actions',
      repo: 'toolkit'
    },
    runId: 123,
    serverUrl: 'https://github.com'
  }
}))

jest.mock('@actions/core')

jest.mock('fs/promises', () => ({
  mkdtemp: jest.fn().mockResolvedValue('/tmp/merge-artifact'),
  rm: jest.fn().mockResolvedValue(undefined)
}))

/* eslint-disable no-unused-vars */
const mockInputs = (overrides?: Partial<{[K in Inputs]?: any}>) => {
  const inputs = {
    [Inputs.Into]: 'my-merged-artifact',
    [Inputs.Pattern]: '*',
    [Inputs.SeparateDirectories]: false,
    [Inputs.RetentionDays]: 0,
    [Inputs.CompressionLevel]: 6,
    [Inputs.DeleteMerged]: false,
    ...overrides
  }

  ;(core.getInput as jest.Mock).mockImplementation((name: string) => {
    return inputs[name]
  })
  ;(core.getBooleanInput as jest.Mock).mockImplementation((name: string) => {
    return inputs[name]
  })

  return inputs
}

describe('merge', () => {
  beforeEach(async () => {
    mockInputs()

    jest
      .spyOn(artifact, 'listArtifacts')
      .mockResolvedValue({artifacts: fixtures.artifacts})

    jest.spyOn(artifact, 'downloadArtifact').mockResolvedValue({
      downloadPath: fixtures.tmpDirectory
    })

    jest.spyOn(search, 'findFilesToUpload').mockResolvedValue({
      filesToUpload: fixtures.filesToUpload,
      rootDirectory: fixtures.tmpDirectory
    })

    jest.spyOn(artifact, 'uploadArtifact').mockResolvedValue({
      size: 123,
      id: 1337
    })

    jest
      .spyOn(artifact, 'deleteArtifact')
      .mockImplementation(async artifactName => {
        const artifact = fixtures.artifacts.find(a => a.name === artifactName)
        if (!artifact) throw new Error(`Artifact ${artifactName} not found`)
        return {id: artifact.id}
      })
  })

  it('merges artifacts', async () => {
    await run()

    for (const a of fixtures.artifacts) {
      expect(artifact.downloadArtifact).toHaveBeenCalledWith(a.id, {
        path: fixtures.tmpDirectory
      })
    }

    expect(artifact.uploadArtifact).toHaveBeenCalledWith(
      fixtures.artifactName,
      fixtures.filesToUpload,
      fixtures.tmpDirectory,
      {compressionLevel: 6}
    )
  })

  it('fails if no artifacts found', async () => {
    mockInputs({[Inputs.Pattern]: 'this-does-not-match'})

    // await the rejection so the assertion gates the test before checking calls
    await expect(run()).rejects.toThrow()

    expect(artifact.uploadArtifact).not.toHaveBeenCalled()
    expect(artifact.downloadArtifact).not.toHaveBeenCalled()
  })

  it('supports custom compression level', async () => {
    mockInputs({
      [Inputs.CompressionLevel]: 2
    })

    await run()

    expect(artifact.uploadArtifact).toHaveBeenCalledWith(
      fixtures.artifactName,
      fixtures.filesToUpload,
      fixtures.tmpDirectory,
      {compressionLevel: 2}
    )
  })

  it('supports custom retention days', async () => {
    mockInputs({
      [Inputs.RetentionDays]: 7
    })

    await run()

    expect(artifact.uploadArtifact).toHaveBeenCalledWith(
      fixtures.artifactName,
      fixtures.filesToUpload,
      fixtures.tmpDirectory,
      {retentionDays: 7, compressionLevel: 6}
    )
  })

  it('supports deleting artifacts after merge', async () => {
    mockInputs({
      [Inputs.DeleteMerged]: true
    })

    await run()

    for (const a of fixtures.artifacts) {
      expect(artifact.deleteArtifact).toHaveBeenCalledWith(a.name)
    }
  })
})
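The fake inputs above are keyed by the Inputs enum from src/merge/constants, and the mocked core.getInput simply looks each requested name up in that record, which only works if the enum's string values match the action's input names. The constants file is not part of this diff, so the following is a hypothetical sketch; the string values are assumptions inferred from the inputs the tests exercise:

// Hypothetical sketch of src/merge/constants (not part of this diff).
// Assumption: each enum value equals the corresponding action input name,
// so that inputs[Inputs.Into] and the mocked core.getInput('into') share a key.
export enum Inputs {
  Into = 'into',
  Pattern = 'pattern',
  SeparateDirectories = 'separate-directories',
  RetentionDays = 'retention-days',
  CompressionLevel = 'compression-level',
  DeleteMerged = 'delete-merged'
}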
src/merge/merge-artifact.ts

This hunk unwraps run() from an outer try { ... } catch (error) { core.setFailed((error as Error).message) } and drops the module-level run() call, so the tests above can import run and assert that it rejects. The function after the change:

@@ -17,85 +17,77 @@ export const chunk = <T>(arr: T[], n: number): T[][] =>
  }, [] as T[][])

export async function run(): Promise<void> {
  const inputs = getInputs()
  const tmpDir = await mkdtemp('merge-artifact')

  const listArtifactResponse = await artifactClient.listArtifacts({
    latest: true
  })
  const matcher = new Minimatch(inputs.pattern)
  const artifacts = listArtifactResponse.artifacts.filter(artifact =>
    matcher.match(artifact.name)
  )
  core.debug(
    `Filtered from ${listArtifactResponse.artifacts.length} to ${artifacts.length} artifacts`
  )

  if (artifacts.length === 0) {
    throw new Error(`No artifacts found matching pattern '${inputs.pattern}'`)
  }

  core.info(`Preparing to download the following artifacts:`)
  artifacts.forEach(artifact => {
    core.info(`- ${artifact.name} (ID: ${artifact.id}, Size: ${artifact.size})`)
  })

  const downloadPromises = artifacts.map(artifact =>
    artifactClient.downloadArtifact(artifact.id, {
      path: inputs.separateDirectories
        ? path.join(tmpDir, artifact.name)
        : tmpDir
    })
  )

  const chunkedPromises = chunk(downloadPromises, PARALLEL_DOWNLOADS)
  for (const chunk of chunkedPromises) {
    await Promise.all(chunk)
  }

  const options: UploadArtifactOptions = {}
  if (inputs.retentionDays) {
    options.retentionDays = inputs.retentionDays
  }

  if (typeof inputs.compressionLevel !== 'undefined') {
    options.compressionLevel = inputs.compressionLevel
  }

  const searchResult = await findFilesToUpload(tmpDir)

  await uploadArtifact(
    inputs.into,
    searchResult.filesToUpload,
    searchResult.rootDirectory,
    options
  )

  core.info(
    `The ${artifacts.length} artifact(s) have been successfully merged!`
  )

  if (inputs.deleteMerged) {
    const deletePromises = artifacts.map(artifact =>
      artifactClient.deleteArtifact(artifact.name)
    )
    await Promise.all(deletePromises)
    core.info(`The ${artifacts.length} artifact(s) have been deleted`)
  }

  try {
    await rm(tmpDir, {recursive: true})
  } catch (error) {
    core.warning(
      `Unable to remove temporary directory: ${(error as Error).message}`
    )
  }
}
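The hunk header above shows only the signature and closing line of the chunk helper that run() uses for download batching; its body sits outside this hunk's context. A reduce-based sketch consistent with those two visible lines (an assumption, since the body is elided from the diff):

// Sketch of chunk, inferred from the hunk header's signature
// `export const chunk = <T>(arr: T[], n: number): T[][] =>` and the
// closing context line `}, [] as T[][])`; the real body is not shown here.
export const chunk = <T>(arr: T[], n: number): T[][] =>
  arr.reduce((acc, cur, i) => {
    const index = Math.floor(i / n)           // target bucket for element i
    acc[index] = [...(acc[index] || []), cur] // append element to its bucket
    return acc
  }, [] as T[][])

One caveat worth noting: downloadPromises is built eagerly with artifacts.map(...), so every download starts as soon as the promises are created; the chunked loop then awaits them PARALLEL_DOWNLOADS at a time, staggering the waiting rather than limiting how many downloads are in flight.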