-
-
Save tominal/c685a5ac1bc71e17558750f148d9bc21 to your computer and use it in GitHub Desktop.
Zip and Stream Files from and to S3 using AWS Lambda
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Based on the gist thread from
//
// @amiantos Lambda S3 Zipper http://amiantos.net/zip-multiple-files-on-aws-s3/
//
// and
//
// @RyanClementsHax TypeScript implementation in the same Gist thread
//
// Adapted into JavaScript by @tominal
const { S3Client } = require('@aws-sdk/client-s3') | |
const { S3Service } = require('./s3Service') | |
const archiver = require('archiver') | |
const s3 = new S3Client({}) | |
const s3Service = new S3Service(s3) | |
const finalizeArchiveSafely = (archive) => { | |
return new Promise((resolve, reject) => { | |
archive.on('error', reject) | |
archive.finalize().then(resolve).catch(reject) | |
}) | |
} | |
exports.handler = async (event) => { | |
const archive = archiver('zip') | |
try { | |
for (const file of event.files) { | |
const downloadStream = s3Service.createLazyDownloadStreamFrom(event.bucket, file.key) | |
archive.append( | |
downloadStream, | |
{ | |
name: file.fileName | |
} | |
) | |
} | |
await Promise.all([ | |
finalizeArchiveSafely(archive), | |
s3Service.uploadTo(event.bucket, event.destinationKey, archive) | |
]) | |
} catch (e) { | |
archive.abort() | |
throw e | |
} | |
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
const { PassThrough } = require('stream') | |
const { GetObjectCommand } = require('@aws-sdk/client-s3') | |
const { Upload } = require('@aws-sdk/lib-storage') | |
class S3Service { | |
constructor(s3) { | |
this.s3 = s3 | |
} | |
createLazyDownloadStreamFrom(bucket, key) { | |
let streamCreated = false | |
const stream = new PassThrough() | |
stream.on('newListener', async event => { | |
if (!streamCreated && event == 'data') { | |
await this.initDownloadStream(bucket, key, stream) | |
streamCreated = true | |
} | |
}) | |
return stream | |
} | |
async uploadTo(bucket, key, stream) { | |
const upload = new Upload({ | |
client: this.s3, | |
params: { | |
Bucket: bucket, | |
Key: key, | |
Body: stream.pipe(new PassThrough()), | |
ContentType: 'application/zip' | |
} | |
}) | |
await upload.done() | |
} | |
async initDownloadStream(bucket, key, stream) { | |
try { | |
const { Body: body } = await this.s3.send( | |
new GetObjectCommand({ Bucket: bucket, Key: key }) | |
) | |
if (!body) { | |
stream.emit( | |
'error', | |
new Error( | |
`got an undefined body from s3 when getting object ${bucket}/${key}` | |
) | |
) | |
} else if (!('on' in body)) { | |
stream.emit( | |
'error', | |
new Error( | |
`got a ReadableStream<any> (a stream used by browser fetch) or Blob from s3 when getting object ${bucket}/${key} instead of Readable` | |
) | |
) | |
} else { | |
body.on('error', err => stream.emit('error', err)).pipe(stream) | |
} | |
} catch (e) { | |
stream.emit('error', e) | |
} | |
} | |
} | |
module.exports = { S3Service } |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment