node.js - Create a zip file on S3 from files on S3 using Lambda Node
I need to create a zip file consisting of a selection of files (videos and images) located in an S3 bucket.

The problem at the moment is that, using the code below, I hit the memory limit on Lambda.
async.eachLimit(files, 10, function(file, next) {
  var params = {
    Bucket: bucket, // bucket name
    Key: file.key
  };
  s3.getObject(params, function(err, data) {
    if (err) {
      console.log('file', file.key);
      console.log('get image files err', err, err.stack); // an error occurred
      next(err); // propagate the error so the loop doesn't hang
    } else {
      console.log('file', file.key);
      zip.file(file.key, data.Body);
      next();
    }
  });
}, function(err) {
  if (err) {
    console.log('err', err);
  } else {
    console.log('zip', zip);
    var content = zip.generateNodeStream({
      type: 'nodebuffer',
      streamFiles: true
    });
    var params = {
      Bucket: bucket, // name of dest bucket
      Key: 'zipped/images.zip',
      Body: content
    };
    s3.upload(params, function(err, data) {
      if (err) {
        console.log('upload zip to s3 err', err, err.stack); // an error occurred
      } else {
        console.log(data); // successful response
      }
    });
  }
});
Is it possible to do this using Lambda, or should I look at a different approach?

Is it possible to write the compressed zip file on the fly, thereby eliminating the memory issue somewhat, or do I need to have the files collected before compression?

Any help is appreciated.
Using streams may be tricky; I'm not sure how you would pipe multiple streams into one object. I've done this several times using standard file objects, though. It's a multistep process, and it's quite fast. Remember that Lambda operates on Linux, so you have all the Linux resources at hand, including the system /tmp directory. The steps are below, with a sketch after them:
- Create a sub-directory in /tmp; call it "transient" or whatever works for you
- Use s3.getObject() and write the file objects to /tmp/transient
- Use the glob package to generate an array[] of the paths in /tmp/transient
- Loop over the array and call zip.addLocalFile(array[i]);
- zip.writeZip('/tmp/files.zip'); (note the leading slash: /tmp is the only writable location on Lambda)
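A minimal sketch putting those steps together, assuming the aws-sdk v2 S3 client, the glob package (v7-style glob.sync), and adm-zip, whose addLocalFile()/writeZip() calls match the ones above; event.bucket and event.files are hypothetical inputs:

const fs = require('fs');
const path = require('path');
const AWS = require('aws-sdk');
const glob = require('glob');
const AdmZip = require('adm-zip');

const s3 = new AWS.S3();

exports.handler = async (event) => {
  const bucket = event.bucket; // hypothetical input: bucket name
  const files = event.files;   // hypothetical input: [{ key: '...' }, ...]
  const tmpDir = '/tmp/transient';
  fs.mkdirSync(tmpDir, { recursive: true });

  // 1) Fetch each object and write it under /tmp/transient.
  //    Keys are flattened with basename() for simplicity; keys with
  //    duplicate basenames would need a smarter mapping.
  for (const file of files) {
    const data = await s3.getObject({ Bucket: bucket, Key: file.key }).promise();
    fs.writeFileSync(path.join(tmpDir, path.basename(file.key)), data.Body);
  }

  // 2) Collect the local paths.
  const paths = glob.sync(tmpDir + '/*');

  // 3) Add each local file to the archive.
  const zip = new AdmZip();
  for (const p of paths) {
    zip.addLocalFile(p);
  }

  // 4) Write the archive to /tmp (note the leading slash).
  const zipPath = '/tmp/files.zip';
  zip.writeZip(zipPath);

  // 5) Stream the finished zip back to S3 rather than buffering it in memory.
  await s3.upload({
    Bucket: bucket,
    Key: 'zipped/images.zip',
    Body: fs.createReadStream(zipPath),
  }).promise();
};

This trades the Lambda memory limit for the /tmp disk limit (512 MB by default), so it suits archives that fit on local disk.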