Create a zip file on S3 from files on S3 using a Node.js Lambda

I'm new to this, so please bear with me.

I need to create a zip file containing a set of files (videos and images) that live in my S3 bucket.

The problem with my current code (below) is that I quickly hit Lambda's memory limit.

// Setup elided in the original excerpt: the async and jszip libraries plus
// an S3 client; `files` and `bucket` come from the surrounding handler code.
var AWS = require('aws-sdk');
var async = require('async');
var JSZip = require('jszip');

var s3 = new AWS.S3();
var zip = new JSZip();

// Download up to 10 objects at a time and add each one to the in-memory zip.
async.eachLimit(files, 10, function(file, next) {
    var params = {
        Bucket: bucket, // bucket name
        Key: file.key
    };
    s3.getObject(params, function(err, data) {
        if (err) {
            console.log('file', file.key);
            console.log('get image files err', err, err.stack); // an error occurred
            next(err); // propagate the error so eachLimit finishes instead of stalling
        } else {
            console.log('file', file.key);
            zip.file(file.key, data.Body); // buffers the whole object in memory
            next();
        }
    });
},
function(err) {
    if (err) {
        console.log('err', err);
    } else {
        console.log('zip', zip);
        var content = zip.generateNodeStream({
            type: 'nodebuffer',
            streamFiles: true
        });
        var params = {
            Bucket: bucket, // name of dest bucket
            Key: 'zipped/images.zip',
            Body: content
        };
        s3.upload(params, function(err, data) {
            if (err) {
                console.log('upload zip to s3 err', err, err.stack); // an error occurred
            } else {
                console.log(data); // successful response
            }
        });
    }
});

  • Is this possible using Lambda, or should I be looking at a different approach?

  • Is it possible to write to a compressed zip file on the fly, which would largely remove the memory issue, or do I need to collect all the files before compressing?

Any help would be much appreciated.

Originally posted by Rabona; translated under the CC BY-SA 4.0 license.

2 Answers

OK, I had to do this today and it works. Direct buffer to stream, nothing touches the disk, so neither memory nor disk limits are an issue here:

 'use strict';

const AWS = require("aws-sdk");
AWS.config.update( { region: "eu-west-1" } );
const s3 = new AWS.S3( { apiVersion: '2006-03-01'} );

const _archiver = require('archiver');

// This returns a stream... consider it a real pipe sending fluid to the S3 bucket. Don't forget it.
const streamTo = (_bucket, _key) => {
	var stream = require('stream');
	var _pass = new stream.PassThrough();
	s3.upload( { Bucket: _bucket, Key: _key, Body: _pass }, (_err, _data) => { /*...Handle Errors Here*/ } );
	return _pass;
};

exports.handler = async (_req, _ctx, _cb) => {
	var _keys = ['list of your file keys in s3'];

    var _list = await Promise.all(_keys.map(_key => new Promise((_resolve, _reject) => {
            s3.getObject({ Bucket: 'bucket-name', Key: _key }).promise()	// .promise() is required -- an AWS.Request is not itself thenable
                .then(_data => _resolve( { data: _data.Body, name: `${_key.split('/').pop()}` } ))
                .catch(_reject);	// without this, a failed download would leave the promise pending forever
        }
    ))).catch(_err => { throw new Error(_err) } );

    await new Promise((_resolve, _reject) => {
        var _myStream = streamTo('bucket-name', 'fileName.zip');		//Now we instantiate that pipe...
        var _archive = _archiver('zip');
        _archive.on('error', _reject);		// reject the promise rather than throwing inside an event handler

        //Your promise gets resolved when the fluid stops running... so that's when you get to close and resolve
        _myStream.on('close', _resolve);
        _myStream.on('end', _resolve);
        _myStream.on('error', _reject);

        _archive.pipe(_myStream);			//Pass that pipe to _archive so it can push the fluid straight down to the S3 bucket
        _list.forEach(_itm => _archive.append(_itm.data, { name: _itm.name } ) );		//And then we start adding files to it
        _archive.finalize();				//Tell it that's all we want to add. When it finishes, the promise resolves in one of those events up there
    }).catch(_err => { throw new Error(_err) } );

    _cb(null, { } );		//Handle response back to server
};
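
A caveat about the code above: `getObject(...).promise()` buffers each object's entire body in memory before it is appended, so a batch of very large files can still approach the Lambda memory limit. Since archiver also accepts readable streams as append sources, a variation that keeps memory flat is to hand it a per-object read stream instead of a buffer. A minimal sketch of that substitution, keeping the placeholder bucket and key list from above:

// Build the list from read streams instead of buffered bodies...
var _list = _keys.map(_key => ({
	stream: s3.getObject({ Bucket: 'bucket-name', Key: _key }).createReadStream(),
	name: _key.split('/').pop()
}));

// ...and inside the Promise block, append the streams rather than buffers:
_list.forEach(_itm => _archive.append(_itm.stream, { name: _itm.name }));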

Originally posted by iocoker; translated under the CC BY-SA 4.0 license.

I reformatted the code based on @iocoker's answer.

Main entry

// index.js

'use strict';
const S3Zip = require('./s3-zip')

const params = {
  files: [
    {
      fileName: '1.jpg',
      key: 'key1.JPG'
    },
    {
      fileName: '2.jpg',
      key: 'key2.JPG'
    }
  ],
  zippedFileKey: 'zipped-file-key.zip'
}

exports.handler = async event => {
  const s3Zip = new S3Zip(params);
  await s3Zip.process();

  return {
    statusCode: 200,
    body: JSON.stringify(
      {
        message: 'Zip file created successfully!'
      }
    )
  };

}
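
In practice you would likely derive `params` from the invocation `event` rather than hard-coding it at module scope. A sketch of that variant, assuming (hypothetically) that the caller sends `files` and `zippedFileKey` in the request body:

// index.js -- event-driven variant (the event field names are assumptions, not part of the original)
'use strict';
const S3Zip = require('./s3-zip')

exports.handler = async event => {
  // API Gateway delivers the payload as a JSON string in event.body
  const body = typeof event.body === 'string' ? JSON.parse(event.body) : event;

  const s3Zip = new S3Zip({
    files: body.files,                  // expected shape: [{ fileName, key }, ...]
    zippedFileKey: body.zippedFileKey   // destination key for the finished zip
  });
  await s3Zip.process();

  return {
    statusCode: 200,
    body: JSON.stringify({ message: 'Zip file created successfully!' })
  };
}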

Zip file utility

// s3-zip.js

'use strict';
const AWS = require("aws-sdk");

const Archiver = require('archiver');
const Stream = require('stream');

const https = require('https');
const sslAgent = new https.Agent({
  keepAlive: true, // https.Agent expects lowercase 'keepAlive'
  rejectUnauthorized: true
});
sslAgent.setMaxListeners(0);
AWS.config.update({
  httpOptions: {
    agent: sslAgent,
  },
  region: 'us-east-1'
});

module.exports = class S3Zip {
  constructor(params, bucketName = 'default-bucket') {
    this.params = params;
    this.BucketName = bucketName;
  }

  async process() {
    const { params, BucketName } = this;
    const s3 = new AWS.S3({ apiVersion: '2006-03-01', params: { Bucket: BucketName } });

    // create read streams for all the source files and store them
    const s3FileDwnldStreams = params.files.map(item => {
      const stream = s3.getObject({ Key: item.key }).createReadStream();
      return {
        stream,
        fileName: item.fileName
      }
    });

    const streamPassThrough = new Stream.PassThrough();
    // Create a zip archive using streamPassThrough style for the linking request in s3bucket
    const uploadParams = {
      ACL: 'private',
      Body: streamPassThrough,
      ContentType: 'application/zip',
      Key: params.zippedFileKey
    };

    const s3Upload = s3.upload(uploadParams, (err, data) => {
      if (err) {
        console.error('upload err', err)
      } else {
        console.log('upload data', data);
      }
    });

    s3Upload.on('httpUploadProgress', progress => {
      // console.log(progress); // { loaded: 4915, total: 192915, part: 1, key: 'foo.jpg' }
    });

    // create the archiver
    // create the archiver; zlib level 0 stores entries uncompressed (fast),
    // raise the level if a smaller archive matters more than speed
    const archive = Archiver('zip', {
      zlib: { level: 0 }
    });
    archive.on('error', (error) => {
      throw new Error(`${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`);
    });

    // connect the archiver to upload streamPassThrough and pipe all the download streams to it
    await new Promise((resolve, reject) => {
      console.log("Starting upload of the output Files Zip Archive");

      // pass the function references; calling resolve()/reject() here would
      // settle the promise immediately instead of when the stream finishes
      streamPassThrough.on('close', resolve);
      streamPassThrough.on('end', resolve);
      streamPassThrough.on('error', reject);

      archive.pipe(streamPassThrough);
      s3FileDwnldStreams.forEach((s3FileDwnldStream) => {
        archive.append(s3FileDwnldStream.stream, { name: s3FileDwnldStream.fileName })
      });
      archive.finalize();

    }).catch((error) => {
      throw new Error(`${error.code} ${error.message} ${error.data}`);
    });

    // Finally wait for the uploader to finish
    await s3Upload.promise();

  }
}
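
For a quick check outside Lambda, the utility can also be driven directly with Node; a small sketch, with the bucket name and keys as placeholders:

// test.js -- hypothetical local driver for s3-zip.js
const S3Zip = require('./s3-zip');

const s3Zip = new S3Zip({
  files: [{ fileName: 'photo.jpg', key: 'uploads/photo.jpg' }],
  zippedFileKey: 'zipped/archive.zip'
}, 'my-bucket'); // the second argument overrides the 'default-bucket' placeholder

s3Zip.process()
  .then(() => console.log('zip uploaded'))
  .catch(console.error);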

Originally posted by zhenqi li; translated under the CC BY-SA 4.0 license.
