
I am using this Lambda function to generate thumbnails on the fly, but I am getting the following error: AWS Lambda reached the memory limit

REPORT RequestId: 9369f148-2a85-11e7-a571-5f1e1818669e Duration: 188.18 ms Billed Duration: 200 ms Memory Size: 1536 MB Max Memory Used: 1536 MB 

AND ...

RequestId: 9369f148-2a85-11e7-a571-5f1e1818669e Process exited before completing request 

So I assume I am hitting the maximum memory limit. Without the function `uploadRecentImage()` it works, but if I add a new size to `imgVariants[]` I also hit the memory limit. I think the way the function processes the `imgVariants` (the each loop) causes this, but I don't know how to do it better. I would appreciate any help.

Here is my function:

// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm').subClass({
    imageMagick: true
}); // use ImageMagick
var util = require('util');

// configuration as code - add, modify, remove array elements as desired
var imgVariants = [
    {
        "SIZE": "Large1",
        "POSTFIX": "-l",
        "MAX_WIDTH": 6000,
        "MAX_HEIGHT": 6000,
        "SIZING_QUALITY": 75,
        "INTERLACE": "Line"
    },
    {
        "SIZE": "Large1",
        "POSTFIX": "-l",
        "MAX_WIDTH": 1280,
        "MAX_HEIGHT": 1280,
        "SIZING_QUALITY": 75,
        "INTERLACE": "Line"
    },
    {
        "SIZE": "Large1",
        "POSTFIX": "-l",
        "MAX_WIDTH": 500,
        "MAX_HEIGHT": 500,
        "SIZING_QUALITY": 75,
        "INTERLACE": "Line"
    },
    {
        "SIZE": "Large1",
        "POSTFIX": "-l",
        "MAX_WIDTH": 100,
        "MAX_HEIGHT": 100,
        "SIZING_QUALITY": 75,
        "INTERLACE": "Line"
    }
];
var DST_BUCKET_POSTFIX = "resized";

// get reference to S3 client
var s3 = new AWS.S3();

exports.handler = function (event, context) {
    // Read options from the event.
    console.log("Reading options from event:\n", util.inspect(event, {
        depth: 5
    }));
    var srcBucket = event.Records[0].s3.bucket.name;
    // Object key may have spaces or unicode non-ASCII characters.
    var srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
    // derive the file name and extension
    var srcFile = srcKey.match(/(.+)\.([^.]+)/);

    var srcName = srcFile[1];
    var scrExt = srcFile[2];
    // set the destination bucket
    var dstBucket = srcBucket + DST_BUCKET_POSTFIX;

    // make sure that source and destination are different buckets.
    if (srcBucket === dstBucket) {
        console.error("Destination bucket must be different from source bucket.");
        return;
    }

    if (!scrExt) {
        console.error('unable to derive file type extension from file key ' + srcKey);
        return;
    }

    if (scrExt != "jpg" && scrExt != "png") {
        console.log('skipping non-supported file type ' + srcKey + ' (must be jpg or png)');
        return;
    }

    function processImage(data, options, callback) {
        gm(data.Body).size(function (err, size) {

            var scalingFactor = Math.min(
                options.MAX_WIDTH / size.width,
                options.MAX_HEIGHT / size.height
            );
            var width = scalingFactor * size.width;
            var height = scalingFactor * size.height;

            this.resize(width, height)
                .quality(options.SIZING_QUALITY || 75)
                .interlace(options.INTERLACE || 'None')
                .toBuffer(scrExt, function (err, buffer) {
                    if (err) {
                        callback(err);
                    } else {
                        uploadImage(data.ContentType, buffer, options, callback);
                        uploadRecentImage(data.ContentType, buffer, options, callback);
                    }
                });
        });
    }

    function uploadImage(contentType, data, options, callback) {
        // Upload the transformed image to the destination S3 bucket.
        s3.putObject({
            Bucket: dstBucket,
            Key: options.MAX_WIDTH + '/' + srcName + '.' + scrExt,
            Body: data,
            ContentType: contentType
        },
        callback);
    }

    function uploadRecentImage(contentType, data, options, callback) {
        if (options.MAX_WIDTH == 500) {
            s3.putObject({
                Bucket: dstBucket,
                Key: 'recent_optimized.' + scrExt,
                Body: data,
                ContentType: contentType
            },
            callback);
        }
        if (options.MAX_WIDTH == 100) {
            s3.putObject({
                Bucket: dstBucket,
                Key: 'recent_thumb.' + scrExt,
                Body: data,
                ContentType: contentType
            },
            callback);
        }
    }

    // Download the image from S3 and process for each requested image variant.
    async.waterfall(
        [
            function download(next) {
                // Download the image from S3 into a buffer.
                s3.getObject({
                    Bucket: srcBucket,
                    Key: srcKey
                },
                next);
            },
            function processImages(data, next) {
                async.each(imgVariants, function (variant, next) {
                    processImage(data, variant, next);
                }, next);
            }
        ],
        function (err) {
            if (err) {
                console.error(
                    'Unable to resize ' + srcBucket + '/' + srcKey +
                    ' and upload to ' + dstBucket +
                    ' due to an error: ' + err
                );
            } else {
                console.log(
                    'Successfully resized ' + srcBucket + '/' + srcKey +
                    ' and uploaded to ' + dstBucket
                );
            }

            context.done();
        }
    );
};

While looping, do you really need to keep the processed images in memory? Why not have your source call the Lambda function once per image instead of looping inside the Lambda? – kosa


If you need 1.5 GB to process your thumbnails, you have a serious memory problem. *Do not* try to process all the images at once. *Do not* keep anything in memory unless you have to; clean up after each image has been processed. *Do not* use global structures, especially if you want to process images in parallel, because global structures cannot be cleaned up. –


Alternatively, you could look at a "resize as a service" system such as [Cloudinary](http://cloudinary.com/) or [Imgix](http://imgix.com/). –

Answer

1. You can limit the number of parallel processImages calls:

Replace `async.each(imgVariants,` with `async.eachLimit(imgVariants, 2,` so that no more than two images are processed in parallel.
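In context, the change in the waterfall step might look like this (a minimal sketch; `async.eachLimit` takes the same iteratee and final callback as `async.each`, with the concurrency limit as its second argument):

function processImages(data, next) {
    // Resize at most 2 variants at a time instead of all of them at once,
    // so fewer full-size image buffers are held in memory simultaneously.
    async.eachLimit(imgVariants, 2, function (variant, done) {
        processImage(data, variant, done);
    }, next);
}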

2. The script has a bug:

uploadImage(data.ContentType, buffer, options, callback);
uploadRecentImage(data.ContentType, buffer, options, callback);

This calls `callback` twice, which is not allowed. Call the callback only once! (See the sketch after this list.)

3. The script has another bug: `event.Records[0]` processes only the first image. If you upload several images at the same time, you will miss some of them. (See the sketch after this list.)
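A minimal sketch of how the single-callback fix could look, using the `async` library already in place. `async.series` runs the two uploads in order and invokes the outer callback exactly once; note that `uploadRecentImage` must then call its callback even when it has nothing to upload, otherwise the series would never finish:

function processImage(data, options, callback) {
    gm(data.Body).size(function (err, size) {
        if (err) {
            return callback(err); // propagate gm errors instead of crashing on an undefined size
        }
        var scalingFactor = Math.min(
            options.MAX_WIDTH / size.width,
            options.MAX_HEIGHT / size.height
        );
        this.resize(scalingFactor * size.width, scalingFactor * size.height)
            .quality(options.SIZING_QUALITY || 75)
            .interlace(options.INTERLACE || 'None')
            .toBuffer(scrExt, function (err, buffer) {
                if (err) {
                    return callback(err);
                }
                // Run both uploads, then signal completion exactly once.
                async.series([
                    function (done) { uploadImage(data.ContentType, buffer, options, done); },
                    function (done) { uploadRecentImage(data.ContentType, buffer, options, done); }
                ], callback);
            });
    });
}

function uploadRecentImage(contentType, data, options, callback) {
    var key = null;
    if (options.MAX_WIDTH == 500) key = 'recent_optimized.' + scrExt;
    if (options.MAX_WIDTH == 100) key = 'recent_thumb.' + scrExt;
    if (!key) {
        return callback(); // nothing to upload for this variant, but still report completion
    }
    s3.putObject({
        Bucket: dstBucket,
        Key: key,
        Body: data,
        ContentType: contentType
    }, callback);
}

For the third bug, one approach is to wrap the whole per-image pipeline in `async.each(event.Records, ...)` so that every record in the event gets its own pass (a sketch only; the per-record variables such as srcBucket and srcKey have to move inside the loop):

exports.handler = function (event, context) {
    // Handle every uploaded object in the event, not just the first one.
    async.each(event.Records, function (record, recordDone) {
        var srcBucket = record.s3.bucket.name;
        var srcKey = decodeURIComponent(record.s3.object.key.replace(/\+/g, " "));
        // ... run the existing download/resize waterfall for this record,
        // passing recordDone as its final callback instead of calling context.done() ...
        recordDone(); // placeholder until the waterfall above is wired in
    }, function (err) {
        context.done(err);
    });
};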

But will all the imgVariants be processed then, or only the first two? If I do this I get no memory error, but no resized images either :-( – MarkusHH