2017-06-14 113 views
-1

如何使用 Cloud Functions for Firebase 將 FTP 文件上傳到 Firebase 存儲?具體來說:在同一個 Firebase 項目中,使用雲函數(用 Node.js 編寫),首先下載一個 FTP 文件(使用 npm 的 ftp 模塊),然後嘗試將其上傳到 Firebase 存儲。

到目前爲止,每一次嘗試都失敗了,文檔沒有幫助...任何專家意見和建議將不勝感激?

以下代碼使用兩種不同的方法:fs.createWriteStream()和bucket.file()。createWriteStream()。兩者都失敗,但由於不同的原因(請參閱代碼中的錯誤消息)。

'use strict' 

// [START import] 
let admin = require('firebase-admin') 
let functions = require('firebase-functions') 
const gcpStorage = require('@google-cloud/storage')() 
admin.initializeApp(functions.config().firebase)  
var FtpClient = require('ftp') 
var fs = require('fs') 
// [END import] 

// [START Configs] 
// Firebase Storage is configured with the following rules and grants read write access to everyone 
/* 
service firebase.storage { 
    match /b/{bucket}/o { 
    match /{allPaths=**} { 
     allow read, write; 
    } 
    } 
} 
*/ 
// Replace this with your project id, will be use by: const bucket = gcpStorage.bucket(firebaseProjectID) 
const firebaseProjectID = 'your_project_id' 
// Public FTP server, uploaded files are removed after 48 hours ! Upload new ones when needed for testing 
const CONFIG = { 
    test_ftp: { 
    source_path: '/48_hour', 
    ftp: { 
     host: 'ftp.uconn.edu' 
    } 
    } 
} 
const SOURCE_FTP = CONFIG.test_ftp 
// [END Configs] 

// [START saveFTPFileWithFSCreateWriteStream] 
function saveFTPFileWithFSCreateWriteStream(file_name) { 
    const ftpSource = new FtpClient() 
    ftpSource.on('ready', function() { 
    ftpSource.get(SOURCE_FTP.source_path + '/' + file_name, function(err, stream) { 
     if (err) throw err 
     stream.once('close', function() { ftpSource.end() }) 
     stream.pipe(fs.createWriteStream(file_name)) 
     console.log('File downloaded: ', file_name) 
    }) 
    }) 
    ftpSource.connect(SOURCE_FTP.ftp) 
} 
// This fails with the following error in firebase console: 
// Error: EROFS: read-only file system, open '20170601.tar.gz' at Error (native) 
// [END saveFTPFileWithFSCreateWriteStream] 

// [START saveFTPFileWithBucketUpload]  
function saveFTPFileWithBucketUpload(file_name) { 
    const bucket = gcpStorage.bucket(firebaseProjectID) 
    const file = bucket.file(file_name) 
    const ftpSource = new FtpClient() 
    ftpSource.on('ready', function() { 
    ftpSource.get(SOURCE_FTP.source_path + '/' + file_name, function(err, stream) { 
     if (err) throw err 
     stream.once('close', function() { ftpSource.end() }) 
     stream.pipe(file.createWriteStream()) 
     console.log('File downloaded: ', file_name) 
    }) 
    }) 
    ftpSource.connect(SOURCE_FTP.ftp) 
}  
// [END saveFTPFileWithBucketUpload] 

// [START database triggers] 
// Listens for new triggers added to /ftp_fs_triggers/:pushId and calls the saveFTPFileWithFSCreateWriteStream 
// function to save the file in the default project storage bucket 
exports.dbTriggersFSCreateWriteStream = functions.database 
    .ref('/ftp_fs_triggers/{pushId}') 
    .onWrite(event => { 
    const trigger = event.data.val() 
    const fileName = trigger.file_name // i.e. : trigger.file_name = '20170601.tar.gz' 
    return saveFTPFileWithFSCreateWriteStream(trigger.file_name) 
    // This fails with the following error in firebase console: 
    // Error: EROFS: read-only file system, open '20170601.tar.gz' at Error (native) 
    }) 
// Listens for new triggers added to /ftp_bucket_triggers/:pushId and calls the saveFTPFileWithBucketUpload 
// function to save the file in the default project storage bucket 
exports.dbTriggersBucketUpload = functions.database 
    .ref('/ftp_bucket_triggers/{pushId}') 
    .onWrite(event => { 
    const trigger = event.data.val() 
    const fileName = trigger.file_name // i.e. : trigger.file_name = '20170601.tar.gz' 
    return saveFTPFileWithBucketUpload(trigger.file_name) 
    // This fails with the following error in firebase console: 
    /* 
    Error: Uncaught, unspecified "error" event. ([object Object]) 
    at Pumpify.emit (events.js:163:17) 
    at Pumpify.onerror (_stream_readable.js:579:12) 
    at emitOne (events.js:96:13) 
    at Pumpify.emit (events.js:188:7) 
    at Pumpify.Duplexify._destroy (/user_code/node_modules/@google-cloud/storage/node_modules/duplexify/index.js:184:15) 
    at /user_code/node_modules/@google-cloud/storage/node_modules/duplexify/index.js:175:10 
    at _combinedTickCallback (internal/process/next_tick.js:67:7) 
    at process._tickDomainCallback (internal/process/next_tick.js:122:9) 
    */ 
    }) 
// [END database triggers] 
+1

請編輯您的問題,包括你的函數相關的代碼。 –

+0

對不起,我已經添加了代碼,我測試了兩種不同的方法,但沒有成功。 – denisgmag

回答

1

我終於找到了實現這個的正確方法。

1)確保存儲桶被正確引用。最初,我只使用了我的 project_id,而末尾沒有加上「.appspot.com」。

const bucket = gsc.bucket('<project_id>.appspot.com') 

2)首先創建一個存儲桶流,然後通過管道將來自FTP get調用的流傳送到bucketWriteStream。請注意,file_name將是保存文件的名稱(該文件不必事先存在)。

ftpSource.get(filePath, function(err, stream) { 
    if (err) throw err 
    stream.once('close', function() { ftpSource.end() }) 

    // This didn't work ! 
    //stream.pipe(fs.createWriteStream(fileName)) 

    // This works... 
    let bucketWriteStream = bucket.file(fileName).createWriteStream() 
    stream.pipe(bucketWriteStream) 
}) 

就這樣,完美運行了……

相關問題