Created
May 29, 2019 18:16
-
-
Save dyusupov/d51dddf8a384851270e8c585457db6bd to your computer and use it in GitHub Desktop.
GCP GCS and EdgeFS object create/delete synchronization via a Cloud Function
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
'use strict'; | |
const AWS = require('aws-sdk'); | |
const Storage = require('@google-cloud/storage'); | |
// ======= config section =======
// Deployment-specific settings; edit these before deploying the function.
// Declared const: they are read-only module-level constants.
const GCP_ACCOUNT = "xxxxxxxx@appspot.gserviceaccount.com"; // GCP Service Account Email
const EDGEFS_PROVIDER = "gcs-p12"; // Has to match EdgeFS Cloud Provider name
const EDGEFS_ACCESS = "edgefs_access_key"; // EdgeFS tenant user S3 access key
const EDGEFS_SECRET = "edgefs_secret"; // EdgeFS tenant user S3 secret key
const EDGEFS_REGION = "cltest"; // EdgeFS cluster namespace or configured S3 origin
const EDGEFS_ENDPOINT = "http://IP:9982"; // EdgeFS S3 service endpoint
const EDGEFS_BUCKET = "bk1"; // EdgeFS configured tenant's bucket
const EDGEFS_MDONLY = "false"; // If enabled then fetch data on demand
// ==============================
/**
 * Streams an object into the EdgeFS bucket through a PassThrough pipe.
 *
 * The caller pipes the source (GCS) read stream into the returned
 * PassThrough; this function starts an S3 multipart-capable upload that
 * consumes it.
 *
 * @param {AWS.S3} s3e - S3 client already configured for the EdgeFS endpoint.
 * @param {string} gcpRegion - GCP region the event fired in.
 * @param {string} gcpBucket - Source GCS bucket name.
 * @param {string} key - Object key being synchronized.
 * @param {Object} headData - Source object's HEAD data; `ContentType` and
 *   custom `Metadata` entries are copied through when present.
 * @param {Function} callback - Called with an error string on failure, or
 *   with no arguments on success.
 * @returns {stream.PassThrough} Writable side for the source object bytes.
 */
function uploadFromStream(s3e, gcpRegion, gcpBucket, key, headData, callback) {
  const stream = require('stream');
  const pass = new stream.PassThrough();
  // EdgeFS cloud-sync bookkeeping metadata, with any custom metadata from
  // the source object merged on top (custom keys win on collision).
  // NOTE: the original merge loop shadowed the outer `key` parameter with
  // its forEach variable; Object.assign avoids that hazard and also
  // tolerates a present-but-undefined Metadata property.
  const meta = Object.assign({
    "cloud-provider": EDGEFS_PROVIDER,
    "cloud-region": gcpRegion,
    "cloud-origin": gcpBucket,
    "cloud-key": GCP_ACCOUNT,
    "cloud-mdonly": EDGEFS_MDONLY
  }, headData["Metadata"] || {});
  s3e.upload({Bucket: EDGEFS_BUCKET, Key: key, Body: pass,
      ContentType: headData["ContentType"], Metadata: meta}, function(err, data) {
    if (err)
      return callback(err.toString());
    callback();
  });
  return pass;
}
exports.handler = (event, callback) => { | |
// console.log("event %j", event); | |
// console.log("env %j", process.env); | |
const key = event.data.name; | |
const bucket = event.data.bucket; | |
const region = process.env["X_GOOGLE_FUNCTION_REGION"]; | |
const evtype = event.context.eventType; | |
console.log("syncing on event", evtype, "in region", region, bucket + "/" + key, "to", EDGEFS_ENDPOINT, EDGEFS_BUCKET); | |
if (evtype === "google.storage.object.finalize") { | |
// Read from GCP storage | |
const storage = Storage(); | |
// Write to EdgeFS S3 endpoint | |
var s3e = new AWS.S3({apiVersion: '2006-03-01'}); | |
s3e.config.update({ | |
endpoint: new AWS.Endpoint(EDGEFS_ENDPOINT), | |
s3BucketEndpoint: false, | |
s3ForcePathStyle: true, | |
accessKeyId: EDGEFS_ACCESS, | |
secretAccessKey: EDGEFS_SECRET, | |
region: EDGEFS_REGION | |
}); | |
var readStream = storage.bucket(bucket).file(key).createReadStream(); | |
readStream.pipe(uploadFromStream(s3e, region, bucket, key, {}, callback)); | |
} else if (evtype === "google.storage.object.delete") { | |
// Remove via EdgeFS S3 endpoint | |
var s3e = new AWS.S3({apiVersion: '2006-03-01'}); | |
s3e.config.update({ | |
s3BucketEndpoint: false, | |
s3ForcePathStyle: true, | |
accessKeyId: EDGEFS_ACCESS, | |
secretAccessKey: EDGEFS_SECRET, | |
region: EDGEFS_REGION | |
}); | |
s3e.endpoint = new AWS.Endpoint(EDGEFS_ENDPOINT); | |
var req = s3e.deleteObject({Bucket: EDGEFS_BUCKET, Key: key}); | |
req.on('build', function () { | |
req.httpRequest.headers["x-cloud-delete"] = EDGEFS_PROVIDER; | |
}); | |
req.send(function(err, data) { | |
if (err) | |
return callback(err.toString()); | |
callback(); | |
}); | |
} | |
}; |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment