Skip to content

Instantly share code, notes, and snippets.

@dervalp
Last active August 29, 2015 14:23
Show Gist options
  • Save dervalp/3dcb5bb3228ceb13e701 to your computer and use it in GitHub Desktop.
Dirty image crop
// Module dependencies. `conf` and `gm` were previously required twice;
// the duplicates are removed (require() caches, so behavior is unchanged).
var knox = require("knox")
  , conf = require("../conf")
  , http = require('http')
  , url = require('url')
  , gm = require('gm')
  , request = require("request")
  , imageMagick = gm.subClass({ imageMagick: true })
  , imageHelper = require("./imageHelper")
  , fs = require("fs");
// Require gm >= 1.6.0.
// The old check stripped the dots and compared the digits as one number,
// which mis-orders versions (e.g. "1.5.10" -> 1510 > 160 passed the check).
// Compare major/minor components instead.
var gmVersionParts = String(gm.version).split('.').map(Number);
if ((gmVersionParts[0] || 0) < 1 ||
    ((gmVersionParts[0] || 0) === 1 && (gmVersionParts[1] || 0) < 6)) {
  throw new Error('Version 1.6.0 or higher of "gm" module is required.');
}
/**
 * Collect a gm image stream into a single Buffer.
 * Calls back with (err) on failure or (null, buffer) on success.
 * Returns `this` so gm call chains keep working.
 *
 * Fixes: collects raw Buffer chunks and joins them with Buffer.concat
 * instead of accumulating a 'binary' string and using the deprecated
 * `new Buffer(...)` constructor; also handles stream 'error' events,
 * which were previously unhandled (and would crash the process).
 */
gm.prototype.buffer = function(callback) {
  this.stream(function(err, stdout) {
    if (err) {
      return callback(err);
    }
    var chunks = [];
    stdout
      .on('error', function(streamErr) {
        callback(streamErr);
      })
      .on('data', function(chunk) {
        chunks.push(chunk);
      })
      .on('end', function() {
        callback(null, Buffer.concat(chunks));
      });
  });
  return this;
};
// Build a knox S3 client bound to the given bucket, using the S3
// credentials from the shared configuration.
var getClient = function(bucketName) {
  var options = {
    key: conf.s3.key,
    secret: conf.s3.secret,
    bucket: bucketName
  };
  return knox.createClient(options);
};
// S3 PUT headers for a publicly readable, long-cached object of the given
// MIME type and byte length.
var createHeader = function(type, length) {
  var headers = {};
  headers['Content-Type'] = type;
  headers['Content-Length'] = length;
  headers['x-amz-acl'] = 'public-read';
  headers['Cache-Control'] = 'public,max-age=290304000';
  return headers;
};
/**
 * Upload a Buffer to the given S3 bucket under `fileName`.
 * On success calls callback(publicUrl); on failure calls
 * callback(undefined, err) — the same convention uploadFile uses.
 *
 * Fixes: previously an upload error was only logged and the callback was
 * still invoked with a success URL for an object that was never stored.
 */
var uploadFromBuffer = function(buf, fileName, bucket, type, callback) {
  var client = getClient(bucket),
      headers = createHeader(type, buf.length);
  client.putBuffer(buf, fileName, headers, function(err) {
    if (err) {
      console.error('Error uploading buffer to s3:', err);
      return callback(undefined, err);
    }
    callback(client.url("/" + fileName));
  });
};
/**
 * Crop the image in `readstream` to the rectangle described by `coords`
 * ({x1, y1, x2, y2}) and call back with the cropped image as a Buffer.
 * On failure calls callback(undefined, err).
 *
 * Fixes: the error from imageMagick was logged unconditionally (with a
 * misleading message) and then ignored, so callers received an undefined
 * buffer and crashed downstream. Also renames the `heigth` typo.
 */
var crop = function(readstream, coords, callback) {
  var width = coords.x2 - coords.x1,
      height = coords.y2 - coords.y1;
  imageMagick(readstream)
    .crop(width, height, coords.x1, coords.y1)
    .buffer(function(err, buf) {
      if (err) {
        console.error('Error cropping image:', err);
        return callback(undefined, err);
      }
      callback(buf);
    });
};
// Fetch a remote resource as a raw Buffer ({encoding: null} disables
// string decoding) and hand request's (err, response, body) triple
// straight to the callback.
var downloadFromWeb = function(url, callback) {
  var options = { encoding: null };
  request(url, options, callback);
};
/**
 * Stream `body` to S3 as `fileName` in `bucket`.
 * On success calls callback(publicUrl); on failure calls
 * callback(undefined, err).
 *
 * Fixes: reuses the shared createHeader helper instead of duplicating the
 * header object inline, and treats non-2xx S3 responses as failures —
 * previously any response (including S3 error statuses) produced a
 * success callback.
 */
var uploadFile = function(bucket, contentType, contentLength, body, fileName, callback) {
  var client = getClient(bucket),
      req = client.put(fileName, createHeader(contentType, contentLength));
  req.on('response', function(rs) {
    console.log('response from s3, status:', rs.statusCode, 'url:', req.url);
    if (rs.statusCode >= 300) {
      return callback(undefined, new Error('S3 upload failed with status ' + rs.statusCode));
    }
    callback(client.url("/" + fileName));
  });
  req.on('error', function(err) {
    console.error('Error uploading to s3:', err);
    callback(undefined, err);
  });
  req.end(body);
};
/**
 * Public API: upload images to S3, optionally cropped, from either a
 * local multipart upload (`pic` with .path/.type/.size) or a remote URL.
 * Every method calls back with (publicUrl) on success and
 * (undefined, err) on failure.
 *
 * Fixes: download errors from the web were ignored (crashing on
 * `req.headers` when the request failed); `upload` threw a plain string
 * from an async callback (uncatchable by the caller, and misspelled
 * "Amazone"); stray debug console.logs removed.
 */
var upload = {
  // Crop the uploaded picture to `coords` and store it in the article bucket.
  uploadAndCrop: function(pic, coords, fileName, callback) {
    var readstream = fs.createReadStream(pic.path);
    crop(readstream, coords, function(buffer, err) {
      if (err || !buffer) {
        return callback(undefined, err || new Error('crop failed'));
      }
      uploadFromBuffer(buffer, fileName, conf.s3.bucket.article, pic.type, callback);
    });
  },
  // Stream the uploaded picture straight to the article bucket, uncropped.
  upload: function(pic, fileName, callback) {
    var client = getClient(conf.s3.bucket.article),
        stream = fs.createReadStream(pic.path),
        headers = createHeader(pic.type, pic.size);
    client.putStream(stream, fileName, headers, function(err) {
      if (err) {
        console.error('Error uploading to Amazon S3:', err);
        // Report through the callback; throwing here is uncatchable.
        return callback(undefined, err);
      }
      callback(client.url("/" + fileName));
    });
  },
  // Download an image from `url`, crop it, and store it in the post bucket.
  uploadAndCropFromUrl: function(url, coords, fileName, callback) {
    downloadFromWeb(url, function(err, res, body) {
      if (err) {
        return callback(undefined, err);
      }
      crop(body, coords, function(buffer, cropErr) {
        if (cropErr || !buffer) {
          return callback(undefined, cropErr || new Error('crop failed'));
        }
        uploadFromBuffer(buffer, fileName, conf.s3.bucket.post, res.headers["content-type"], callback);
      });
    });
  },
  // Download a file from `url` and store it unmodified in the post bucket.
  uploadFromUrl: function(url, fileName, callback) {
    downloadFromWeb(url, function(err, res, body) {
      if (err) {
        return callback(undefined, err);
      }
      uploadFile(conf.s3.bucket.post, res.headers['content-type'], res.headers['content-length'], body, fileName, callback);
    });
  }
};
module.exports = upload;
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment