This script runs in AWS Lambda and performs image processing. It should:
- download an image and a watermark from Amazon S3
- resize the image to two sizes
- apply the watermark and annotate the image with text
- upload the resized, watermarked images to another bucket
- send metadata about the upload to an external host
At the moment it works as expected, but the code is neither clean nor robust.
// Runtime configuration: watermark asset key, notification host, and the
// destination bucket for generated thumbnails.
const config = {
  watermark: 'watermark.png',       // key of the watermark image in the source bucket
  hostname: 'host-to-ping.com',     // external host notified after upload
  port: '80',
  thumbBucket: 'thumb.bucket.name'  // bucket that receives the resized images
};

const aws = require('aws-sdk');
const s3 = new aws.S3({ apiVersion: '2006-03-01' });
const im = require('imagemagick');
const fs = require('fs');
const http = require('http');

// Scratch paths in the Lambda's writable /tmp filesystem.
const mainFile = '/tmp/img.jpg';
const watermarkFile = '/tmp/watermark.png';
// Returns a Promise executor that downloads the S3 object described by
// `params` ({Bucket, Key}) and writes its body to `fileName`.
// Resolves with {file, data}; rejects with an Error on failure.
const getObjectPromise = (params, fileName) => {
  return function (resolve, reject) {
    s3.getObject(params, (err, data) => {
      if (err) {
        // Bug fix: the original interpolated undefined `key`/`bucket`
        // variables, which threw a ReferenceError on the error path
        // instead of rejecting. Use the request params and keep the cause.
        reject(new Error(`Error getting object ${params.Key} from bucket ${params.Bucket}: ${err.message}`));
      } else {
        fs.writeFileSync(fileName, data.Body);
        resolve({ file: fileName, data: data });
      }
    });
  };
};
// Returns a Promise executor that resizes `data.Body` to `width` pixels via
// ImageMagick, writing the output to /tmp. Resolves with the output path.
const getResizePromise = (data, width) => {
  return function (resolve, reject) {
    const resizedFile = `/tmp/resized-${width}.jpg`;
    const resizeOptions = {
      width: width,
      srcData: data.Body,
      dstPath: resizedFile
    };
    try {
      im.resize(resizeOptions, (err, stdout, stderr) => {
        if (err) {
          // Bug fix: the original re-threw from this async callback; the
          // surrounding try/catch has already returned by then, so the
          // throw became an uncaught exception. Reject the promise instead.
          reject(err);
        } else {
          resolve(resizedFile);
        }
      });
    } catch (err) {
      // im.resize can still throw synchronously on malformed arguments.
      reject(err);
    }
  };
};
// Returns a Promise executor that composites `watermark` onto `file` (scaled
// to 70% of the image height, gravity east) and, when `annotation` is truthy,
// stamps the text "FD_<annotation>" onto it. Resolves with the file path.
// NOTE(review): file paths are interpolated into a shell command unescaped;
// they are internal /tmp paths here, but keep it that way.
const getWatermarkPromise = (file, watermark, annotation) => {
  // Reads ImageMagick metadata for tmpFile; forwards error and output.
  // Bug fix: the original dropped the identify error, then crashed on
  // `out.geometry` of undefined when identify failed.
  function identity(tmpFile, callback) {
    im.identify(tmpFile, (err, output) => {
      callback(err, output);
    });
  }
  return function (resolve, reject) {
    const exec = require('child_process').exec;
    try {
      identity(file, function (err, out) {
        if (err || !out || !out.geometry) {
          reject(new Error('error while watermark'));
          return;
        }
        // geometry looks like "WxH+X+Y"; index 1 is the image height.
        const dims = out.geometry.split(/[x\+]/);
        const size = Math.floor(Number(dims[1]) * 0.70);
        const compositeCmd = [
          'composite',
          '-geometry', 'x' + size + '+5',
          '-dissolve', '50%',
          '-gravity', 'east',
          '-quality', 100,
          watermark,
          file,
          file
        ].join(' ');
        const annotateCmd = [
          'convert',
          file,
          '-fill White',
          '-gravity east -annotate +165+60',
          '"FD_' + annotation + '"',
          file
        ].join(' ');
        console.log(compositeCmd);
        exec(compositeCmd, function (execErr, stdout, stderr) {
          // Bug fix: the original ignored exec errors and resolved anyway.
          if (execErr) {
            reject(execErr);
            return;
          }
          console.log('watermarking done');
          if (annotation) {
            exec(annotateCmd, function (annotateErr, stdout2, stderr2) {
              if (annotateErr) {
                reject(annotateErr);
                return;
              }
              console.log('annotating done');
              resolve(file);
            });
          } else {
            resolve(file);
          }
        });
      });
    } catch (e) {
      // Guards against synchronous throws from im.identify.
      reject(new Error('error while watermark'));
    }
  };
};
// POSTs upload metadata to the external host (config.hostname) as
// application/x-www-form-urlencoded with PHP-style nested keys
// (e.g. meta[id]=42). Invokes succeesCallback once the response has ended.
const sendRequest = (rawParams, resizedParams, originKey, bucket, succeesCallback) => {
  console.log("send request with params");
  const params = {
    Key: originKey,
    Bucket: bucket,
    meta: rawParams.data.Metadata,
    thumbs: resizedParams,
    size: rawParams.data.ContentLength
  };
  // Recursively serializes nested objects into a[b]=c form-encoded pairs.
  const serialize = function (obj, prefix) {
    const parts = [];
    for (const p in obj) {
      if (obj.hasOwnProperty(p)) {
        const k = prefix ? prefix + "[" + p + "]" : p;
        const v = obj[p];
        parts.push(typeof v == "object" ?
          serialize(v, k) :
          encodeURIComponent(k) + "=" + encodeURIComponent(v));
      }
    }
    // Bug fix: drop empty segments so null/empty sub-objects don't
    // produce stray "&&" runs in the body.
    return parts.filter(Boolean).join("&");
  };
  const body = serialize(params);
  const options = {
    hostname: config.hostname,
    port: config.port,
    path: '/upload/add',
    method: 'POST',
    headers: {
      'Content-Type': 'application/x-www-form-urlencoded',
      // Bug fix: declare the body length so the server can frame the request.
      'Content-Length': Buffer.byteLength(body)
    }
  };
  const req = http.request(options, function (res) {
    // Bug fix: drain the response so the socket is released, and only
    // report success once the response actually completes.
    res.resume();
    res.on('end', succeesCallback);
  });
  req.on('error', function (e) {
    console.log('problem with request: ' + e.message);
  });
  // write data to request body
  req.write(body);
  req.end();
};
// Orchestrates the pipeline after download: resizes the main image to 130px
// and 460px widths, watermarks both (annotating the big one with the source
// object's Metadata.id), uploads them to the thumbnail bucket, then notifies
// the external host. values[0] = {file, data} for the main image,
// values[1] = {file, data} for the watermark.
const processResize = (values, filePath, bucket, succeesCallback) => {
  const resizeSmall = new Promise(getResizePromise(values[0].data, 130));
  const resizeBig = new Promise(getResizePromise(values[0].data, 460));
  const wFile = values[1].file;

  // Promise executor factory: multipart-uploads `params` to S3.
  function putObjects(params) {
    return function (resolve, reject) {
      const options = { partSize: 10 * 1024 * 1024, queueSize: 1 };
      s3.upload(params, options, function (err, data) {
        if (err) { // an error occurred
          console.log(err, err.stack);
          reject(err);
        } else { // successful response
          console.log("upload: ");
          console.log(params);
          resolve(data);
        }
      });
    };
  }

  // Builds upload parameters for one thumbnail; the key gets a -<size> suffix.
  function getParams(file, size) {
    return {
      Bucket: config.thumbBucket,
      Key: filePath.replace(/\.jpg/i, '-' + size + '.jpg'),
      ACL: 'public-read',
      // Bug fix: `new Buffer(...)` is deprecated and readFileSync already
      // returns a Buffer — the extra copy was redundant and unsafe.
      Body: fs.readFileSync(file)
    };
  }

  // Watermarks both resized files, uploads them, then fires the notification.
  function processWatermark(resizedFiles) {
    let id = '0';
    try {
      id = values[0].data.Metadata.id;
    } catch (e) {
      // Metadata may be absent; keep the '0' fallback (best-effort).
      console.log(e);
    }
    const w1 = new Promise(getWatermarkPromise(resizedFiles[0], wFile, false));
    const w2 = new Promise(getWatermarkPromise(resizedFiles[1], wFile, id));
    Promise.all([w1, w2]).then(function (watermarked) {
      const u1 = new Promise(putObjects(getParams(watermarked[0], 130)));
      const u2 = new Promise(putObjects(getParams(watermarked[1], 460)));
      Promise.all([u1, u2]).then(function (results) {
        sendRequest(values[0], results, filePath, bucket, succeesCallback);
      }).catch(function (err) {
        console.log(err);
      });
    }).catch(function (msg) {
      console.log(msg);
    });
  }

  Promise.all([resizeSmall, resizeBig]).then(function (vals) {
    processWatermark(vals);
  }).catch(function (err) {
    // Bug fix: the original logged a fixed string and dropped the error.
    console.log("resize error", err);
  });
};
exports.handler = (event, context, callback) => {
// Get the object from the event and show its content type
const bucket = event.Records[0].s3.bucket.name;
const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
const params = {
Bucket: bucket,
Key: key
};
console.log("start processing" + key);
var wparams = JSON.parse(JSON.stringify(params)); // deep cloning
wparams.Key = config.watermark;
var mainObject = new Promise(getObjectPromise(params, mainFile));
var watermarkObject = new Promise(getObjectPromise(wparams, watermarkFile));
Promise.all([mainObject, watermarkObject]).then(function (values) {
processResize(values, key, bucket, function () {
context.succeed();
});
}).catch(function (msg) {
console.log(msg);
})
};