If you're having memory leaks/bloating
See original GitHub issueTry running your sharp image processing in a child_process spawn. I’ve seen lots of posts here that generally conclude it’s something node-related with RSS memory. I can’t say one way or another if this is the case, and I’ve generally seen multiple suggestions to fix it in my search for a solution, some seem to work, others don’t - this worked for me.
In my case running sharp in a server/express is where I saw the bloat. I’m using multer to upload image files and then attempting to compress/reduce them on the fly. Since multer stores the images and spits out an array of file locations, I just passed the array into my child process as a JSON string param.
// express route
var multer = require('multer');
var upload = multer({dest:'uploads/tmp'});
app.post(['/upload/:store/:name', '/form/upload/:store/:name'], upload.array('images', 6), function(req, res){
  var manifest = {shop: req.params.store, name: req.params.name, images: req.files};
  // Spawn a node child process to handle image manipulation, so sharp's RSS
  // memory lives (and dies) in a short-lived process instead of the server.
  var spawn = require('child_process').spawn;
  var compressor = spawn('node', ['./modules/compression', JSON.stringify(manifest)]);
  // 'data' can fire several times with PARTIAL chunks, so buffer everything
  // and parse once the child exits. Parsing each chunk individually throws
  // whenever the JSON payload spans more than one chunk.
  var stdoutChunks = [];
  compressor.stdout.on('data', function(chunk){
    stdoutChunks.push(chunk);
  });
  var stderrOutput = '';
  compressor.stderr.on('data', function(chunk){
    stderrOutput += chunk.toString();
  });
  // Respond exactly once, after the child has finished. This also prevents
  // the double-response bug where stderr noise fired a 500 after res.json.
  compressor.on('close', function(code){
    if (res.headersSent) return;
    if (code !== 0) {
      // If there's an error, we'll see it.
      console.log(stderrOutput);
      return res.status(500).send();
    }
    try {
      // The child emits the array of final image locations on stdout.
      res.json(JSON.parse(Buffer.concat(stdoutChunks).toString()));
    } catch (err) {
      console.log(err);
      res.status(500).send();
    }
  });
});
…My image processing module spawned in the child process…
var fs = require('fs');
var mkdirp = require('mkdirp');
var image = require('./images');
// The parent express process passes the upload manifest
// ({shop, name, images[]}) as a JSON string in argv[2].
var manifest = JSON.parse(process.argv[2]);
// One resize/compress promise per uploaded image.
var promises = manifest.images.map(function(cur){
  return image.adjustImage(cur);
});
// Wait for every image to finish processing, then move the results into place.
Promise.all(promises).then(function(paths){
  var dir = `./uploads/${manifest.shop}/${manifest.name}`;
  // mkdirp.sync is already a no-op when the directory exists, so no
  // fs.existsSync pre-check is needed. 0o777 is the modern octal literal
  // (legacy 0777 is a SyntaxError in strict mode).
  mkdirp.sync(dir, {mode: 0o777});
  // Delete old image files in the destination folder.
  // (var declarations: the originals were accidental implicit globals.)
  var oldfiles = fs.readdirSync(dir);
  oldfiles.forEach(function(cur){
    fs.unlinkSync(`${dir}/${cur}`);
  });
  // For each of the newly processed image files, move them to the destination folder.
  paths.forEach(function(cur){
    var fileName = cur.split('/').pop();
    fs.renameSync(`./${cur}`, `${dir}/${fileName}`);
  });
  // Emit the final image paths on stdout for the parent to relay to the
  // front end. readdirSync keeps this consistent with the sync calls above
  // (the original assigned async readdir's undefined return to an unused var).
  var files = fs.readdirSync(dir).map(function(cur){
    return `${dir}/${cur}`.slice(1);
  });
  process.stdout.write(JSON.stringify(files));
  process.exit();
}).catch(function(err){
  // Must go to stderr: the parent JSON-parses stdout, and it treats stderr
  // output / a non-zero exit code as the failure signal for its 500.
  console.error(err);
  process.exit(1);
});
…The image.js module where the sharp code and promise chain is constructed. Pardon the many files, this all needs refactoring.
var sharp = require('sharp');
var imageSize = require('image-size');
var fs = require('fs');
/**
 * Multer stores uploads with no file extension; derive the extension from the
 * original client filename and rename the temp file so image tooling can
 * identify it. Mutates `img`, adding `extension`, `oldPath` and `newPath`.
 * @param {object} img - multer file object ({originalname, path, ...}).
 */
function renameImage(img) {
  function pruneExtension (name) {
    var dot = name.lastIndexOf('.');
    // The original split('.') produced `.<wholename>` for extension-less
    // files (e.g. "README" -> ".README"); an empty string is correct there.
    return dot === -1 ? '' : name.slice(dot);
  }
  img.extension = pruneExtension(img.originalname);
  img.oldPath = img.path;
  img.newPath = `${img.oldPath}${img.extension}`;
  fs.renameSync(img.oldPath, img.newPath);
}
module.exports = {
adjustImage: function(img){
renameImage(img);
return new Promise(function(resolve, reject){
var size = imageSize(img.newPath);
if (size.width < 1000) {
resolve(img.newPath);
} else {
sharp(img.newPath).resize(1000, null).jpeg({
quality: 40
}).toFile(`${img.oldPath}-sharp.jpg`, (err, output) => {
if (err) {
reject(err);
} else {
fs.unlinkSync(`${img.oldPath}${img.extension}`);
img.newPath = `${img.oldPath}-sharp.jpg`;
img.extension ='.jpg';
resolve(img.newPath);
}
});
}
});
}
};
Hope this concept helps people who are stuck on memory problems. Sharp is amazing, lets keep using it!
Issue Analytics
- State:
- Created 6 years ago
- Reactions:6
- Comments:5 (1 by maintainers)
Top GitHub Comments
Another way is to use jemalloc by setting the LD_PRELOAD environment variable.
Debian example:
LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.1 node ...
After this change, total memory usage has dropped from "out of memory" to less than 200M per server instance.
If someone has problems with using libjemalloc1 on Debian 10 (buster), it's now version 2, e.g.
/usr/lib/x86_64-linux-gnu/libjemalloc.so.2