Error: connect: An existing SFTP connection is already defined
See original GitHub issue. I'm getting the following error when using `put` with a Lambda function in AWS.
Here is my code. What am I doing wrong?
// SFTP client (ssh2-sftp-client) used to push S3 objects to an EC2 host.
const Client = require('ssh2-sftp-client');
const fs = require('fs');
const AWS = require('aws-sdk');
// NOTE(review): this client is module-level, so it survives across warm
// Lambda invocations; a connection left open (or half-torn-down) by a prior
// invocation is the likely cause of the reported
// "An existing SFTP connection is already defined" error.
const client = new Client();
const s3 = new AWS.S3();
// Connection settings for the target SFTP host. The private key is read
// synchronously at cold start, once per container.
const connectionConfig = {
host: 'ec2-54-161-112-0.compute-1.amazonaws.com',
port: '22',
username: 'ubuntu',
privateKey: fs.readFileSync('ec2-allanoricilcos2.pem')
}
// Stream options passed to client.put(); per the maintainer's comment below,
// these all match ssh2-sftp-client's defaults.
const putConfig = {
flags: 'w', // w - write and a - append
encoding: null, // use null for binary files
mode: 0o666, // mode to use for created file (rwx)
autoClose: true // automatically close the write stream when finished
};
exports.main = async (event, context) => {
console.log(JSON.stringify(event.Records));
await client.connect(connectionConfig);
const promissesToUploadObjectToSFTP = event.Records.map(record => {
const object = {
Bucket: record.s3.bucket.name,
Key: record.s3.object.key
};
const objectReadStream = s3.getObject(object)
.createReadStream()
.on('error', (error) =>{
console.log('Error reading file' + error);
});
return client.put(objectReadStream, `/home/ubuntu/${object.Key}`, putConfig);
});
console.log(promissesToUploadObjectToSFTP);
return Promise.all(promissesToUploadObjectToSFTP)
.then(() => {
console.debug('All files were uploaded to sftp');
client.end();
});
}
Issue Analytics
- State:
- Created 3 years ago
- Comments:8 (4 by maintainers)
Top Results From Across the Web
An existing SFTP connection is already defined - Stack Overflow
All I needed to do was move `let sftp = new Client();` into the function `uploadScreenshot()`. It's now working.
Read more >
"close" event in connect not unsetting this.sftp #219 - GitHub
sftp on "close" or we get "'An existing SFTP connection is already defined" error when we try to reconnect. Thoughts on restoring it...
Read more >ssh2-sftp-client - npm
Clients first make an unauthenticated connection to the SFTP server to begin negotiation of protocol settings (cipher, authentication method etc) ...
Read more >What's wrong with my code for SFTP connection?
First, you cannot connect to an instance's own IMPEX directory from that instance. It is blocked at the network level. Second, SFCC servers...
Read more >Why does SFTP (FTP) Fail to Connect? - Servebolt.com
If your FTP program is not connecting properly, here are some possible reasons: Make sure you're using SFTP. That does not mean FTPS, ......
Read more >Top Related Medium Post
No results found
Top Related StackOverflow Question
No results found
Troubleshoot Live Code
Lightrun enables developers to add logs, metrics and snapshots to live code - no restarts or redeploys required.
Start Free
Top Related Reddit Thread
No results found
Top Related Hackernoon Post
No results found
Top Related Tweet
No results found
Top Related Dev.to Post
No results found
Top Related Hashnode Post
No results found
Top GitHub Comments
There is no explicit limit in ssh2-sftp-client. However, it is not uncommon for remote sftp servers to limit the number of connections from an IP address within a certain period of time and it is not uncommon for cloud service providers to put limits on number of connections, memory, processes etc. Could easily be some type of limiting imposed by lambda or S3.
That is a little difficult because I don’t have the full context on how the code is being used. Is this a web context where the main function could be called asynchronously? If so, then you would need to move the new Client() call into the main function (to create a new client object for each call).
Looking more closely at your code, it is likely the .map() use was the source of the error. Using these methods with async/await is quite tricky and it is easy to get bugs wrt await.
Note also that Promise.all() returns an array where each element is the outcome from each promise - if any upload fails, the corresponding element will have a reject. (I was wrong when I said you didn't need the Promise.all() - I misread the code.) Therefore, you need to check the value of each element in the array to verify the file upload was successful.
'use strict';

const Client = require('ssh2-sftp-client');
const fs = require('fs');
const AWS = require('aws-sdk');

const client = new Client();
const s3 = new AWS.S3();

const connectionConfig = {
  host: 'ec2-54-161-112-0.compute-1.amazonaws.com',
  port: '22',
  username: 'ubuntu',
  privateKey: fs.readFileSync('ec2-allanoricilcos2.pem')
};

// don't need this as these are the defaults
// const putConfig = {
//   flags: 'w', // w - write and a - append
//   encoding: null, // use null for binary files
//   mode: 0o666, // mode to use for created file (rwx)
//   autoClose: true // automatically close the write stream when finished
// };

exports.main = async (event, context) => {
  console.log(JSON.stringify(event.Records));

  await client.connect(connectionConfig);

  // Don't use .map - while it is possible to use .map() et. al. with
  // async/await, you need to jump through additional hoops to ensure
  // it works correctly that end up muddying the code IMO
  //
  // const promissesToUploadObjectToSFTP = event.Records.map((record) => {
  //   const object = {
  //     Bucket: record.s3.bucket.name,
  //     Key: record.s3.object.key
  //   };
  //   const objectReadStream = s3
  //     .getObject(object)
  //     .createReadStream()
  //     .on('error', (error) => {
  //       console.log('Error reading file' + error);
  //     });
  //   return client.put(
  //     objectReadStream,
  //     `/home/ubuntu/${object.Key}`,
  //     putConfig
  //   );
  // });

  let promiseArray = [];
  for (let record of event.Records) {
    let object = {
      Bucket: record.s3.bucket.name,
      Key: record.s3.object.key
    };
    let objectReadStream = s3
      .getObject(object)
      .createReadStream()
      .on('error', (error) => {
        console.log('Error reading file' + error);
      });
    promiseArray.push(
      client.put(objectReadStream, `/home/ubuntu/${object.Key}`)
    );
  }

  return Promise.all(promiseArray)
    .then((rslts) => {
      console.dir(rslts);
      return client.end();
    })
    .then(() => {
      console.debug('All files were uploaded to sftp server');
    });
};
The above is untested of course.