I have a lambda function written in javascript (node) that takes a large payload, breaks it down, hits multiple APIs, builds the response, and returns.
The gist of the code looks like so:
'use strict';
// Mutable module-level state: assigned from the request's Authorization
// header inside createObject() and (presumably) read by the downstream
// API helpers. NOTE(review): module scope persists across warm
// invocations of the same Lambda container — confirm nothing can read a
// stale key written by a previous event.
let API_KEY = '';
exports.createObjects = (event, context, callback) => {
let promises = createObject(event);
Promise.all(promises).then(results => {
let res = [];
results.forEach((r) => {
res.push(r.obj);
});
callback(null, {"message" : res});
})
.catch((e) =>{
console.log('Error Creating obj ', e);
});
};
/**
 * Builds one create() promise per object in the request body.
 *
 * @param {object} req - Incoming event; reads req.headers.Authorization
 *   and req.body.obj (array of objects).
 * @returns {Promise[]} One pending promise per object; empty array when
 *   the Authorization header is absent.
 */
const createObject = (req) => {
  // Guard clause first: avoids touching req.body at all for
  // unauthorized requests. NOTE(review): this silently yields an empty
  // result ({"message": []}) instead of an auth error — consider
  // returning a rejected promise so the caller can report 401/403.
  if (req.headers.Authorization === undefined) {
    return [];
  }
  // NOTE(review): writes mutable module-level state; persists across
  // warm invocations of this container — verify downstream readers.
  API_KEY = req.headers.Authorization;
  // map replaces the original forEach+push accumulation.
  return req.body.obj.map((s) => create(s));
};
/**
 * Runs the per-object API call sequence, rolling back on any failure.
 *
 * BUG FIXES vs. the original:
 * - Referenced an undefined variable `s` (should be `object`), so every
 *   call rejected with a ReferenceError.
 * - Its first step called createObject(s), which collides with the
 *   local batch helper above (which returns an array, not a promise) —
 *   presumably an external API call was intended; confirm the correct
 *   function name before shipping.
 * - Wrapped an existing chain in `new Promise` (explicit-construction
 *   anti-pattern) and resolved an undefined `tempSurvey`.
 * - The .catch ran rollBack() but never settled the promise, so the
 *   caller's Promise.all() hung forever — a direct cause of the
 *   "Endpoint request timed out" symptom.
 *
 * @param {object} object - Item to create; fields x/y, z/q, s/t, v/c
 *   feed the attribute calls.
 * @returns {Promise<{obj: object}>} Resolves with {obj: object} (the
 *   handler reads r.obj — TODO confirm this is the intended shape);
 *   rejects with the original error after rollback completes.
 */
const create = (object) => {
  return updateObject(object)
    .then(() => createAttribute1(object.x, object.y))
    .then(() => createAttribute2(object.z, object.q))
    .then(() => createAttribute3(object.s, object.t))
    .then(() => createAttribute4(object.v, object.c))
    .then(() => ({ obj: object }))
    .catch((e) =>
      // Roll back, then re-reject so the caller observes the failure
      // instead of a promise that never settles.
      rollBack(object).then(() => Promise.reject(e))
    );
};
I'm having problems dealing with concurrent requests. For example, if I hit the lambda function with a 10 MB payload it processes just fine, but if 3 users hit the lambda function with 10 MB payloads at the same time I see the error "Endpoint request timed out".
What is the best way to "parallelize" this and move it out of the web server with Lambda? I'm not sure how to approach this... I have increased the timeout from 3 seconds to the maximum of 30, but this only buys a little time and doesn't fix the problem.
via Ross Bassett
No comments:
Post a Comment