Monday, 15 May 2017

Limit and offset for knex query build

I have a problem with building a batch update query. To prevent timeout I have to split my larger query into several smaller ones.

The problem is on the else clause of if (payload.fieldName == 'status_id'). Although query.limit(payload.limit); query.offset((payload.page - 1) * payload.limit); receive correct values of limit and page when inspecting a run trough the code I get results in then as I don't have the limit and offset in place.

I would like to know ho to make the query builder accept my limit and offset in the update situation.

I am a novice to node.js/knex/Bookshelf so I will be thankful for any other suggestions regarding this approach.

module.updateAllIssuesByProjectId = function(filter, id, done, payload, timezone, userId, userGroupId) {
    Bookshelf.transaction(function(t) {
        var updateJson = {};
        var query = Issue.query();
        updateJson[payload.fieldName] = payload.fieldValue;
        updateJson['user_id'] = userId;
        updateJson['update_date'] = new Date();
        if (payload.fieldName == 'status_id') {
            query = filterService.issueFilterJoinClause(query, filter, id, timezone, payload);
            query.leftJoin('issues_statuses_groups', function() {
                this.on('issues_statuses_groups.current_status_id', 'issues.status_id')
                    .on('issues_statuses_groups.open_by_group_id', 'issues.opened_by_group_id')
                    .on('issues_statuses_groups.user_group_id', userGroupId)
                    .on('issues_statuses_groups.next_status_id', '<>', 99)
            });
            query.where(filter);
            query = filterService.issueFilterWhereClause(query, filter, id, timezone, payload);
            query.limit(payload.limit);
            query.offset((payload.page - 1) * payload.limit);
            query.orderBy('id', 'asc');
        } else {
            query = filterService.issueFilterJoinClause(query, filter, id, timezone, payload);
            query.where(filter);
            query = filterService.issueFilterWhereClause(query, filter, id, timezone, payload);
            query.limit(payload.limit);
            query.offset((payload.page - 1) * payload.limit);
            query.orderBy('id', 'asc');
            query.update(updateJson).transacting(t);
        }
        return query;
    }).then(function(issues) {
        var updateList = [], originalValues = [];
        if (payload.fieldName == 'status_id') {
            issues.map(function(issue) {
                var updateObject = {}, originalObject = {};
                if (payload.fieldValue == 6 && issue.next_status_id) {
                    updateObject[payload.fieldName] = issue.next_status_id;
                    updateObject['id'] = issue.id;
                    updateList.push(updateObject);
                } else if (payload.fieldValue == 2 && issue.prev_status_id) {
                    updateObject['id'] = issue.id;
                    updateObject[payload.fieldName] = issue.prev_status_id;
                    updateList.push(updateObject);
                } else if (payload.fieldValue == 7) {
                    updateObject['id'] = issue.id;
                    updateObject['is_deleted'] = true;
                    updateList.push(updateObject)
                }
            });

            module.updateList(updateList, function() {
                done(null, originalValues);
            }, userId);
        } else {
            done(null, issues);
        }

    }).catch(function(err) {
        err.method = "issuesService - updateAllIssuesByProjectId";
        done(err, null);
    });
};



via Marius T

No comments:

Post a Comment