Adding optimize toggle to the parser options, adding test for use case (
eXigentCoder authored Sep 16, 2024
2 parents f75bd94 + d976b13 commit 8b72d9a
Showing 7 changed files with 274 additions and 46 deletions.
45 changes: 22 additions & 23 deletions lib/make/optimize-join-and-where.js
@@ -8,7 +8,6 @@ const {get, isEqual, cloneDeep} = require('lodash');
* @typedef {import('../types').OptimizationProcessResult} ProcessResult
*/

- const optimizationsEnabled = false;
/**
*
* @param {import('../types').PipelineFn[]} pipeline
@@ -18,9 +17,9 @@ const optimizationsEnabled = false;
* @returns {void}
*/
function optimizeJoinAndWhere(pipeline, pipeLineJoin, wherePiece, context) {
- if (!optimizationsEnabled || context.joinHints.includes('nooptimize')) {
+ if (!context.optimizeJoins || context.joinHints.includes('nooptimize')) {
pushToPipeline({
- wasOptimised: false,
+ wasOptimized: false,
leftOverMatches: [],
});
return;
@@ -35,7 +34,7 @@ function optimizeJoinAndWhere(pipeline, pipeLineJoin, wherePiece, context) {
(!context.joinHints || !context.joinHints.includes('nooptimize'))
) {
try {
- const result = recursivelyOptimise(
+ const result = recursivelyOptimize(
wherePiece.$match,
lookup,
pipeline,
@@ -48,25 +47,25 @@
clearObjectAndCopyToIt(pipeLineJoin, originalPipelineJoin);
clearObjectAndCopyToIt(wherePiece, originalWherePiece);
pushToPipeline({
- wasOptimised: false,
+ wasOptimized: false,
leftOverMatches: [],
});
}
} else {
pushToPipeline({
- wasOptimised: false,
+ wasOptimized: false,
leftOverMatches: [],
});
}

/**
- * @param { OptimizationProcessResult } result
+ * @param { Pick<ProcessResult,'leftOverMatches'|'wasOptimized'> } result
*/
- function pushToPipeline({leftOverMatches, wasOptimised}) {
+ function pushToPipeline({leftOverMatches, wasOptimized}) {
for (const join of pipeLineJoin) {
pipeline.push(join);
}
- if (!wasOptimised && wherePiece) {
+ if (!wasOptimized && wherePiece) {
pipeline.push(wherePiece);
return;
}
@@ -88,7 +87,7 @@ function optimizeJoinAndWhere(pipeline, pipeLineJoin, wherePiece, context) {
* @param {ProcessResult} [parentResult]
* @returns {ProcessResult} the leftover matches to add to the end
*/
- function recursivelyOptimise(
+ function recursivelyOptimize(
match,
lookup,
pipeline,
@@ -171,7 +170,7 @@ function recursivelyOptimise(
if (miscMatches.length > 0) {
mergeProcessResults(
{
- wasOptimised: false,
+ wasOptimized: false,
leftOverMatches: miscMatches,
lookupPipelineStagesAdded: [],
pipelineStagesAdded: [],
@@ -277,7 +276,7 @@ function processMatches(
const returnResult = newReturnResult();
const results = [];
for (const match of matches) {
- const res = recursivelyOptimise(
+ const res = recursivelyOptimize(
match,
lookup,
pipeline,
@@ -303,7 +302,7 @@
* @param {ProcessResult} currentResult
* @param {MatchType} parentMatchType
* @param {boolean} addSingle
- * @returns {ProcessResult} wasOptimised
+ * @returns {ProcessResult} wasOptimized
*/
function processSourceMatches(
sourceMatches,
@@ -325,7 +324,7 @@
target[parentMatchType].push({[matchType]: [...sourceMatches]});
}
clearArray(sourceMatches);
- returnResult.wasOptimised = true;
+ returnResult.wasOptimized = true;
return returnResult;
}
let newStage;
@@ -351,7 +350,7 @@
pipeline.push(newStage);
returnResult.pipelineStagesAdded.push(newStage);
clearArray(sourceMatches);
- returnResult.wasOptimised = true;
+ returnResult.wasOptimized = true;
return returnResult;

/**
@@ -404,12 +403,12 @@ function processDestinationMatches(
if (destinationMatches.length === 0 && leftOverSourceMatches.length === 0) {
return returnResult;
}
- // ensures the lookup is in the right format to be optimised
+ // ensures the lookup is in the right format to be optimized
if (!lookup.$lookup.pipeline) {
convertLookupToPipeline(lookup);
} else {
if (currentResult.lookupPipelineStagesAdded === 0) {
- // only optimise the pipeline if we haven't already done int
+ // only optimize the pipeline if we haven't already done int
optimizeExistingPipeline(lookup);
}
}
@@ -435,12 +434,12 @@
} else {
target.push(...mappedMatches);
}
- returnResult.wasOptimised = true;
+ returnResult.wasOptimized = true;
return returnResult;
}
const newStage = getStage();
lookup.$lookup.pipeline.unshift(newStage);
- returnResult.wasOptimised = true;
+ returnResult.wasOptimized = true;
returnResult.lookupPipelineStagesAdded.push(newStage);
return returnResult;

@@ -518,7 +517,7 @@ function processDestinationMatches(
*/
function optimizeExistingPipeline(lookup) {
const matches = lookup.$lookup.pipeline.filter((p) => !!p.$match);
- // optimise the existing pipeline
+ // optimize the existing pipeline
let indexesToDelete = [];
for (const match of matches) {
if (match.$match.$expr.$and) {
@@ -649,7 +648,7 @@ function mapMatchesToExpressionFormat(match, sourceName, explicitKey) {
*/
function newReturnResult() {
return {
- wasOptimised: false,
+ wasOptimized: false,
leftOverMatches: [],
pipelineStagesAdded: [],
lookupPipelineStagesAdded: [],
@@ -661,8 +660,8 @@ function newReturnResult() {
* @param {ProcessResult} destination
*/
function mergeProcessResults(source, destination) {
- if (source.wasOptimised) {
- destination.wasOptimised = true;
+ if (source.wasOptimized) {
+ destination.wasOptimized = true;
}
for (const leftOverMatch of source.leftOverMatches) {
const exists = destination.leftOverMatches.some((match) =>
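Before this commit, the join/WHERE optimizer was hard-disabled by the module-level constant optimizationsEnabled = false. The change above removes that constant, gates the rewrite on the new per-call context.optimizeJoins flag, keeps the existing 'nooptimize' join hint as a per-query escape hatch, and normalizes the wasOptimised spelling to wasOptimized. A minimal sketch of the resulting decision; shouldOptimizeJoins is a hypothetical helper name used here purely to illustrate the gate inside optimizeJoinAndWhere:

// Illustrative only: a standalone distillation of the gate applied by optimizeJoinAndWhere()
// before it tries to rewrite the join and $match stages.
function shouldOptimizeJoins(context) {
    // The rewrite is opt-in: it only runs when the caller sets optimizeJoins.
    if (!context.optimizeJoins) {
        return false;
    }
    // The existing 'nooptimize' join hint still suppresses it per query.
    if (Array.isArray(context.joinHints) && context.joinHints.includes('nooptimize')) {
        return false;
    }
    return true;
}

console.log(shouldOptimizeJoins({joinHints: []}));                                  // false
console.log(shouldOptimizeJoins({optimizeJoins: true, joinHints: []}));             // true
console.log(shouldOptimizeJoins({optimizeJoins: true, joinHints: ['nooptimize']})); // false

Callers that never set optimizeJoins therefore keep getting the same unoptimized pipeline as before, which is presumably why the toggle defaults to off.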
4 changes: 3 additions & 1 deletion lib/types.ts
@@ -190,6 +190,8 @@ export interface ParserOptions {
unsetId?: boolean;
/** If provided, the library will use the schemas to generate better queries */
schemas?: Schemas;
+ /** If true, will optimize the join for better performance */
+ optimizeJoins?: boolean;
}

export interface NoqlContext extends ParserOptions {
@@ -347,7 +349,7 @@ export interface FindSchemaResult {
}

export interface OptimizationProcessResult {
- wasOptimised: boolean;
+ wasOptimized: boolean;
pipelineStagesAdded: PipelineFn[];
lookupPipelineStagesAdded: PipelineFn[];
leftOverMatches: Record<string, unknown>[];
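With optimizeJoins now part of ParserOptions, enabling the rewrite is just a matter of passing it alongside whatever other parser options a caller already uses. A rough usage sketch; the SQLParser.makeMongoAggregate entry point, its {pipeline, collections} return shape, and the sample SQL are assumptions for illustration rather than details taken from this commit:

const SQLParser = require('@synatic/noql');

// Hypothetical query: any SELECT with a JOIN plus a WHERE clause exercises the new path.
const sql = `
    SELECT o.id, c.name
    FROM orders o
    INNER JOIN customers c ON o.customerId = c.id
    WHERE c.region = 'EMEA'`;

// optimizeJoins is off by default; when true, the parser may move WHERE conditions
// into the $lookup pipeline instead of filtering after the join is materialized.
const {pipeline, collections} = SQLParser.makeMongoAggregate(sql, {
    optimizeJoins: true,
});

console.log(JSON.stringify(pipeline, null, 2));
console.log(collections);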
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "@synatic/noql",
"version": "4.1.8",
"version": "4.1.9",
"description": "Convert SQL statements to mongo queries or aggregates",
"main": "index.js",
"files": [
3 changes: 3 additions & 0 deletions test/optimizations/optimizations.json
@@ -1044,5 +1044,8 @@
}
]
}
+ },
+ "use-cases": {
+ "case-1": {}
}
}
