Merged (23 commits). Changes from all commits.
CHANGELOG.md: 1 addition, 0 deletions
@@ -74,6 +74,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Remove unnecessary looping in field data cache clear ([#19116](https://github.com/opensearch-project/OpenSearch/pull/19116))
- [Flaky Test] Fix flaky test IngestFromKinesisIT.testAllActiveIngestion ([#19380](https://github.com/opensearch-project/OpenSearch/pull/19380))
- Fix lag metric for pull-based ingestion when streaming source is empty ([#19393](https://github.com/opensearch-project/OpenSearch/pull/19393))
- Fix IntervalQuery flaky test ([#19332](https://github.com/opensearch-project/OpenSearch/pull/19332))
- Fix ingestion state xcontent serialization in IndexMetadata and fail fast on mapping errors([#19320](https://github.com/opensearch-project/OpenSearch/pull/19320))
- Fix updated keyword field params leading to stale responses from request cache ([#19385](https://github.com/opensearch-project/OpenSearch/pull/19385))
- Implement SslHandler retrieval logic for transport-reactor-netty4 plugin ([#19458](https://github.com/opensearch-project/OpenSearch/pull/19458))
@@ -144,6 +144,20 @@ protected IntervalsSource analyzeTerm(TokenStream ts) throws IOException {
return Intervals.term(BytesRef.deepCopyOf(bytesAtt.getBytesRef()));
}

// Returns false when the product of each source's pulled-up disjunction counts
// exceeds IndexSearcher.getMaxClauseCount(), i.e. when combining the sources
// would require expanding too many disjunctions.
static boolean canCombineSources(List<IntervalsSource> sources) {
int sourceIndex = 0;
long disjunctionCount = 1;

while (sourceIndex < sources.size()) {
disjunctionCount = disjunctionCount * sources.get(sourceIndex).pullUpDisjunctions().size();
if (disjunctionCount > IndexSearcher.getMaxClauseCount()) {
return false;
}
sourceIndex += 1;
}
return true;
}

protected static IntervalsSource combineSources(List<IntervalsSource> sources, int maxGaps, IntervalMode mode) {
if (sources.size() == 0) {
return NO_INTERVALS;
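To make the new check concrete, the following standalone sketch (not part of the PR; the class and method names DisjunctionBudgetSketch and withinClauseBudget are made up for illustration) mirrors canCombineSources using the same Lucene intervals API and the same source shapes as the new unit tests further down, assuming the default max clause count of 1024:

import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.queries.intervals.Intervals;
import org.apache.lucene.queries.intervals.IntervalsSource;
import org.apache.lucene.search.IndexSearcher;

public class DisjunctionBudgetSketch {

    // Same idea as canCombineSources: the number of flat combinations produced by
    // expanding the combined sources is the product of each source's pulled-up
    // disjunction count, so track that product and stop once it crosses the limit.
    static boolean withinClauseBudget(List<IntervalsSource> sources) {
        long combinations = 1;
        for (IntervalsSource source : sources) {
            combinations *= source.pullUpDisjunctions().size();
            if (combinations > IndexSearcher.getMaxClauseCount()) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        // Each source is a two-way OR, so n sources expand to 2^n combinations.
        List<IntervalsSource> sources = new ArrayList<>();
        for (int i = 0; i < 11; i++) {
            sources.add(
                Intervals.or(
                    Intervals.maxgaps(0, Intervals.ordered(Intervals.term("term_" + 2 * i))),
                    Intervals.maxgaps(0, Intervals.ordered(Intervals.term("term_" + (2 * i + 1))))
                )
            );
        }
        System.out.println(withinClauseBudget(sources.subList(0, 10))); // 2^10 = 1024 <= 1024 -> true
        System.out.println(withinClauseBudget(sources));                // 2^11 = 2048 >  1024 -> false
    }
}

The product grows exponentially in the number of OR'd sources, which is why the guard can bail out as soon as the running product crosses the limit instead of building the expansion first.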
@@ -443,6 +443,9 @@ public IntervalsSource getSource(QueryShardContext ctx, MappedFieldType fieldType
for (IntervalsSourceProvider provider : subSources) {
ss.add(provider.getSource(ctx, fieldType));
}
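// Fail fast if expanding the combined sources' disjunctions would exceed the max clause count (see IntervalBuilder.canCombineSources above).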
if (maxGaps == 0 && mode == IntervalMode.ORDERED && IntervalBuilder.canCombineSources(ss) == false) {
throw new IllegalArgumentException("Too many disjunctions to expand");
}
IntervalsSource source = IntervalBuilder.combineSources(ss, maxGaps, mode);
if (filter != null) {
return filter.filter(source, ctx, fieldType);
@@ -32,13 +32,17 @@

package org.opensearch.index.query;

import org.apache.lucene.queries.intervals.Intervals;
import org.apache.lucene.queries.intervals.IntervalsSource;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
import org.opensearch.core.common.io.stream.Writeable;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.index.query.IntervalsSourceProvider.Combine;
import org.opensearch.search.SearchModule;
import org.opensearch.test.AbstractSerializingTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import static org.opensearch.index.query.IntervalsSourceProvider.Combine;
@@ -104,4 +108,28 @@ protected Combine doParseInstance(XContentParser parser) throws IOException {
assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
return combine;
}

public void testCanCombineSourcesFail() {
List<IntervalsSource> sources = new ArrayList<>();

for (int i = 0; i < 11; i++) {
IntervalsSource source1 = Intervals.maxgaps(0, Intervals.ordered(Intervals.term("term_" + 2 * i)));
IntervalsSource source2 = Intervals.maxgaps(0, Intervals.ordered(Intervals.term("term_" + (2 * i + 1))));
sources.add(Intervals.or(source1, source2));
}

assertFalse(IntervalBuilder.canCombineSources(sources));
}

public void testCanCombineSourcesSuccess() {
List<IntervalsSource> sources = new ArrayList<>();

for (int i = 0; i < 10; i++) {
IntervalsSource source1 = Intervals.maxgaps(0, Intervals.ordered(Intervals.term("term_" + 2 * i)));
IntervalsSource source2 = Intervals.maxgaps(0, Intervals.ordered(Intervals.term("term_" + (2 * i + 1))));
sources.add(Intervals.or(source1, source2));
}

assertTrue(IntervalBuilder.canCombineSources(sources));
}
}
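For reference, the two tests sit on either side of the clause budget that canCombineSources enforces (assuming IndexSearcher.getMaxClauseCount() is at its default of 1024):

    2^10 = 1024, not greater than 1024, so canCombineSources returns true
    2^11 = 2048, greater than 1024, so canCombineSources returns false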
@@ -448,7 +448,13 @@ public void testToQuery() throws IOException {
* We do it this way in SearchService where
* we first rewrite the query with a private context, then reset the context and then build the actual lucene query*/
QueryBuilder rewritten = rewriteQuery(firstQuery, new QueryShardContext(context));
- Query firstLuceneQuery = rewritten.toQuery(context);
+ Query firstLuceneQuery;
+ try {
+     firstLuceneQuery = rewritten.toQuery(context);
+ } catch (IllegalArgumentException e) {
+     assertEquals("Too many disjunctions to expand", e.getMessage());
+     continue;
+ }
assertNotNull("toQuery should not return null", firstLuceneQuery);
assertLuceneQuery(firstQuery, firstLuceneQuery, context);
// remove after assertLuceneQuery since the assertLuceneQuery impl might access the context as well
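With the guard in place, the randomized interval queries that testToQuery generates can now be rejected while building the Lucene query, so the base test treats the specific "Too many disjunctions to expand" message as an expected outcome and skips that iteration instead of failing.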