[Remove] LegacyESVersion.V_7_6_* and V_7_7_* constants #4837

Merged: 5 commits, Nov 1, 2022
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -126,6 +126,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
- Remove unused private methods ([#4926](https://github.com/opensearch-project/OpenSearch/pull/4926))
- Revert PR 4656 to unblock Windows CI ([#4949](https://github.com/opensearch-project/OpenSearch/pull/4949))
- Remove LegacyESVersion.V_7_8_ and V_7_9_ Constants ([#4855](https://github.com/opensearch-project/OpenSearch/pull/4855))
- Remove LegacyESVersion.V_7_6_ and V_7_7_ Constants ([#4837](https://github.com/opensearch-project/OpenSearch/pull/4837))

### Fixed
- `opensearch-service.bat start` and `opensearch-service.bat manager` failing to run ([#4289](https://github.com/opensearch-project/OpenSearch/pull/4289))
@@ -124,7 +124,7 @@
import org.apache.lucene.analysis.tr.TurkishAnalyzer;
import org.apache.lucene.analysis.util.ElisionFilter;
import org.apache.lucene.util.SetOnce;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.client.Client;
import org.opensearch.cluster.metadata.IndexNameExpressionResolver;
import org.opensearch.cluster.service.ClusterService;
@@ -347,7 +347,12 @@ public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
tokenizers.put("simple_pattern_split", SimplePatternSplitTokenizerFactory::new);
tokenizers.put("thai", ThaiTokenizerFactory::new);
tokenizers.put("nGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_6_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_3_0_0)) {
throw new IllegalArgumentException(
"The [nGram] tokenizer name was deprecated pre 1.0. "
+ "Please use the tokenizer name to [ngram] for indices created in versions 3.0 or higher instead."
);
} else {
deprecationLogger.deprecate(
"nGram_tokenizer_deprecation",
"The [nGram] tokenizer name is deprecated and will be removed in a future version. "
@@ -358,7 +363,12 @@ public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
});
tokenizers.put("ngram", NGramTokenizerFactory::new);
tokenizers.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_6_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_3_0_0)) {
throw new IllegalArgumentException(
"The [edgeNGram] tokenizer name was deprecated pre 1.0. "
+ "Please use the tokenizer name to [edge_ngram] for indices created in versions 3.0 or higher instead."
);
} else {
deprecationLogger.deprecate(
"edgeNGram_tokenizer_deprecation",
"The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. "
@@ -606,7 +616,12 @@ public List<PreConfiguredTokenizer> getPreConfiguredTokenizers() {

// Temporary shim for aliases. TODO deprecate after they are moved
tokenizers.add(PreConfiguredTokenizer.openSearchVersion("nGram", (version) -> {
if (version.onOrAfter(LegacyESVersion.V_7_6_0)) {
if (version.onOrAfter(Version.V_3_0_0)) {
throw new IllegalArgumentException(
"The [nGram] tokenizer name was deprecated pre 1.0. "
+ "Please use the tokenizer name to [ngram] for indices created in versions 3.0 or higher instead."
);
} else {
deprecationLogger.deprecate(
"nGram_tokenizer_deprecation",
"The [nGram] tokenizer name is deprecated and will be removed in a future version. "
@@ -616,7 +631,12 @@ public List<PreConfiguredTokenizer> getPreConfiguredTokenizers() {
return new NGramTokenizer();
}));
tokenizers.add(PreConfiguredTokenizer.openSearchVersion("edgeNGram", (version) -> {
if (version.onOrAfter(LegacyESVersion.V_7_6_0)) {
if (version.onOrAfter(Version.V_3_0_0)) {
throw new IllegalArgumentException(
"The [edgeNGram] tokenizer name was deprecated pre 1.0. "
+ "Please use the tokenizer name to [edge_ngram] for indices created in versions 3.0 or higher instead."
);
} else {
deprecationLogger.deprecate(
"edgeNGram_tokenizer_deprecation",
"The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. "
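The four hunks above replace the `LegacyESVersion.V_7_6_0` gate with a `Version.V_3_0_0` gate: the legacy `nGram`/`edgeNGram` aliases now throw for indices created on 3.0 or later and only log a deprecation for older indices. A minimal standalone sketch of that gating pattern (plain Java, not the actual OpenSearch plugin API; the packed version-id layout is a simplification):

```java
// Standalone sketch (plain Java, not the OpenSearch plugin API) of the gate the hunks above
// introduce for the legacy "nGram"/"edgeNGram" aliases: indices created before 3.0 keep the
// alias with a deprecation warning, indices created on or after 3.0 fail fast.
public final class LegacyTokenizerAliasGate {

    // Simplified packed version id, assumed layout: major * 1_000_000 + minor * 10_000 + patch * 100 + build.
    static final int V_3_0_0 = 3_000_000;

    static String resolveTokenizerName(String requestedName, int indexCreatedVersionId) {
        boolean legacyAlias = "nGram".equals(requestedName) || "edgeNGram".equals(requestedName);
        if (legacyAlias) {
            String replacement = "nGram".equals(requestedName) ? "ngram" : "edge_ngram";
            if (indexCreatedVersionId >= V_3_0_0) {
                // Hard failure, mirroring the IllegalArgumentException added in the diff.
                throw new IllegalArgumentException(
                    "The [" + requestedName + "] tokenizer name was deprecated pre 1.0; use [" + replacement + "] instead"
                );
            }
            // Older indices still resolve the alias but are warned, mirroring deprecationLogger.deprecate(...).
            System.err.println("DEPRECATION: [" + requestedName + "] is deprecated, use [" + replacement + "]");
        }
        return requestedName;
    }

    public static void main(String[] args) {
        System.out.println(resolveTokenizerName("nGram", 2_070_099));   // warns, still resolves
        try {
            resolveTokenizerName("edgeNGram", 3_000_099);               // rejected for a 3.x index
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}
```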
@@ -11,7 +11,6 @@
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.ConcatenateGraphFilter;
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
import org.opensearch.index.IndexSettings;
@@ -24,11 +23,6 @@
* max_graph_expansions is 100 as the default value of 10_000 seems to be unnecessarily large and preserve_separator is false.
*
* <ul>
* <li>preserve_separator:
* For LegacyESVersion lesser than {@link LegacyESVersion#V_7_6_0} i.e. lucene versions lesser
* than {@link org.apache.lucene.util.Version#LUCENE_8_4_0}
* Whether {@link ConcatenateGraphFilter#SEP_LABEL} should separate the input tokens in the concatenated token.
* </li>
* <li>token_separator:
* Separator to use for concatenation. Must be a String with a single character or empty.
* If not present, {@link ConcatenateGraphTokenFilterFactory#DEFAULT_TOKEN_SEPARATOR} will be used.
@@ -59,17 +53,11 @@ public class ConcatenateGraphTokenFilterFactory extends AbstractTokenFilterFacto
ConcatenateGraphTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(indexSettings, name, settings);

if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_6_0)) { // i.e. Lucene 8.4.0
String separator = settings.get("token_separator", DEFAULT_TOKEN_SEPARATOR);
if (separator.length() > 1) {
throw new IllegalArgumentException("token_separator must be either empty or a single character");
}
tokenSeparator = separator.length() == 0 ? null : separator.charAt(0); // null means no separator while concatenating
} else {
boolean preserveSep = settings.getAsBoolean("preserve_separator", ConcatenateGraphFilter.DEFAULT_PRESERVE_SEP);
tokenSeparator = preserveSep ? ConcatenateGraphFilter.DEFAULT_TOKEN_SEPARATOR : null;
String separator = settings.get("token_separator", DEFAULT_TOKEN_SEPARATOR);
if (separator.length() > 1) {
throw new IllegalArgumentException("token_separator must be either empty or a single character");
}

tokenSeparator = separator.length() == 0 ? null : separator.charAt(0); // null means no separator while concatenating
maxGraphExpansions = settings.getAsInt("max_graph_expansions", DEFAULT_MAX_GRAPH_EXPANSIONS);
preservePositionIncrements = settings.getAsBoolean("preserve_position_increments", DEFAULT_PRESERVE_POSITION_INCREMENTS);
}
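With the pre-7.6 branch gone, `token_separator` is now parsed unconditionally: it must be empty (meaning no separator) or exactly one character. A small standalone sketch of that parsing rule (plain Java; names are illustrative, not the OpenSearch settings API):

```java
// Sketch of the now-unconditional token_separator parsing shown above:
// empty string -> no separator (null), one character -> that character, anything longer -> error.
public final class TokenSeparatorParsing {

    static Character parseTokenSeparator(String separator) {
        if (separator.length() > 1) {
            throw new IllegalArgumentException("token_separator must be either empty or a single character");
        }
        return separator.isEmpty() ? null : separator.charAt(0);
    }

    public static void main(String[] args) {
        System.out.println(parseTokenSeparator(""));    // null: tokens are concatenated directly
        System.out.println(parseTokenSeparator("+"));   // '+'
        try {
            parseTokenSeparator("--");
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}
```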
@@ -33,7 +33,6 @@
package org.opensearch.geo.search.aggregations.bucket.composite;

import org.apache.lucene.index.IndexReader;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.ParseField;
import org.opensearch.common.geo.GeoBoundingBox;
import org.opensearch.common.geo.GeoPoint;
@@ -175,9 +174,7 @@ public static void register(ValuesSourceRegistry.Builder builder) {
public GeoTileGridValuesSourceBuilder(StreamInput in) throws IOException {
super(in);
this.precision = in.readInt();
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) {
this.geoBoundingBox = new GeoBoundingBox(in);
}
this.geoBoundingBox = new GeoBoundingBox(in);
}

public GeoTileGridValuesSourceBuilder precision(int precision) {
@@ -198,9 +195,7 @@ public GeoTileGridValuesSourceBuilder format(String format) {
@Override
protected void innerWriteTo(StreamOutput out) throws IOException {
out.writeInt(precision);
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) {
geoBoundingBox.writeTo(out);
}
geoBoundingBox.writeTo(out);
}

@Override
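Because the only wire versions still supported are well past 7.6, the bounding box no longer needs a version guard: both sides always write and read it. A simplified round trip of that pattern using plain Java data streams (not OpenSearch's StreamInput/StreamOutput; the record and field names are illustrative). The same reasoning applies to the GeoGridAggregationBuilder and RankEvalRequest hunks below.

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Simplified illustration of dropping a version-gated field: once both ends of the wire are
// guaranteed to know the field, it is written and read unconditionally.
public final class UnconditionalFieldRoundTrip {

    record TileSource(int precision, double top, double bottom, double left, double right) {

        void writeTo(DataOutputStream out) throws IOException {
            out.writeInt(precision);
            // No version check anymore: the bounding box is always on the wire.
            out.writeDouble(top);
            out.writeDouble(bottom);
            out.writeDouble(left);
            out.writeDouble(right);
        }

        static TileSource readFrom(DataInputStream in) throws IOException {
            int precision = in.readInt();
            return new TileSource(precision, in.readDouble(), in.readDouble(), in.readDouble(), in.readDouble());
        }
    }

    public static void main(String[] args) throws IOException {
        TileSource original = new TileSource(7, 52.5, 48.1, 2.3, 13.4);
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.writeTo(new DataOutputStream(bytes));
        TileSource copy = TileSource.readFrom(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(original.equals(copy)); // true: the round trip is lossless
    }
}
```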
@@ -32,7 +32,6 @@

package org.opensearch.geo.search.aggregations.bucket.geogrid;

import org.opensearch.LegacyESVersion;
import org.opensearch.OpenSearchException;
import org.opensearch.common.ParseField;
import org.opensearch.common.geo.GeoBoundingBox;
@@ -125,9 +124,7 @@ public GeoGridAggregationBuilder(StreamInput in) throws IOException {
precision = in.readVInt();
requiredSize = in.readVInt();
shardSize = in.readVInt();
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) {
geoBoundingBox = new GeoBoundingBox(in);
}
geoBoundingBox = new GeoBoundingBox(in);
}

@Override
@@ -140,9 +137,7 @@ protected void innerWriteTo(StreamOutput out) throws IOException {
out.writeVInt(precision);
out.writeVInt(requiredSize);
out.writeVInt(shardSize);
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) {
geoBoundingBox.writeTo(out);
}
geoBoundingBox.writeTo(out);
}

/**
@@ -32,7 +32,6 @@

package org.opensearch.index.rankeval;

import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionRequest;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.IndicesRequest;
@@ -69,9 +68,7 @@ public RankEvalRequest(RankEvalSpec rankingEvaluationSpec, String[] indices) {
rankingEvaluationSpec = new RankEvalSpec(in);
indices = in.readStringArray();
indicesOptions = IndicesOptions.readIndicesOptions(in);
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) {
searchType = SearchType.fromId(in.readByte());
}
searchType = SearchType.fromId(in.readByte());
}

RankEvalRequest() {}
@@ -150,9 +147,7 @@ public void writeTo(StreamOutput out) throws IOException {
rankingEvaluationSpec.writeTo(out);
out.writeStringArray(indices);
indicesOptions.writeIndicesOptions(out);
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) {
out.writeByte(searchType.id());
}
out.writeByte(searchType.id());
}

@Override
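RankEvalRequest now always carries the search type as its one-byte id. A minimal sketch of that byte-id round trip (a stand-in enum; the real SearchType and its ids live in OpenSearch core):

```java
// Sketch of an enum carried over the wire as a single byte id, as RankEvalRequest now does
// unconditionally for SearchType.
public final class SearchTypeIdRoundTrip {

    enum SearchType {
        DFS_QUERY_THEN_FETCH((byte) 0),
        QUERY_THEN_FETCH((byte) 1);

        private final byte id;

        SearchType(byte id) {
            this.id = id;
        }

        byte id() {
            return id;
        }

        static SearchType fromId(byte id) {
            for (SearchType type : values()) {
                if (type.id == id) {
                    return type;
                }
            }
            throw new IllegalArgumentException("No search type mapped to [" + id + "]");
        }
    }

    public static void main(String[] args) {
        byte onTheWire = SearchType.QUERY_THEN_FETCH.id(); // what writeTo() emits
        System.out.println(SearchType.fromId(onTheWire));  // what the constructor reads back
    }
}
```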
@@ -720,12 +720,8 @@ public void testAutoExpandIndicesDuringRollingUpgrade() throws Exception {

final int numberOfReplicas = Integer.parseInt(
getIndexSettingsAsMap(indexName).get(IndexMetadata.SETTING_NUMBER_OF_REPLICAS).toString());
if (minimumNodeVersion.onOrAfter(LegacyESVersion.V_7_6_0)) {
assertEquals(nodes.size() - 2, numberOfReplicas);
ensureGreen(indexName);
} else {
assertEquals(nodes.size() - 1, numberOfReplicas);
}
assertEquals(nodes.size() - 2, numberOfReplicas);
ensureGreen(indexName);
}

public void testSoftDeletesDisabledWarning() throws Exception {
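With the pre-7.6 branch gone, the test keeps only the newer expectation: the auto-expanded replica count is `nodes.size() - 2` and the index goes green. One plausible reading of that arithmetic (an assumption, not stated in the diff): one copy is the primary and one node is temporarily excluded from allocation while it restarts, so replicas expand to the remaining nodes. A tiny sketch of that accounting:

```java
// Illustrative arithmetic only (an assumed reading of the rolling-upgrade test, not OpenSearch code):
// auto-expanded replicas = data nodes - excluded nodes - 1 primary.
public final class AutoExpandReplicaMath {

    static int expectedReplicas(int dataNodes, int excludedNodes) {
        return Math.max(0, dataNodes - excludedNodes - 1);
    }

    public static void main(String[] args) {
        // Three data nodes, one temporarily excluded while it restarts -> one replica, i.e. nodes - 2.
        System.out.println(expectedReplicas(3, 1)); // 1
    }
}
```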
@@ -288,7 +288,14 @@ public void testIndexCanChangeCustomDataPath() throws Exception {
final Path indexDataPath = sharedDataPath.resolve("start-" + randomAsciiLettersOfLength(10));

logger.info("--> creating index [{}] with data_path [{}]", index, indexDataPath);
createIndex(index, Settings.builder().put(IndexMetadata.SETTING_DATA_PATH, indexDataPath.toAbsolutePath().toString()).build());
createIndex(
index,
Settings.builder()
.put(IndexMetadata.SETTING_DATA_PATH, indexDataPath.toAbsolutePath().toString())
.put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.REQUEST)
.put(IndexSettings.INDEX_MERGE_ON_FLUSH_ENABLED.getKey(), false)
.build()
);
client().prepareIndex(index).setId("1").setSource("foo", "bar").setRefreshPolicy(IMMEDIATE).get();
ensureGreen(index);

@@ -307,6 +314,16 @@ public void testIndexCanChangeCustomDataPath() throws Exception {
logger.info("--> closing the index [{}] before updating data_path", index);
assertAcked(client().admin().indices().prepareClose(index).setWaitForActiveShards(ActiveShardCount.DEFAULT));

// race condition: async flush may cause translog file deletion resulting in an inconsistent stream from
// Files.walk below during copy phase
// temporarily disable refresh to avoid any flushes or syncs that may inadvertently cause the deletion
assertAcked(
client().admin()
.indices()
.prepareUpdateSettings(index)
.setSettings(Settings.builder().put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "-1").build())
);

final Path newIndexDataPath = sharedDataPath.resolve("end-" + randomAlphaOfLength(10));
IOUtils.rm(newIndexDataPath);

@@ -326,11 +343,17 @@ public void testIndexCanChangeCustomDataPath() throws Exception {
}

logger.info("--> updating data_path to [{}] for index [{}]", newIndexDataPath, index);
// update data path and re-enable refresh
assertAcked(
client().admin()
.indices()
.prepareUpdateSettings(index)
.setSettings(Settings.builder().put(IndexMetadata.SETTING_DATA_PATH, newIndexDataPath.toAbsolutePath().toString()).build())
.setSettings(
Settings.builder()
.put(IndexMetadata.SETTING_DATA_PATH, newIndexDataPath.toAbsolutePath().toString())
.put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), IndexSettings.DEFAULT_REFRESH_INTERVAL.toString())
.build()
)
.setIndicesOptions(IndicesOptions.fromOptions(true, false, true, true))
);

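The new comment spells out the reasoning: an async flush can delete translog files while the test copies the directory with Files.walk, so the walk sees a listing that changes underneath it. The fix pauses the deleter (refresh, and with it any flushes or syncs) before the copy and restores it afterwards. A deterministic, OpenSearch-free analogue of "iterate while something deletes" failing mid-stream, using an in-memory list rather than the filesystem:

```java
import java.util.ArrayList;
import java.util.ConcurrentModificationException;
import java.util.List;

// Not OpenSearch code: a deterministic analogue of the race described in the comment above.
// Iterating a listing while another actor removes entries fails partway through; the test
// avoids this by pausing the deleter (async flush) before the copy phase.
public final class MutateWhileIterating {

    public static void main(String[] args) {
        List<String> translogFiles = new ArrayList<>(List.of("translog-1.tlog", "translog-2.tlog", "translog-3.tlog"));
        try {
            for (String file : translogFiles) {
                System.out.println("copying " + file);
                // Simulates the async flush deleting a file while the copy loop is still walking the listing.
                translogFiles.remove("translog-3.tlog");
            }
        } catch (ConcurrentModificationException e) {
            System.out.println("copy failed mid-iteration: " + e);
        }
    }
}
```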
5 changes: 0 additions & 5 deletions server/src/main/java/org/opensearch/LegacyESVersion.java
@@ -48,11 +48,6 @@
*/
public class LegacyESVersion extends Version {

public static final LegacyESVersion V_7_6_0 = new LegacyESVersion(7060099, org.apache.lucene.util.Version.LUCENE_8_4_0);
public static final LegacyESVersion V_7_6_1 = new LegacyESVersion(7060199, org.apache.lucene.util.Version.LUCENE_8_4_0);
public static final LegacyESVersion V_7_6_2 = new LegacyESVersion(7060299, org.apache.lucene.util.Version.LUCENE_8_4_0);
public static final LegacyESVersion V_7_7_0 = new LegacyESVersion(7070099, org.apache.lucene.util.Version.LUCENE_8_5_1);
public static final LegacyESVersion V_7_7_1 = new LegacyESVersion(7070199, org.apache.lucene.util.Version.LUCENE_8_5_1);
public static final LegacyESVersion V_7_10_0 = new LegacyESVersion(7100099, org.apache.lucene.util.Version.LUCENE_8_7_0);
public static final LegacyESVersion V_7_10_1 = new LegacyESVersion(7100199, org.apache.lucene.util.Version.LUCENE_8_7_0);
public static final LegacyESVersion V_7_10_2 = new LegacyESVersion(7100299, org.apache.lucene.util.Version.LUCENE_8_7_0);
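The removed constants pack each legacy version into a single integer, which reads as major * 1,000,000 + minor * 10,000 + revision * 100 + build, so 7060099 is 7.6.0 with build 99. A small decoder sketch under that assumed layout (not the actual Version parsing code):

```java
// Decodes the packed ids used by the removed constants, assuming the layout
// major * 1_000_000 + minor * 10_000 + revision * 100 + build.
public final class LegacyVersionIdDecoder {

    static String decode(int id) {
        int major = id / 1_000_000;
        int minor = (id / 10_000) % 100;
        int revision = (id / 100) % 100;
        int build = id % 100;
        return major + "." + minor + "." + revision + " (build " + build + ")";
    }

    public static void main(String[] args) {
        System.out.println(decode(7_060_099)); // 7.6.0 (build 99) -> LegacyESVersion.V_7_6_0
        System.out.println(decode(7_070_199)); // 7.7.1 (build 99) -> LegacyESVersion.V_7_7_1
    }
}
```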
@@ -32,7 +32,6 @@

package org.opensearch.action.admin.cluster.node.info;

import org.opensearch.LegacyESVersion;
import org.opensearch.action.support.nodes.BaseNodesRequest;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
@@ -63,22 +62,7 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
public NodesInfoRequest(StreamInput in) throws IOException {
super(in);
requestedMetrics.clear();
if (in.getVersion().before(LegacyESVersion.V_7_7_0)) {
// prior to version 8.x, a NodesInfoRequest was serialized as a list
// of booleans in a fixed order
optionallyAddMetric(in.readBoolean(), Metric.SETTINGS.metricName());
optionallyAddMetric(in.readBoolean(), Metric.OS.metricName());
optionallyAddMetric(in.readBoolean(), Metric.PROCESS.metricName());
optionallyAddMetric(in.readBoolean(), Metric.JVM.metricName());
optionallyAddMetric(in.readBoolean(), Metric.THREAD_POOL.metricName());
optionallyAddMetric(in.readBoolean(), Metric.TRANSPORT.metricName());
optionallyAddMetric(in.readBoolean(), Metric.HTTP.metricName());
optionallyAddMetric(in.readBoolean(), Metric.PLUGINS.metricName());
optionallyAddMetric(in.readBoolean(), Metric.INGEST.metricName());
optionallyAddMetric(in.readBoolean(), Metric.INDICES.metricName());
} else {
requestedMetrics.addAll(Arrays.asList(in.readStringArray()));
}
requestedMetrics.addAll(Arrays.asList(in.readStringArray()));
}

/**
@@ -165,22 +149,7 @@ private void optionallyAddMetric(boolean addMetric, String metricName) {
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
if (out.getVersion().before(LegacyESVersion.V_7_7_0)) {
// prior to version 8.x, a NodesInfoRequest was serialized as a list
// of booleans in a fixed order
out.writeBoolean(Metric.SETTINGS.containedIn(requestedMetrics));
out.writeBoolean(Metric.OS.containedIn(requestedMetrics));
out.writeBoolean(Metric.PROCESS.containedIn(requestedMetrics));
out.writeBoolean(Metric.JVM.containedIn(requestedMetrics));
out.writeBoolean(Metric.THREAD_POOL.containedIn(requestedMetrics));
out.writeBoolean(Metric.TRANSPORT.containedIn(requestedMetrics));
out.writeBoolean(Metric.HTTP.containedIn(requestedMetrics));
out.writeBoolean(Metric.PLUGINS.containedIn(requestedMetrics));
out.writeBoolean(Metric.INGEST.containedIn(requestedMetrics));
out.writeBoolean(Metric.INDICES.containedIn(requestedMetrics));
} else {
out.writeStringArray(requestedMetrics.toArray(new String[0]));
}
out.writeStringArray(requestedMetrics.toArray(new String[0]));
}

/**
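The removed branch serialized ten booleans in a fixed order, a format that cannot gain a metric without breaking the wire protocol; the surviving path writes the requested metric names themselves. A simplified contrast of the two encodings over plain Java data streams (illustrative only, not OpenSearch's StreamOutput):

```java
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.List;
import java.util.Set;

// Contrast of the two wire formats NodesInfoRequest used to juggle: fixed-order booleans
// (the legacy branch removed above) versus a self-describing list of metric names.
public final class MetricsWireFormats {

    static final List<String> FIXED_ORDER = List.of(
        "settings", "os", "process", "jvm", "thread_pool",
        "transport", "http", "plugins", "ingest", "indices"
    );

    // Legacy: one boolean per known metric, where the order is part of the protocol.
    static byte[] writeAsBooleans(Set<String> requested) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        for (String metric : FIXED_ORDER) {
            out.writeBoolean(requested.contains(metric));
        }
        return bytes.toByteArray();
    }

    // Current: just the requested names, so adding a metric does not change the framing.
    static byte[] writeAsNames(Set<String> requested) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        out.writeInt(requested.size());
        for (String metric : requested) {
            out.writeUTF(metric);
        }
        return bytes.toByteArray();
    }

    public static void main(String[] args) throws IOException {
        Set<String> requested = Set.of("os", "jvm");
        System.out.println("boolean encoding: " + writeAsBooleans(requested).length + " bytes");
        System.out.println("name encoding:    " + writeAsNames(requested).length + " bytes");
    }
}
```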
@@ -32,7 +32,6 @@

package org.opensearch.action.admin.cluster.node.reload;

import org.opensearch.LegacyESVersion;
import org.opensearch.action.support.nodes.BaseNodesRequest;
import org.opensearch.common.io.stream.StreamInput;

@@ -68,18 +67,16 @@ public NodesReloadSecureSettingsRequest() {

public NodesReloadSecureSettingsRequest(StreamInput in) throws IOException {
super(in);
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_7_0)) {
final BytesReference bytesRef = in.readOptionalBytesReference();
if (bytesRef != null) {
byte[] bytes = BytesReference.toBytes(bytesRef);
try {
this.secureSettingsPassword = new SecureString(CharArrays.utf8BytesToChars(bytes));
} finally {
Arrays.fill(bytes, (byte) 0);
}
} else {
this.secureSettingsPassword = null;
final BytesReference bytesRef = in.readOptionalBytesReference();
if (bytesRef != null) {
byte[] bytes = BytesReference.toBytes(bytesRef);
try {
this.secureSettingsPassword = new SecureString(CharArrays.utf8BytesToChars(bytes));
} finally {
Arrays.fill(bytes, (byte) 0);
}
} else {
this.secureSettingsPassword = null;
}
}

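The surviving path reads an optional byte blob, wraps it in a SecureString, and zeroes the intermediate byte array in a finally block so the plaintext password does not outlive the conversion. A standalone sketch of that zero-after-use pattern with plain arrays (not OpenSearch's SecureString or CharArrays):

```java
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

// Sketch of the pattern in the hunk above: decode a secret from transient bytes,
// then zero every intermediate buffer so only the intended holder keeps the value.
public final class ZeroAfterUse {

    static char[] utf8BytesToChars(byte[] utf8Bytes) {
        CharBuffer decoded = StandardCharsets.UTF_8.decode(ByteBuffer.wrap(utf8Bytes));
        char[] chars = new char[decoded.remaining()];
        decoded.get(chars);
        if (decoded.hasArray()) {
            Arrays.fill(decoded.array(), '\0'); // wipe the decoder's backing buffer as well
        }
        return chars;
    }

    public static void main(String[] args) {
        byte[] wireBytes = "s3cr3t".getBytes(StandardCharsets.UTF_8); // stands in for readOptionalBytesReference()
        char[] password;
        try {
            password = utf8BytesToChars(wireBytes);
        } finally {
            Arrays.fill(wireBytes, (byte) 0); // mirrors Arrays.fill(bytes, (byte) 0) in the diff
        }
        System.out.println("password length: " + password.length);
        Arrays.fill(password, '\0'); // the caller clears its copy when done
    }
}
```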