Revert commits back to after 2.11 release #2423

Closed
4 changes: 2 additions & 2 deletions common/build.gradle
@@ -34,7 +34,7 @@ repositories {
dependencies {
api "org.antlr:antlr4-runtime:4.7.1"
api group: 'com.google.guava', name: 'guava', version: '32.0.1-jre'
api group: 'org.apache.logging.log4j', name: 'log4j-core', version:"${versions.log4j}"
api group: 'org.apache.logging.log4j', name: 'log4j-core', version:'2.20.0'
api group: 'org.apache.commons', name: 'commons-lang3', version: '3.12.0'
api group: 'com.squareup.okhttp3', name: 'okhttp', version: '4.9.3'
implementation 'com.github.babbel:okhttp-aws-signer:1.0.2'
@@ -63,4 +63,4 @@ configurations.all {
resolutionStrategy.force "org.apache.httpcomponents:httpcore:4.4.13"
resolutionStrategy.force "joda-time:joda-time:2.10.12"
resolutionStrategy.force "org.slf4j:slf4j-api:1.7.36"
}
}
@@ -32,18 +32,11 @@ public enum Key {
QUERY_SIZE_LIMIT("plugins.query.size_limit"),
ENCYRPTION_MASTER_KEY("plugins.query.datasources.encryption.masterkey"),
DATASOURCES_URI_HOSTS_DENY_LIST("plugins.query.datasources.uri.hosts.denylist"),
DATASOURCES_LIMIT("plugins.query.datasources.limit"),

METRICS_ROLLING_WINDOW("plugins.query.metrics.rolling_window"),
METRICS_ROLLING_INTERVAL("plugins.query.metrics.rolling_interval"),
SPARK_EXECUTION_ENGINE_CONFIG("plugins.query.executionengine.spark.config"),
CLUSTER_NAME("cluster.name"),
SPARK_EXECUTION_SESSION_LIMIT("plugins.query.executionengine.spark.session.limit"),
SPARK_EXECUTION_REFRESH_JOB_LIMIT("plugins.query.executionengine.spark.refresh_job.limit"),
SESSION_INDEX_TTL("plugins.query.executionengine.spark.session.index.ttl"),
RESULT_INDEX_TTL("plugins.query.executionengine.spark.result.index.ttl"),
AUTO_INDEX_MANAGEMENT_ENABLED(
"plugins.query.executionengine.spark.auto_index_management.enabled");
CLUSTER_NAME("cluster.name");

@Getter private final String keyValue;
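Each `Key` constant simply wraps the flat setting name it is registered under and exposes it through the Lombok-generated `getKeyValue()`. A self-contained sketch of the same pattern, using two constants from this diff plus a reverse-lookup helper for illustration (this is not the plugin's actual class):

```java
import java.util.Arrays;
import java.util.Optional;

// Illustrative stand-in for the Key enum pattern shown in this diff: each
// constant wraps the flat setting name it is registered under.
public enum SettingKey {
  QUERY_SIZE_LIMIT("plugins.query.size_limit"),
  CLUSTER_NAME("cluster.name");

  private final String keyValue;

  SettingKey(String keyValue) {
    this.keyValue = keyValue;
  }

  public String getKeyValue() {
    return keyValue;
  }

  // Reverse lookup from the flat setting name back to the enum constant.
  public static Optional<SettingKey> of(String keyValue) {
    return Arrays.stream(values()).filter(k -> k.keyValue.equals(keyValue)).findFirst();
  }
}
```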

@@ -5,7 +5,6 @@

package org.opensearch.sql.datasource;

import java.util.Map;
import java.util.Set;
import org.opensearch.sql.datasource.model.DataSource;
import org.opensearch.sql.datasource.model.DataSourceMetadata;
@@ -57,19 +56,12 @@ public interface DataSourceService {
void createDataSource(DataSourceMetadata metadata);

/**
* Updates {@link DataSource} corresponding to dataSourceMetadata (all fields needed).
* Updates {@link DataSource} corresponding to dataSourceMetadata.
*
* @param dataSourceMetadata {@link DataSourceMetadata}.
*/
void updateDataSource(DataSourceMetadata dataSourceMetadata);

/**
* Patches {@link DataSource} corresponding to the given name (only fields to be changed needed).
*
* @param dataSourceData
*/
void patchDataSource(Map<String, Object> dataSourceData);

/**
* Deletes {@link DataSource} corresponding to the DataSource name.
*
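The Javadoc for `updateDataSource` and the removed `patchDataSource` above draws the line this revert erases: an update replaces the whole metadata record (all fields supplied), while a patch carries only the fields being changed. A minimal sketch of how a caller might use the two against the pre-revert interface; the datasource name, properties, role, connector type, and patch map keys are illustrative assumptions, not values from this PR:

```java
import java.util.List;
import java.util.Map;
import org.opensearch.sql.datasource.DataSourceService;
import org.opensearch.sql.datasource.model.DataSourceMetadata;
import org.opensearch.sql.datasource.model.DataSourceType;

public class DataSourceUpdateSketch {

  // Full replacement: updateDataSource needs every field, built here with the
  // pre-revert six-argument constructor shown in this diff.
  static void fullUpdate(DataSourceService service) {
    DataSourceMetadata metadata =
        new DataSourceMetadata(
            "my_prometheus",                                   // name (illustrative)
            "metrics datasource",                              // description
            DataSourceType.PROMETHEUS,                         // connector (assumed type)
            List.of("prometheus_access"),                      // allowedRoles
            Map.of("prometheus.uri", "http://localhost:9090"), // properties
            null);                                             // resultIndex: derived from name when null
    service.updateDataSource(metadata);
  }

  // Partial update: patchDataSource carries only the fields being changed.
  // The exact map keys the real implementation expects are not shown in this
  // diff, so these are assumptions.
  static void partialUpdate(DataSourceService service) {
    service.patchDataSource(
        Map.<String, Object>of("name", "my_prometheus", "description", "updated description"));
  }
}
```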
@@ -16,7 +16,7 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;
@@ -25,24 +25,11 @@

@Getter
@Setter
@AllArgsConstructor
@EqualsAndHashCode
@JsonIgnoreProperties(ignoreUnknown = true)
public class DataSourceMetadata {

public static final String DEFAULT_RESULT_INDEX = "query_execution_result";
public static final int MAX_RESULT_INDEX_NAME_SIZE = 255;
// OS doesn’t allow uppercase: https://tinyurl.com/yse2xdbx
public static final String RESULT_INDEX_NAME_PATTERN = "[a-z0-9_-]+";
public static String INVALID_RESULT_INDEX_NAME_SIZE =
"Result index name size must contains less than "
+ MAX_RESULT_INDEX_NAME_SIZE
+ " characters";
public static String INVALID_CHAR_IN_RESULT_INDEX_NAME =
"Result index name has invalid character. Valid characters are a-z, 0-9, -(hyphen) and"
+ " _(underscore)";
public static String INVALID_RESULT_INDEX_PREFIX =
"Result index must start with " + DEFAULT_RESULT_INDEX;

@JsonProperty private String name;

@JsonProperty private String description;
@@ -57,31 +44,18 @@ public class DataSourceMetadata {

@JsonProperty private String resultIndex;

public static Function<String, String> DATASOURCE_TO_RESULT_INDEX =
datasourceName -> String.format("%s_%s", DEFAULT_RESULT_INDEX, datasourceName);

public DataSourceMetadata(
String name,
String description,
DataSourceType connector,
List<String> allowedRoles,
Map<String, String> properties,
String resultIndex) {
this.name = name;
String errorMessage = validateCustomResultIndex(resultIndex);
if (errorMessage != null) {
throw new IllegalArgumentException(errorMessage);
}
if (resultIndex == null) {
this.resultIndex = fromNameToCustomResultIndex();
} else {
this.resultIndex = resultIndex;
}

this.connector = connector;
this.description = description;
this.description = StringUtils.EMPTY;
this.properties = properties;
this.allowedRoles = allowedRoles;
this.resultIndex = resultIndex;
}

public DataSourceMetadata() {
@@ -97,56 +71,9 @@ public DataSourceMetadata() {
public static DataSourceMetadata defaultOpenSearchDataSourceMetadata() {
return new DataSourceMetadata(
DEFAULT_DATASOURCE_NAME,
StringUtils.EMPTY,
DataSourceType.OPENSEARCH,
Collections.emptyList(),
ImmutableMap.of(),
null);
}

public String validateCustomResultIndex(String resultIndex) {
if (resultIndex == null) {
return null;
}
if (resultIndex.length() > MAX_RESULT_INDEX_NAME_SIZE) {
return INVALID_RESULT_INDEX_NAME_SIZE;
}
if (!resultIndex.matches(RESULT_INDEX_NAME_PATTERN)) {
return INVALID_CHAR_IN_RESULT_INDEX_NAME;
}
if (resultIndex != null && !resultIndex.startsWith(DEFAULT_RESULT_INDEX)) {
return INVALID_RESULT_INDEX_PREFIX;
}
return null;
}

/**
* Since we are using datasource name to create result index, we need to make sure that the final
* name is valid
*
* @param resultIndex result index name
* @return valid result index name
*/
private String convertToValidResultIndex(String resultIndex) {
// Limit Length
if (resultIndex.length() > MAX_RESULT_INDEX_NAME_SIZE) {
resultIndex = resultIndex.substring(0, MAX_RESULT_INDEX_NAME_SIZE);
}

// Pattern Matching: Remove characters that don't match the pattern
StringBuilder validChars = new StringBuilder();
for (char c : resultIndex.toCharArray()) {
if (String.valueOf(c).matches(RESULT_INDEX_NAME_PATTERN)) {
validChars.append(c);
}
}
return validChars.toString();
}

public String fromNameToCustomResultIndex() {
if (name == null) {
throw new IllegalArgumentException("Datasource name cannot be null");
}
return convertToValidResultIndex(DATASOURCE_TO_RESULT_INDEX.apply(name.toLowerCase()));
}
}
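The removed helpers above implement the result-index naming rule: when no custom index is given, the index is derived as `query_execution_result_<datasource name>`, lower-cased, capped at 255 characters, and restricted to `[a-z0-9_-]`; a user-supplied index is instead validated against the same pattern and must start with the `query_execution_result` prefix. A standalone sketch of the derivation path, mirroring the removed code rather than calling the plugin's class:

```java
public class ResultIndexNaming {

  static final String DEFAULT_RESULT_INDEX = "query_execution_result";
  static final int MAX_RESULT_INDEX_NAME_SIZE = 255;
  static final String RESULT_INDEX_NAME_PATTERN = "[a-z0-9_-]+";

  // Mirrors fromNameToCustomResultIndex(): derive the index from the datasource
  // name, then trim it to the allowed length and character set.
  static String resultIndexFor(String datasourceName) {
    String candidate =
        String.format("%s_%s", DEFAULT_RESULT_INDEX, datasourceName.toLowerCase());
    if (candidate.length() > MAX_RESULT_INDEX_NAME_SIZE) {
      candidate = candidate.substring(0, MAX_RESULT_INDEX_NAME_SIZE);
    }
    StringBuilder valid = new StringBuilder();
    for (char c : candidate.toCharArray()) {
      if (String.valueOf(c).matches(RESULT_INDEX_NAME_PATTERN)) {
        valid.append(c);
      }
    }
    return valid.toString();
  }

  public static void main(String[] args) {
    // Prints "query_execution_result_my-glue": lower-cased, '!' stripped.
    System.out.println(resultIndexFor("My-Glue!"));
  }
}
```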
@@ -19,7 +19,6 @@
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.opensearch.sql.DataSourceSchemaName;
import org.opensearch.sql.analysis.symbol.Namespace;
@@ -198,7 +197,6 @@ public Set<DataSourceMetadata> getDataSourceMetadata(boolean isDefaultDataSource
ds ->
new DataSourceMetadata(
ds.getName(),
StringUtils.EMPTY,
ds.getConnectorType(),
Collections.emptyList(),
ImmutableMap.of(),
@@ -233,9 +231,6 @@ public DataSource getDataSource(String dataSourceName) {
@Override
public void updateDataSource(DataSourceMetadata dataSourceMetadata) {}

@Override
public void patchDataSource(Map<String, Object> dataSourceData) {}

@Override
public void deleteDataSource(String dataSourceName) {}

@@ -18,7 +18,6 @@
import java.util.LinkedHashMap;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
@@ -63,7 +62,6 @@ void testIterator() {
dataSource ->
new DataSourceMetadata(
dataSource.getName(),
StringUtils.EMPTY,
dataSource.getConnectorType(),
Collections.emptyList(),
ImmutableMap.of(),
4 changes: 1 addition & 3 deletions datasources/build.gradle
@@ -16,8 +16,6 @@ repositories {
dependencies {
implementation project(':core')
implementation project(':protocol')
implementation project(':opensearch')
implementation project(':legacy')
implementation group: 'org.opensearch', name: 'opensearch', version: "${opensearch_version}"
implementation group: 'org.opensearch', name: 'opensearch-x-content', version: "${opensearch_version}"
implementation group: 'org.opensearch', name: 'common-utils', version: "${opensearch_build}"
@@ -37,7 +35,7 @@ dependencies {
test {
useJUnitPlatform()
testLogging {
events "skipped", "failed"
events "passed", "skipped", "failed"
exceptionFormat "full"
}
}

This file was deleted.

This file was deleted.
