From 30d88f9c2478592b922319afc178ff6c35521643 Mon Sep 17 00:00:00 2001 From: David Venable Date: Fri, 17 Nov 2023 12:34:51 -0800 Subject: [PATCH 1/8] Use Amazon Linux 2023 as the base image for the Data Prepper Docker image. This installs Temurin for the Amazon Linux 2 distribution. Resolves #3505. (#3671) Signed-off-by: David Venable --- release/docker/Dockerfile | 13 +++++++++---- release/docker/adoptium.repo | 6 ++++++ release/docker/build.gradle | 1 + 3 files changed, 16 insertions(+), 4 deletions(-) create mode 100644 release/docker/adoptium.repo diff --git a/release/docker/Dockerfile b/release/docker/Dockerfile index c4c138c0a6..dcc586dc52 100644 --- a/release/docker/Dockerfile +++ b/release/docker/Dockerfile @@ -1,4 +1,5 @@ -FROM eclipse-temurin:17-jre-jammy +FROM public.ecr.aws/amazonlinux/amazonlinux:2023 + ARG PIPELINE_FILEPATH ARG CONFIG_FILEPATH ARG ARCHIVE_FILE @@ -9,9 +10,13 @@ ENV ENV_CONFIG_FILEPATH=$CONFIG_FILEPATH ENV ENV_PIPELINE_FILEPATH=$PIPELINE_FILEPATH # Update all packages -RUN apt -y update -RUN apt -y install bash bc -RUN apt -y full-upgrade +RUN dnf -y update +RUN dnf -y install bash bc +RUN dnf -y upgrade + +# Setup the Adoptium package repo and install Temurin Java +ADD adoptium.repo /etc/yum.repos.d/adoptium.repo +RUN dnf -y install temurin-17-jdk RUN mkdir -p /var/log/data-prepper ADD $ARCHIVE_FILE /usr/share diff --git a/release/docker/adoptium.repo b/release/docker/adoptium.repo new file mode 100644 index 0000000000..c54ffe9b4a --- /dev/null +++ b/release/docker/adoptium.repo @@ -0,0 +1,6 @@ +[adoptium] +name=Adoptium +baseurl=https://packages.adoptium.net/artifactory/rpm/amazonlinux/2/$basearch +enabled=1 +gpgcheck=1 +gpgkey=https://packages.adoptium.net/artifactory/api/gpg/key/public diff --git a/release/docker/build.gradle b/release/docker/build.gradle index be96705aaf..1816364b6c 100644 --- a/release/docker/build.gradle +++ b/release/docker/build.gradle @@ -12,6 +12,7 @@ docker { tag "${project.rootProject.name}", "${project.version}" files project(':release:archives:linux').tasks.getByName('linuxx64DistTar').archivePath files "${project.projectDir}/config/default-data-prepper-config.yaml", "${project.projectDir}/config/default-keystore.p12" + files 'adoptium.repo' buildArgs(['ARCHIVE_FILE' : project(':release:archives:linux').tasks.getByName('linuxx64DistTar').archiveFileName.get(), 'ARCHIVE_FILE_UNPACKED' : project(':release:archives:linux').tasks.getByName('linuxx64DistTar').archiveFileName.get().replace('.tar.gz', ''), 'CONFIG_FILEPATH' : '/usr/share/data-prepper/config/data-prepper-config.yaml', From 915e84d9f7b80848442161c9d906601337c73e19 Mon Sep 17 00:00:00 2001 From: Karsten Schnitter Date: Tue, 21 Nov 2023 23:48:05 +0100 Subject: [PATCH 2/8] Select require_alias for OS bulk inserts from ISM Policy (#3560) * Select require_alias for OS bulk inserts from ISM Policy This change requires an alias when writing to an aliased index. This avoids creation of an index without an alias when a previously existing alias and index were deleted. It increases the robustness of DataPrepper's trace index against OS user interactions. Signed-off-by: Karsten Schnitter * 3342 Determine Alias Configuration from OS During OS sink initialization, it is determined from OS whether the configured index actually is an alias. If so, bulk requests will require the index to always be an alias. The response is cached to avoid further requests. This also ensures that the alias configuration is kept in the initially intended state. 
After all, this change is meant to prevent automatic index creation for a formerly existing alias. Signed-off-by: Karsten Schnitter * Fix imports for checkstyle Signed-off-by: Karsten Schnitter * Fix integration tests The specific user used in some tests of OpenSearchSinkIT needs get permissions on all aliases to test for their existence. Another bug with determining the alias name is fixed as well. As a final result, the DataPrepper OpenSearch user requires write access to the indices and now additionally read access to the aliases. This may require a permissions change for self-managed clusters. Signed-off-by: Karsten Schnitter * Fix Bulk Requests for older OD versions The `require_alias` parameter for bulk requests was only introduced with ES 7.10. Since DataPrepper needs to be compatible down to 6.8, the parameter should not be used in earlier OD versions. This change will apply the parameter only when OpenSearch is detected as the target. Signed-off-by: Karsten Schnitter * Add Permission to get Cluster Info For checking the OS version, the test user needs an additional permission. Signed-off-by: Karsten Schnitter --------- Signed-off-by: Karsten Schnitter --- .../OpenSearchSecurityAccessor.java | 5 ++ .../sink/opensearch/OpenSearchSink.java | 7 +-- .../index/AbstractIndexManager.java | 28 +++++++++++ .../sink/opensearch/index/IndexManager.java | 2 + .../index/DefaultIndexManagerTests.java | 46 +++++++++++++++++++ 5 files changed, 85 insertions(+), 3 deletions(-) diff --git a/data-prepper-plugins/opensearch/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSecurityAccessor.java b/data-prepper-plugins/opensearch/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSecurityAccessor.java index 0e93d94d09..1a8c6977a9 100644 --- a/data-prepper-plugins/opensearch/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSecurityAccessor.java +++ b/data-prepper-plugins/opensearch/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSecurityAccessor.java @@ -32,11 +32,16 @@ private void createRole(final String role, final String indexPattern, final Stri final String createRoleJson = Strings.toString( XContentFactory.jsonBuilder() .startObject() + .array("cluster_permissions", "cluster:monitor/main") .startArray("index_permissions") .startObject() .array("index_patterns", new String[]{indexPattern}) .array("allowed_actions", allowedActions) .endObject() + .startObject() + .array("index_patterns", new String[]{"*"}) + .array("allowed_actions", "indices:admin/aliases/get") + .endObject() .endArray() .endObject() ); diff --git a/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSink.java b/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSink.java index f7de43a527..159f543f52 100644 --- a/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSink.java +++ b/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSink.java @@ -228,17 +228,18 @@ private void doInitializeInternal() throws IOException { } indexManager.setupIndex(); + final Boolean requireAlias = indexManager.isIndexAlias(configuredIndexAlias); final boolean isEstimateBulkSizeUsingCompression = openSearchSinkConfig.getIndexConfiguration().isEstimateBulkSizeUsingCompression(); final boolean 
isRequestCompressionEnabled = openSearchSinkConfig.getConnectionConfiguration().isRequestCompressionEnabled(); if (isEstimateBulkSizeUsingCompression && isRequestCompressionEnabled) { final int maxLocalCompressionsForEstimation = openSearchSinkConfig.getIndexConfiguration().getMaxLocalCompressionsForEstimation(); - bulkRequestSupplier = () -> new JavaClientAccumulatingCompressedBulkRequest(new BulkRequest.Builder(), bulkSize, maxLocalCompressionsForEstimation); + bulkRequestSupplier = () -> new JavaClientAccumulatingCompressedBulkRequest(new BulkRequest.Builder().requireAlias(requireAlias), bulkSize, maxLocalCompressionsForEstimation); } else if (isEstimateBulkSizeUsingCompression) { LOG.warn("Estimate bulk request size using compression was enabled but request compression is disabled. " + "Estimating bulk request size without compression."); - bulkRequestSupplier = () -> new JavaClientAccumulatingUncompressedBulkRequest(new BulkRequest.Builder()); + bulkRequestSupplier = () -> new JavaClientAccumulatingUncompressedBulkRequest(new BulkRequest.Builder().requireAlias(requireAlias)); } else { - bulkRequestSupplier = () -> new JavaClientAccumulatingUncompressedBulkRequest(new BulkRequest.Builder()); + bulkRequestSupplier = () -> new JavaClientAccumulatingUncompressedBulkRequest(new BulkRequest.Builder().requireAlias(requireAlias)); } final int maxRetries = openSearchSinkConfig.getRetryConfiguration().getMaxRetries(); diff --git a/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/AbstractIndexManager.java b/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/AbstractIndexManager.java index 2aca8b0275..86974bec13 100644 --- a/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/AbstractIndexManager.java +++ b/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/AbstractIndexManager.java @@ -14,6 +14,8 @@ import org.opensearch.client.opensearch.cluster.GetClusterSettingsRequest; import org.opensearch.client.opensearch.cluster.GetClusterSettingsResponse; import org.opensearch.client.opensearch.indices.CreateIndexRequest; +import org.opensearch.client.opensearch.indices.ExistsAliasRequest; +import org.opensearch.client.transport.endpoints.BooleanResponse; import org.opensearch.dataprepper.model.plugin.InvalidPluginConfigurationException; import org.opensearch.dataprepper.plugins.sink.opensearch.OpenSearchSinkConfiguration; import org.slf4j.Logger; @@ -50,6 +52,8 @@ public abstract class AbstractIndexManager implements IndexManager { protected IsmPolicyManagementStrategy ismPolicyManagementStrategy; private final TemplateStrategy templateStrategy; protected String indexPrefix; + private Boolean isIndexAlias; + private boolean isIndexAliasChecked; private static final Logger LOG = LoggerFactory.getLogger(AbstractIndexManager.class); @@ -112,6 +116,10 @@ public static String getIndexAliasWithDate(final String indexAlias) { return indexAlias.replaceAll(TIME_PATTERN_REGULAR_EXPRESSION, "") + suffix; } + private void initializeIsIndexAlias(final String indexAlias) { + + } + private void initializeIndexPrefixAndSuffix(final String indexAlias){ final DateTimeFormatter dateFormatter = getDatePatternFormatter(indexAlias); if (dateFormatter != null) { @@ -176,6 +184,26 @@ public static ZonedDateTime getCurrentUtcTime() { return 
LocalDateTime.now().atZone(ZoneId.systemDefault()).withZoneSameInstant(UTC_ZONE_ID); } + @Override + public Boolean isIndexAlias(final String dynamicIndexAlias) throws IOException { + if (!isIndexAliasChecked) { + try { + // Try to get the OpenSearch version. This fails on older OpenDistro versions that do not support + // `require_alias` as a bulk API parameter. All OpenSearch versions do, as this was introduced in + // ES 7.10. + openSearchClient.info(); + ExistsAliasRequest request = new ExistsAliasRequest.Builder().name(dynamicIndexAlias).build(); + BooleanResponse response = openSearchClient.indices().existsAlias(request); + isIndexAlias = response.value() && checkISMEnabled(); + } catch (RuntimeException ex) { + isIndexAlias = null; + } finally { + isIndexAliasChecked = true; + } + } + return isIndexAlias; + } + final boolean checkISMEnabled() throws IOException { final GetClusterSettingsRequest request = new GetClusterSettingsRequest.Builder() .includeDefaults(true) diff --git a/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/IndexManager.java b/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/IndexManager.java index ceb1c829a6..1e271b0e14 100644 --- a/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/IndexManager.java +++ b/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/IndexManager.java @@ -3,6 +3,8 @@ import java.io.IOException; public interface IndexManager{ + void setupIndex() throws IOException; String getIndexName(final String indexAlias) throws IOException; + Boolean isIndexAlias(final String indexAlias) throws IOException; } diff --git a/data-prepper-plugins/opensearch/src/test/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/DefaultIndexManagerTests.java b/data-prepper-plugins/opensearch/src/test/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/DefaultIndexManagerTests.java index 6522a93e98..d18c7cbb36 100644 --- a/data-prepper-plugins/opensearch/src/test/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/DefaultIndexManagerTests.java +++ b/data-prepper-plugins/opensearch/src/test/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/DefaultIndexManagerTests.java @@ -22,6 +22,7 @@ import org.opensearch.client.opensearch.cluster.GetClusterSettingsResponse; import org.opensearch.client.opensearch.cluster.OpenSearchClusterClient; import org.opensearch.client.opensearch.indices.CreateIndexRequest; +import org.opensearch.client.opensearch.indices.ExistsAliasRequest; import org.opensearch.client.opensearch.indices.ExistsRequest; import org.opensearch.client.opensearch.indices.GetTemplateRequest; import org.opensearch.client.opensearch.indices.GetTemplateResponse; @@ -313,6 +314,51 @@ void constructor_NullConfiguration() { verify(indexConfiguration).getIsmPolicyFile(); } + @Test + void isIndexAlias_True() throws IOException { + defaultIndexManager = indexManagerFactory.getIndexManager( + IndexType.CUSTOM, openSearchClient, restHighLevelClient, openSearchSinkConfiguration, templateStrategy); + when(openSearchIndicesClient.existsAlias(any(ExistsAliasRequest.class))).thenReturn(new BooleanResponse(true)); + when(clusterSettingsParser.getStringValueClusterSetting(any(GetClusterSettingsResponse.class), anyString())).thenReturn("true"); + assertEquals(true, defaultIndexManager.isIndexAlias(INDEX_ALIAS)); + 
verify(openSearchSinkConfiguration, times(2)).getIndexConfiguration(); + verify(indexConfiguration).getIsmPolicyFile(); + verify(indexConfiguration).getIndexAlias(); + verify(openSearchClient).indices(); + verify(openSearchIndicesClient).existsAlias(any(ExistsAliasRequest.class)); + verify(openSearchClient).cluster(); + verify(openSearchClusterClient).getSettings(any(GetClusterSettingsRequest.class)); + } + + @Test + void isIndexAlias_False_NoAlias() throws IOException { + defaultIndexManager = indexManagerFactory.getIndexManager( + IndexType.CUSTOM, openSearchClient, restHighLevelClient, openSearchSinkConfiguration, templateStrategy); + when(openSearchIndicesClient.existsAlias(any(ExistsAliasRequest.class))).thenReturn(new BooleanResponse(false)); + assertEquals(false, defaultIndexManager.isIndexAlias(INDEX_ALIAS)); + verify(openSearchSinkConfiguration, times(2)).getIndexConfiguration(); + verify(indexConfiguration).getIsmPolicyFile(); + verify(indexConfiguration).getIndexAlias(); + verify(openSearchClient).indices(); + verify(openSearchIndicesClient).existsAlias(any(ExistsAliasRequest.class)); + } + + @Test + void isIndexAlias_False_NoISM() throws IOException { + defaultIndexManager = indexManagerFactory.getIndexManager( + IndexType.CUSTOM, openSearchClient, restHighLevelClient, openSearchSinkConfiguration, templateStrategy); + when(openSearchIndicesClient.existsAlias(any(ExistsAliasRequest.class))).thenReturn(new BooleanResponse(true)); + when(clusterSettingsParser.getStringValueClusterSetting(any(GetClusterSettingsResponse.class), anyString())).thenReturn("false"); + assertEquals(false, defaultIndexManager.isIndexAlias(INDEX_ALIAS)); + verify(openSearchSinkConfiguration, times(2)).getIndexConfiguration(); + verify(indexConfiguration).getIsmPolicyFile(); + verify(indexConfiguration).getIndexAlias(); + verify(openSearchClient).indices(); + verify(openSearchIndicesClient).existsAlias(any(ExistsAliasRequest.class)); + verify(openSearchClient).cluster(); + verify(openSearchClusterClient).getSettings(any(GetClusterSettingsRequest.class)); + } + @Test void checkISMEnabled_True() throws IOException { defaultIndexManager = indexManagerFactory.getIndexManager( From dcd147d7c889e3a50238efac0b4eef2c374c3056 Mon Sep 17 00:00:00 2001 From: kkondaka <41027584+kkondaka@users.noreply.github.com> Date: Mon, 27 Nov 2023 08:00:42 -0800 Subject: [PATCH 3/8] Fix crash in Kafka consumer when negative acknowledgments are received (#3691) Signed-off-by: Krishna Kondaka Co-authored-by: Krishna Kondaka --- .../plugins/kafka/consumer/KafkaCustomConsumer.java | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/data-prepper-plugins/kafka-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafka/consumer/KafkaCustomConsumer.java b/data-prepper-plugins/kafka-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafka/consumer/KafkaCustomConsumer.java index 479303bf7b..099daf754f 100644 --- a/data-prepper-plugins/kafka-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafka/consumer/KafkaCustomConsumer.java +++ b/data-prepper-plugins/kafka-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafka/consumer/KafkaCustomConsumer.java @@ -224,14 +224,20 @@ private void addAcknowledgedOffsets(final TopicPartition topicPartition, final R if (Objects.isNull(commitTracker) && errLogRateLimiter.isAllowed(System.currentTimeMillis())) { LOG.error("Commit tracker not found for TopicPartition: {}", topicPartition); + return; } - final OffsetAndMetadata offsetAndMetadata = 
partitionCommitTrackerMap.get(partitionId).addCompletedOffsets(offsetRange); + final OffsetAndMetadata offsetAndMetadata = commitTracker.addCompletedOffsets(offsetRange); updateOffsetsToCommit(topicPartition, offsetAndMetadata); } private void resetOffsets() { + // Resetting offsets is similar to committing acknowledged offsets. Throttle the frequency of resets by + // checking the current time against the last commit time. The same "lastCommitTime" and commit interval are used in both cases. + long currentTimeMillis = System.currentTimeMillis(); + if ((currentTimeMillis - lastCommitTime) < topicConfig.getCommitInterval().toMillis()) { + return; + } if (partitionsToReset.size() > 0) { partitionsToReset.forEach(partition -> { try { @@ -244,6 +250,8 @@ private void resetOffsets() { consumer.seek(partition, offsetAndMetadata); } partitionCommitTrackerMap.remove(partition.partition()); + final long epoch = getCurrentTimeNanos(); + ownedPartitionsEpoch.put(partition, epoch); } catch (Exception e) { LOG.error("Failed to seek to last committed offset upon negative acknowledgement {}", partition, e); } @@ -493,7 +501,6 @@ public void onPartitionsAssigned(Collection partitions) { continue; } LOG.info("Assigned partition {}", topicPartition); - partitionCommitTrackerMap.remove(topicPartition.partition()); ownedPartitionsEpoch.put(topicPartition, epoch); } } From 7e5746b0ff84d7096c9a1fbf539fccb4cd951cfd Mon Sep 17 00:00:00 2001 From: Qi Chen Date: Mon, 27 Nov 2023 10:02:33 -0600 Subject: [PATCH 4/8] TST: validate special data in opensearch sink (#3685) * TST: validate special data in opensearch sink Signed-off-by: George Chen --- .../sink/opensearch/OpenSearchSinkIT.java | 52 +++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/data-prepper-plugins/opensearch/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSinkIT.java b/data-prepper-plugins/opensearch/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSinkIT.java index a5237f21de..ace3a35dac 100644 --- a/data-prepper-plugins/opensearch/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSinkIT.java +++ b/data-prepper-plugins/opensearch/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSinkIT.java @@ -25,6 +25,7 @@ import org.junit.jupiter.params.provider.ArgumentsProvider; import org.junit.jupiter.params.provider.ArgumentsSource; import org.junit.jupiter.params.provider.CsvSource; +import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.ValueSource; import org.mockito.Mock; import org.opensearch.client.Request; @@ -99,6 +100,9 @@ import static org.opensearch.dataprepper.plugins.sink.opensearch.OpenSearchIntegrationHelper.wipeAllTemplates; public class OpenSearchSinkIT { + private static final int LUCENE_CHAR_LENGTH_LIMIT = 32_766; + private static final String TEST_STRING_WITH_SPECIAL_CHARS = "Hello! Data-Prepper? 
#Example123"; + private static final String TEST_STRING_WITH_NON_LATIN_CHARS = "Привет,Γειά σας,こんにちは,你好"; private static final String PLUGIN_NAME = "opensearch"; private static final String PIPELINE_NAME = "integTestPipeline"; private static final String TEST_CUSTOM_INDEX_POLICY_FILE = "test-custom-index-policy-file.json"; @@ -969,6 +973,33 @@ public void testEventOutput() throws IOException, InterruptedException { sink.shutdown(); } + @ParameterizedTest + @MethodSource("getAttributeTestSpecialAndExtremeValues") + public void testEventOutputWithSpecialAndExtremeValues(final Object testValue) throws IOException, InterruptedException { + final String testIndexAlias = "test-alias"; + final String testField = "value"; + final Map data = new HashMap<>(); + data.put(testField, testValue); + final Event testEvent = JacksonEvent.builder() + .withData(data) + .withEventType("event") + .build(); + + final List> testRecords = Collections.singletonList(new Record<>(testEvent)); + + final PluginSetting pluginSetting = generatePluginSetting(null, testIndexAlias, null); + final OpenSearchSink sink = createObjectUnderTest(pluginSetting, true); + sink.output(testRecords); + + final List> retSources = getSearchResponseDocSources(testIndexAlias); + final Map expectedContent = new HashMap<>(); + expectedContent.put(testField, testValue); + + assertThat(retSources.size(), equalTo(1)); + assertThat(retSources.get(0), equalTo(expectedContent)); + sink.shutdown(); + } + @ParameterizedTest @ValueSource(strings = {"info/ids/id", "id"}) public void testOpenSearchDocumentId(final String testDocumentIdField) throws IOException, InterruptedException { @@ -1436,4 +1467,25 @@ private void createV1IndexTemplate(final String templateName, final String index private static boolean isES6() { return DeclaredOpenSearchVersion.OPENDISTRO_0_10.compareTo(OpenSearchIntegrationHelper.getVersion()) >= 0; } + + private static Stream getAttributeTestSpecialAndExtremeValues() { + return Stream.of( + null, + Arguments.of(Long.MAX_VALUE), + Arguments.of(Long.MIN_VALUE), + Arguments.of(Integer.MAX_VALUE), + Arguments.of(Integer.MIN_VALUE), + Arguments.of(RandomStringUtils.randomAlphabetic(LUCENE_CHAR_LENGTH_LIMIT)), + Arguments.of(TEST_STRING_WITH_SPECIAL_CHARS), + Arguments.of(TEST_STRING_WITH_NON_LATIN_CHARS), + Arguments.of(Double.MIN_VALUE), + Arguments.of(-Double.MIN_VALUE), + Arguments.of((double) Float.MAX_VALUE), + Arguments.of((double) Float.MIN_VALUE), + Arguments.of((double) -Float.MAX_VALUE), + Arguments.of((double) -Float.MIN_VALUE), + Arguments.of(Boolean.TRUE), + Arguments.of(Boolean.FALSE) + ); + } } From 37e18a5107697d074ce15b19c313b2dc911338a2 Mon Sep 17 00:00:00 2001 From: David Venable Date: Mon, 27 Nov 2023 08:45:36 -0800 Subject: [PATCH 5/8] Updates the opensearch-java client to 2.8.1 and opensearch to 1.3.13. This includes a transitive dependency update to parsson to resolve CVE-2023-4043. (#3689) Update required version of org.json library to resolve CVE-2023-5072. Require a Zookeeper version which resolves CVE-2023-44981. Require a transitive Scala library to resolve CVE-2023-46122. 
Resolves #3588, #3522, #3491, #3547 Signed-off-by: David Venable --- build.gradle | 10 ++++++++-- performance-test/build.gradle | 9 +++++++++ settings.gradle | 4 ++-- 3 files changed, 19 insertions(+), 4 deletions(-) diff --git a/build.gradle b/build.gradle index 39f6886e44..0ac5e77554 100644 --- a/build.gradle +++ b/build.gradle @@ -116,6 +116,12 @@ subprojects { } because 'the build fails if the Log4j API is not update along with log4j-core' } + implementation('org.apache.zookeeper:zookeeper') { + version { + require '3.7.2' + } + because 'Fixes CVE-2023-44981' + } implementation('com.google.code.gson:gson') { version { require '2.8.9' @@ -196,9 +202,9 @@ subprojects { } implementation('org.json:json') { version { - require '20230618' + require '20231013' } - because 'CVE from transitive dependencies' + because 'CVE-2023-5072, CVE from transitive dependencies' } } } diff --git a/performance-test/build.gradle b/performance-test/build.gradle index b5ffad15f1..36a74db08a 100644 --- a/performance-test/build.gradle +++ b/performance-test/build.gradle @@ -22,6 +22,15 @@ dependencies { gatlingImplementation 'software.amazon.awssdk:auth:2.20.67' implementation 'com.fasterxml.jackson.core:jackson-core' testRuntimeOnly testLibs.junit.engine + + constraints { + zinc('org.scala-sbt:io_2.13') { + version { + require '1.9.7' + } + because 'Fixes CVE-2023-46122' + } + } } test { diff --git a/settings.gradle b/settings.gradle index b674d1689d..2e115a34a2 100644 --- a/settings.gradle +++ b/settings.gradle @@ -26,9 +26,9 @@ dependencyResolutionManagement { library('protobuf-util', 'com.google.protobuf', 'protobuf-java-util').versionRef('protobuf') version('opentelemetry', '0.16.0-alpha') library('opentelemetry-proto', 'io.opentelemetry.proto', 'opentelemetry-proto').versionRef('opentelemetry') - version('opensearchJava', '2.5.0') + version('opensearchJava', '2.8.1') library('opensearch-java', 'org.opensearch.client', 'opensearch-java').versionRef('opensearchJava') - version('opensearch', '1.3.8') + version('opensearch', '1.3.13') library('opensearch-client', 'org.opensearch.client', 'opensearch-rest-client').versionRef('opensearch') library('opensearch-rhlc', 'org.opensearch.client', 'opensearch-rest-high-level-client').versionRef('opensearch') version('spring', '5.3.28') From d2382708d185f64bb0d24876e92443da16706e00 Mon Sep 17 00:00:00 2001 From: Asif Sohail Mohammed Date: Mon, 27 Nov 2023 10:49:02 -0600 Subject: [PATCH 6/8] Fix S3 scan failing tests (#3693) Signed-off-by: Asif Sohail Mohammed --- .../source/s3/S3ScanObjectWorkerIT.java | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/data-prepper-plugins/s3-source/src/integrationTest/java/org/opensearch/dataprepper/plugins/source/s3/S3ScanObjectWorkerIT.java b/data-prepper-plugins/s3-source/src/integrationTest/java/org/opensearch/dataprepper/plugins/source/s3/S3ScanObjectWorkerIT.java index 48c5862155..9ed910f33a 100644 --- a/data-prepper-plugins/s3-source/src/integrationTest/java/org/opensearch/dataprepper/plugins/source/s3/S3ScanObjectWorkerIT.java +++ b/data-prepper-plugins/s3-source/src/integrationTest/java/org/opensearch/dataprepper/plugins/source/s3/S3ScanObjectWorkerIT.java @@ -231,8 +231,8 @@ void parseS3Object_parquet_correctly_with_bucket_scan_and_loads_data_into_Buffer final String keyPrefix = "s3source/s3-scan/" + recordsGenerator.getFileExtension() + "/" + Instant.now().toEpochMilli(); final String buketOptionYaml = "name: " + bucket + "\n" + - "key_prefix:\n" + - " include:\n" + + "filter:\n" + + " 
include_prefix:\n" + " - " + keyPrefix + "\n" + " exclude_suffix:\n" + " - .csv\n" + @@ -247,7 +247,7 @@ void parseS3Object_parquet_correctly_with_bucket_scan_and_loads_data_into_Buffer final ScanOptions startTimeAndRangeScanOptions = new ScanOptions.Builder() .setBucketOption(objectMapper.readValue(buketOptionYaml, S3ScanBucketOption.class)) .setStartDateTime(LocalDateTime.now().minusDays(1)) - .setRange(Duration.parse("P2DT10M")) + .setEndDateTime(LocalDateTime.now().plus(Duration.ofMinutes(5))) .build(); final ScanObjectWorker objectUnderTest = createObjectUnderTest(recordsGenerator, @@ -279,8 +279,8 @@ void parseS3Object_correctly_with_bucket_scan_and_loads_data_into_Buffer( String keyPrefix = "s3source/s3-scan/" + recordsGenerator.getFileExtension() + "/" + Instant.now().toEpochMilli(); final String key = getKeyString(keyPrefix,recordsGenerator, shouldCompress); final String buketOptionYaml = "name: " + bucket + "\n" + - "key_prefix:\n" + - " include:\n" + + "filter:\n" + + " include_prefix:\n" + " - " + keyPrefix; scanOptions.setBucketOption(objectMapper.readValue(buketOptionYaml, S3ScanBucketOption.class)); @@ -320,10 +320,10 @@ void parseS3Object_correctly_with_bucket_scan_and_loads_data_into_Buffer_and_del when(s3SourceConfig.isDeleteS3ObjectsOnRead()).thenReturn(deleteS3Objects); String keyPrefix = "s3source/s3-scan/" + recordsGenerator.getFileExtension() + "/" + Instant.now().toEpochMilli(); - final String key = getKeyString(keyPrefix,recordsGenerator, shouldCompress); + final String key = getKeyString(keyPrefix, recordsGenerator, shouldCompress); final String buketOptionYaml = "name: " + bucket + "\n" + - "key_prefix:\n" + - " include:\n" + + "filter:\n" + + " include_prefix:\n" + " - " + keyPrefix; final ScanOptions.Builder startTimeAndEndTimeScanOptions = ScanOptions.builder() @@ -386,17 +386,17 @@ public Stream provideArguments(final ExtensionContext conte final List recordsToAccumulateList = List.of( 100); final List booleanList = List.of(Boolean.TRUE); - final ScanOptions.Builder startTimeAndRangeScanOptions = ScanOptions.builder() - .setStartDateTime(LocalDateTime.now()) - .setRange(Duration.parse("P2DT10H")); - final ScanOptions.Builder endTimeAndRangeScanOptions = ScanOptions.builder() - .setEndDateTime(LocalDateTime.now().plus(Duration.ofHours(1))) - .setRange(Duration.parse("P7DT10H")); + final ScanOptions.Builder startTimeScanOptions = ScanOptions.builder() + .setStartDateTime(LocalDateTime.now()); + final ScanOptions.Builder endTimeScanOptions = ScanOptions.builder() + .setEndDateTime(LocalDateTime.now().plus(Duration.ofHours(1))); final ScanOptions.Builder startTimeAndEndTimeScanOptions = ScanOptions.builder() .setStartDateTime(LocalDateTime.now().minus(Duration.ofMinutes(10))) .setEndDateTime(LocalDateTime.now().plus(Duration.ofHours(1))); + final ScanOptions.Builder rangeScanOptions = ScanOptions.builder() + .setRange(Duration.parse("P7DT10H")); - List scanOptions = List.of(startTimeAndRangeScanOptions,endTimeAndRangeScanOptions,startTimeAndEndTimeScanOptions); + List scanOptions = List.of(startTimeScanOptions, endTimeScanOptions, startTimeAndEndTimeScanOptions, rangeScanOptions); return recordsGenerators .stream() .flatMap(recordsGenerator -> numberOfRecordsList From b8fcffd4842df1bc9dc2820d0028d5bbb1ad49d0 Mon Sep 17 00:00:00 2001 From: David Venable Date: Mon, 27 Nov 2023 08:52:29 -0800 Subject: [PATCH 7/8] Updates werkzeug to 3.0.1 which fixes CVE-2023-46136. This required updating to dash 2.14.1 as 2.13 does not support newer versions of werkzeug. 
Resolves #3552. (#3690) Signed-off-by: David Venable --- .../trace-analytics-sample-app/sample-app/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/trace-analytics-sample-app/sample-app/requirements.txt b/examples/trace-analytics-sample-app/sample-app/requirements.txt index 5c39bb7793..2b55521c2b 100644 --- a/examples/trace-analytics-sample-app/sample-app/requirements.txt +++ b/examples/trace-analytics-sample-app/sample-app/requirements.txt @@ -1,4 +1,4 @@ -dash==2.13.0 +dash==2.14.1 mysql-connector==2.2.9 opentelemetry-exporter-otlp==1.20.0 opentelemetry-instrumentation-flask==0.41b0 @@ -7,4 +7,4 @@ opentelemetry-instrumentation-requests==0.41b0 opentelemetry-sdk==1.20.0 protobuf==3.20.3 urllib3==2.0.7 -werkzeug==2.2.3 \ No newline at end of file +werkzeug==3.0.1 \ No newline at end of file From c88c27f852a8c90ef87ef481ac446634230fb1a7 Mon Sep 17 00:00:00 2001 From: David Venable Date: Mon, 27 Nov 2023 10:58:50 -0800 Subject: [PATCH 8/8] Require Apache Avro 1.11.3 to fix CVE-2023-39410. Resolves #3430. (#3695) Signed-off-by: David Venable --- build.gradle | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/build.gradle b/build.gradle index 0ac5e77554..1fbb592321 100644 --- a/build.gradle +++ b/build.gradle @@ -98,6 +98,12 @@ subprojects { testImplementation testLibs.hamcrest testImplementation testLibs.awaitility constraints { + implementation('org.apache.avro:avro') { + version { + require '1.11.3' + } + because 'Fixes CVE-2023-39410.' + } implementation('org.apache.httpcomponents:httpclient') { version { require '4.5.14'