diff --git a/data-prepper-plugins/kafka-connect-plugins/README.md b/data-prepper-plugins/kafka-connect-plugins/README.md index e4bd43dd68..e3517b2885 100644 --- a/data-prepper-plugins/kafka-connect-plugins/README.md +++ b/data-prepper-plugins/kafka-connect-plugins/README.md @@ -1,5 +1,12 @@ # Kafka Connect Source +The `kafka-connect-plugins` project has not been released, and the maintainers have no plans to pick this work up. +If you wish to pick this project up, please open a GitHub issue to discuss it. +The original code is available in Git history if needed. + + +## Old README information + This is a source plugin that start a Kafka Connect and Connectors. Please note that the Kafka Connect Source has to work with Kafka Buffer. ## Usages diff --git a/data-prepper-plugins/kafka-connect-plugins/build.gradle b/data-prepper-plugins/kafka-connect-plugins/build.gradle deleted file mode 100644 index 8a7d8f20fa..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/build.gradle +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -plugins { - id 'java' } - -configurations.all { - exclude group: 'org.apache.zookeeper', module: 'zookeeper' -} - -dependencies { - implementation project(':data-prepper-plugins:aws-plugin-api') - implementation project(':data-prepper-plugins:common') - implementation project(':data-prepper-plugins:kafka-plugins') - implementation 'org.apache.kafka:connect-runtime:3.5.1' - implementation 'software.amazon.awssdk:sts' - implementation 'software.amazon.awssdk:secretsmanager' - implementation 'javax.validation:validation-api:2.0.1.Final' - implementation libs.reflections.core - implementation 'io.micrometer:micrometer-core' - implementation ('io.confluent:kafka-schema-registry:7.5.0') { - exclude group: 'org.glassfish.jersey.containers', module: 'jersey-container-servlet' - exclude group: 'org.glassfish.jersey.inject', module: 'jersey-hk2' - exclude group: 'org.glassfish.jersey.ext', module: 'jersey-bean-validation' - } - // Common Debezium Connector - implementation 'io.debezium:debezium-api:2.3.0.Final' - implementation 'io.debezium:debezium-core:2.3.0.Final' - implementation 'io.debezium:debezium-storage-kafka:2.3.0.Final' - implementation 'io.debezium:debezium-storage-file:2.3.0.Final' - // Debezium MySQL Connector - implementation 'org.antlr:antlr4-runtime:4.10.1' - implementation 'io.debezium:debezium-connector-mysql:2.3.0.Final' - implementation 'io.debezium:debezium-ddl-parser:2.3.0.Final' - implementation 'com.zendesk:mysql-binlog-connector-java:0.28.1' - implementation 'com.mysql:mysql-connector-j:8.0.33' - implementation 'com.github.luben:zstd-jni:1.5.0-2' - // Debezium Postgres connector - implementation 'io.debezium:debezium-connector-postgres:2.3.0.Final' - implementation 'org.postgresql:postgresql:42.5.1' - implementation 'com.google.protobuf:protobuf-java:3.19.6' - // Debezium Mongodb connector - implementation 'io.debezium:debezium-connector-mongodb:2.3.0.Final' - implementation 'org.mongodb:mongodb-driver-core:4.7.1' - implementation 'org.mongodb:mongodb-driver-sync:4.7.1' - implementation 'org.mongodb:bson:4.7.1' - runtimeOnly 'org.mongodb:bson-record-codec:4.7.1' - // test - testImplementation project(':data-prepper-test-common') - testImplementation project(':data-prepper-core') - testImplementation 'org.yaml:snakeyaml:2.0' - testImplementation 'com.fasterxml.jackson.datatype:jackson-datatype-jsr310' - testImplementation testLibs.mockito.inline -} - -jacocoTestCoverageVerification 
{ - dependsOn jacocoTestReport - violationRules { - rule { //in addition to core projects rule - limit { - minimum = 0.90 - } - } - } -} \ No newline at end of file diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/ConnectorConfig.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/ConnectorConfig.java deleted file mode 100644 index b92e78cdd1..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/ConnectorConfig.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.configuration; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.opensearch.dataprepper.plugins.kafkaconnect.util.Connector; - -import java.util.List; -import java.util.Properties; - -public abstract class ConnectorConfig { - @JsonProperty("force_update") - public Boolean forceUpdate = false; - private String bootstrapServers; - private Properties authProperties; - - public abstract List buildConnectors(); - - public Properties getAuthProperties() { - return this.authProperties; - } - - public void setAuthProperties(Properties authProperties) { - this.authProperties = authProperties; - } - - public String getBootstrapServers() { - return this.bootstrapServers; - } - - public void setBootstrapServers(String bootStrapServers) { - this.bootstrapServers = bootStrapServers; - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/CredentialsConfig.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/CredentialsConfig.java deleted file mode 100644 index 714a681a97..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/CredentialsConfig.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.configuration; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import org.opensearch.dataprepper.plugins.kafkaconnect.util.SecretManagerHelper; - -import java.util.Map; - -public class CredentialsConfig { - private final String username; - private final String password; - - @JsonCreator - public CredentialsConfig(@JsonProperty("plaintext") final PlainText plainText, - @JsonProperty("secret_manager") final SecretManager secretManager) { - if (plainText != null && secretManager != null) { - throw new IllegalArgumentException("plaintext and secret_manager cannot both be set"); - } - if (plainText != null) { - if (plainText.username == null || plainText.password == null) { - throw new IllegalArgumentException("user and password must be set for plaintext credentials"); - } - this.username = plainText.username; - this.password = plainText.password; - } else if (secretManager != null) { - if (secretManager.secretId == null || secretManager.region == null) { - throw new IllegalArgumentException("secretId and region must be set for aws credential type"); 
- } - final Map secretMap = this.getSecretValueMap(secretManager.stsRoleArn, secretManager.region, secretManager.secretId); - if (!secretMap.containsKey("username") || !secretMap.containsKey("password")) { - throw new RuntimeException("username or password missing in secret manager."); - } - this.username = secretMap.get("username"); - this.password = secretMap.get("password"); - } else { - throw new IllegalArgumentException("plaintext or secret_manager must be set"); - } - } - - private Map getSecretValueMap(String stsRoleArn, String region, String secretId) { - ObjectMapper objectMapper = new ObjectMapper(); - try { - final String secretValue = SecretManagerHelper.getSecretValue(stsRoleArn, region, secretId); - return objectMapper.readValue(secretValue, new TypeReference<>() {}); - } catch (Exception e) { - throw new RuntimeException("Failed to get credentials.", e); - } - } - - public String getUsername() { - return username; - } - - public String getPassword() { - return password; - } - - public static class PlainText { - private String username; - private String password; - - @JsonCreator - public PlainText(@JsonProperty("username") String username, - @JsonProperty("password") String password) { - this.username = username; - this.password = password; - } - } - - public static class SecretManager { - private String region; - private String secretId; - private String stsRoleArn; - - @JsonCreator - public SecretManager(@JsonProperty("sts_role_arn") String stsRoleArn, - @JsonProperty("region") String region, - @JsonProperty("secretId") String secretId) { - this.stsRoleArn = stsRoleArn; - this.region = region; - this.secretId = secretId; - } - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/MongoDBConfig.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/MongoDBConfig.java deleted file mode 100644 index 02da26c9d4..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/MongoDBConfig.java +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.configuration; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import jakarta.validation.constraints.NotNull; -import org.opensearch.dataprepper.plugins.kafkaconnect.util.Connector; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -public class MongoDBConfig extends ConnectorConfig { - public static final String CONNECTOR_CLASS = "io.debezium.connector.mongodb.MongoDbConnector"; - private static final String MONGODB_CONNECTION_STRING_FORMAT = "mongodb://%s:%s/?replicaSet=rs0&directConnection=true"; - private static final String DEFAULT_PORT = "27017"; - private static final String DEFAULT_SNAPSHOT_MODE = "never"; - private static final Boolean SSL_ENABLED = false; - private static final Boolean SSL_INVALID_HOST_ALLOWED = false; - private static final String DEFAULT_SNAPSHOT_FETCH_SIZE = "1000"; - @JsonProperty("hostname") - @NotNull - private String hostname; - @JsonProperty("port") - private String port = DEFAULT_PORT; - @JsonProperty("credentials") - private CredentialsConfig credentialsConfig; - 
@JsonProperty("ingestion_mode") - private IngestionMode ingestionMode = IngestionMode.EXPORT_STREAM; - @JsonProperty("export_config") - private ExportConfig exportConfig = new ExportConfig(); - @JsonProperty("snapshot_fetch_size") - private String snapshotFetchSize = DEFAULT_SNAPSHOT_FETCH_SIZE; - @JsonProperty("collections") - private List collections = new ArrayList<>(); - @JsonProperty("ssl") - private Boolean ssl = SSL_ENABLED; - @JsonProperty("ssl_invalid_host_allowed") - private Boolean sslInvalidHostAllowed = SSL_INVALID_HOST_ALLOWED; - - @Override - public List buildConnectors() { - return collections.stream().map(collection -> { - final String connectorName = collection.getTopicPrefix() + "." + collection.getCollectionName(); - final Map config = buildConfig(collection); - return new Connector(connectorName, config, this.forceUpdate); - }).collect(Collectors.toList()); - } - - public IngestionMode getIngestionMode() { - return this.ingestionMode; - } - - public CredentialsConfig getCredentialsConfig() { - return this.credentialsConfig; - } - - public String getHostname() { - return this.hostname; - } - - public String getPort() { - return this.port; - } - - public Boolean getSSLEnabled() { - return this.ssl; - } - - public Boolean getSSLInvalidHostAllowed() { - return this.sslInvalidHostAllowed; - } - - public List getCollections() { - return this.collections; - } - - public ExportConfig getExportConfig() { - return this.exportConfig; - } - - private Map buildConfig(final CollectionConfig collection) { - Map config = new HashMap<>(); - config.put("connector.class", CONNECTOR_CLASS); - config.put("mongodb.connection.string", String.format(MONGODB_CONNECTION_STRING_FORMAT, hostname, port)); - config.put("mongodb.user", credentialsConfig.getUsername()); - config.put("mongodb.password", credentialsConfig.getPassword()); - config.put("snapshot.mode", DEFAULT_SNAPSHOT_MODE); - config.put("snapshot.fetch.size", snapshotFetchSize); - config.put("topic.prefix", collection.getTopicPrefix()); - config.put("collection.include.list", collection.getCollectionName()); - config.put("mongodb.ssl.enabled", ssl.toString()); - config.put("mongodb.ssl.invalid.hostname.allowed", sslInvalidHostAllowed.toString()); - // Non-configurable properties used to transform CDC data before sending to Kafka. 
- config.put("transforms", "unwrap"); - config.put("transforms.unwrap.type", "io.debezium.connector.mongodb.transforms.ExtractNewDocumentState"); - config.put("transforms.unwrap.drop.tombstones", "true"); - config.put("transforms.unwrap.delete.handling.mode", "rewrite"); - config.put("transforms.unwrap.add.fields", "op,rs,collection,source.ts_ms,source.db,source.snapshot,ts_ms"); - return config; - } - - public enum IngestionMode { - EXPORT_STREAM("export_stream"), - EXPORT("export"), - STREAM("stream"); - - private static final Map OPTIONS_MAP = Arrays.stream(IngestionMode.values()) - .collect(Collectors.toMap( - value -> value.type, - value -> value - )); - - private final String type; - - IngestionMode(final String type) { - this.type = type; - } - - @JsonCreator - public static IngestionMode fromTypeValue(final String type) { - return OPTIONS_MAP.get(type.toLowerCase()); - } - } - - public static class CollectionConfig { - @JsonProperty("topic_prefix") - @NotNull - private String topicPrefix; - - @JsonProperty("collection") - @NotNull - private String collectionName; - - public String getCollectionName() { - return collectionName; - } - - public String getTopicPrefix() { - return topicPrefix; - } - } - - public static class ExportConfig { - private static int DEFAULT_ITEMS_PER_PARTITION = 4000; - private static String DEFAULT_READ_PREFERENCE = "secondaryPreferred"; - @JsonProperty("acknowledgments") - private Boolean acknowledgments = false; - @JsonProperty("items_per_partition") - private Integer itemsPerPartition = DEFAULT_ITEMS_PER_PARTITION; - @JsonProperty("read_preference") - private String readPreference = DEFAULT_READ_PREFERENCE; - - public boolean getAcknowledgements() { - return this.acknowledgments; - } - - public Integer getItemsPerPartition() { - return this.itemsPerPartition; - } - - public String getReadPreference() { - return this.readPreference; - } - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/MySQLConfig.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/MySQLConfig.java deleted file mode 100644 index ea9d0f9f95..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/MySQLConfig.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.configuration; - -import com.fasterxml.jackson.annotation.JsonProperty; -import jakarta.validation.constraints.NotNull; -import org.apache.kafka.connect.runtime.WorkerConfig; -import org.opensearch.dataprepper.plugins.kafkaconnect.util.Connector; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.stream.Collectors; - -public class MySQLConfig extends ConnectorConfig { - public static final String CONNECTOR_CLASS = "io.debezium.connector.mysql.MySqlConnector"; - private static final String SCHEMA_HISTORY_PRODUCER_PREFIX = "schema.history.internal.producer."; - private static final String SCHEMA_HISTORY_CONSUMER_PREFIX = "schema.history.internal.consumer."; - private static final String TOPIC_DEFAULT_PARTITIONS = "10"; - private static final String TOPIC_DEFAULT_REPLICATION_FACTOR = "-1"; - private static final String SCHEMA_HISTORY = "schemahistory"; - private static 
final String DEFAULT_SNAPSHOT_MODE = "initial"; - private static final String DEFAULT_PORT = "3306"; - - @JsonProperty("hostname") - @NotNull - private String hostname; - @JsonProperty("port") - private String port = DEFAULT_PORT; - @JsonProperty("credentials") - private CredentialsConfig credentialsConfig; - @JsonProperty("snapshot_mode") - private String snapshotMode = DEFAULT_SNAPSHOT_MODE; - @JsonProperty("tables") - private List tables = new ArrayList<>(); - - @Override - public List buildConnectors() { - return tables.stream().map(table -> { - final String connectorName = table.getTopicPrefix() + "." + table.getTableName(); - final Map config = buildConfig(table, connectorName); - return new Connector(connectorName, config, this.forceUpdate); - }).collect(Collectors.toList()); - } - - private Map buildConfig(final TableConfig table, final String connectorName) { - int databaseServerId = Math.abs(connectorName.hashCode()); - final Map config = new HashMap<>(); - final Properties authProperties = this.getAuthProperties(); - if (authProperties != null) { - authProperties.forEach((k, v) -> { - if (k == WorkerConfig.BOOTSTRAP_SERVERS_CONFIG) { - this.setBootstrapServers(v.toString()); - return; - } - if (v instanceof Class) { - config.put(SCHEMA_HISTORY_PRODUCER_PREFIX + k, ((Class) v).getName()); - config.put(SCHEMA_HISTORY_CONSUMER_PREFIX + k, ((Class) v).getName()); - return; - } - config.put(SCHEMA_HISTORY_PRODUCER_PREFIX + k, v.toString()); - config.put(SCHEMA_HISTORY_CONSUMER_PREFIX + k, v.toString()); - }); - } - config.put("topic.creation.default.partitions", TOPIC_DEFAULT_PARTITIONS); - config.put("topic.creation.default.replication.factor", TOPIC_DEFAULT_REPLICATION_FACTOR); - config.put("connector.class", CONNECTOR_CLASS); - config.put("database.hostname", hostname); - config.put("database.port", port); - config.put("database.user", credentialsConfig.getUsername()); - config.put("database.password", credentialsConfig.getPassword()); - config.put("snapshot.mode", snapshotMode); - config.put("topic.prefix", table.getTopicPrefix()); - config.put("table.include.list", table.getTableName()); - config.put("schema.history.internal.kafka.bootstrap.servers", this.getBootstrapServers()); - config.put("schema.history.internal.kafka.topic", String.join(".", List.of(table.getTopicPrefix(), table.getTableName(), SCHEMA_HISTORY))); - config.put("database.server.id", Integer.toString(databaseServerId)); - // Non-configurable properties used to transform CDC data before sending to Kafka. 
- config.put("transforms", "unwrap"); - config.put("transforms.unwrap.type", "io.debezium.transforms.ExtractNewRecordState"); - config.put("transforms.unwrap.drop.tombstones", "true"); - config.put("transforms.unwrap.delete.handling.mode", "rewrite"); - config.put("transforms.unwrap.add.fields", "op,table,source.ts_ms,source.db,source.snapshot,ts_ms"); - return config; - } - - private static class TableConfig { - @JsonProperty("topic_prefix") - @NotNull - private String topicPrefix; - - @JsonProperty("table") - @NotNull - private String tableName; - - public String getTableName() { - return tableName; - } - - public String getTopicPrefix() { - return topicPrefix; - } - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/PostgreSQLConfig.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/PostgreSQLConfig.java deleted file mode 100644 index 205cfcd7a4..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/PostgreSQLConfig.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.configuration; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import jakarta.validation.constraints.NotNull; -import org.opensearch.dataprepper.plugins.kafkaconnect.util.Connector; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -public class PostgreSQLConfig extends ConnectorConfig { - public static final String CONNECTOR_CLASS = "io.debezium.connector.postgresql.PostgresConnector"; - private static final String TOPIC_DEFAULT_PARTITIONS = "10"; - private static final String TOPIC_DEFAULT_REPLICATION_FACTOR = "-1"; - private static final String DEFAULT_PORT = "5432"; - private static final String DEFAULT_SNAPSHOT_MODE = "initial"; - private static final PluginName DEFAULT_DECODING_PLUGIN = PluginName.PGOUTPUT; // default plugin for Aurora PostgreSQL - @JsonProperty("hostname") - @NotNull - private String hostname; - @JsonProperty("port") - private String port = DEFAULT_PORT; - /** - * The name of the PostgreSQL logical decoding plug-in installed on the PostgreSQL server. - * Supported values are decoderbufs, and pgoutput. - */ - @JsonProperty("plugin_name") - private PluginName pluginName = DEFAULT_DECODING_PLUGIN; - @JsonProperty("credentials") - private CredentialsConfig credentialsConfig; - @JsonProperty("snapshot_mode") - private String snapshotMode = DEFAULT_SNAPSHOT_MODE; - @JsonProperty("tables") - private List tables = new ArrayList<>(); - - @Override - public List buildConnectors() { - return tables.stream().map(table -> { - final String connectorName = table.getTopicPrefix() + "." 
+ table.getTableName(); - final Map config = buildConfig(table); - return new Connector(connectorName, config, this.forceUpdate); - }).collect(Collectors.toList()); - } - - private Map buildConfig(final TableConfig tableName) { - Map config = new HashMap<>(); - config.put("topic.creation.default.partitions", TOPIC_DEFAULT_PARTITIONS); - config.put("topic.creation.default.replication.factor", TOPIC_DEFAULT_REPLICATION_FACTOR); - config.put("connector.class", CONNECTOR_CLASS); - config.put("plugin.name", pluginName.type); - config.put("database.hostname", hostname); - config.put("database.port", port); - config.put("database.user", credentialsConfig.getUsername()); - config.put("database.password", credentialsConfig.getPassword()); - config.put("snapshot.mode", snapshotMode); - config.put("topic.prefix", tableName.getTopicPrefix()); - config.put("database.dbname", tableName.getDatabaseName()); - config.put("table.include.list", tableName.getTableName()); - // Non-configurable properties used to transform CDC data before sending to Kafka. - config.put("transforms", "unwrap"); - config.put("transforms.unwrap.type", "io.debezium.transforms.ExtractNewRecordState"); - config.put("transforms.unwrap.drop.tombstones", "true"); - config.put("transforms.unwrap.delete.handling.mode", "rewrite"); - config.put("transforms.unwrap.add.fields", "op,table,source.ts_ms,source.db,source.snapshot,ts_ms"); - return config; - } - - public enum PluginName { - DECODERBUFS("decoderbufs"), - PGOUTPUT("pgoutput"); - - private static final Map OPTIONS_MAP = Arrays.stream(PostgreSQLConfig.PluginName.values()) - .collect(Collectors.toMap( - value -> value.type, - value -> value - )); - - private final String type; - - PluginName(final String type) { - this.type = type; - } - - @JsonCreator - public static PostgreSQLConfig.PluginName fromTypeValue(final String type) { - return OPTIONS_MAP.get(type.toLowerCase()); - } - } - - private static class TableConfig { - @JsonProperty("database") - @NotNull - private String databaseName; - - @JsonProperty("topic_prefix") - @NotNull - private String topicPrefix; - - @JsonProperty("table") - @NotNull - private String tableName; - - public String getDatabaseName() { - return databaseName; - } - - public String getTableName() { - return tableName; - } - - public String getTopicPrefix() { - return topicPrefix; - } - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/DefaultKafkaConnectConfigSupplier.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/DefaultKafkaConnectConfigSupplier.java deleted file mode 100644 index 39e7f6609a..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/DefaultKafkaConnectConfigSupplier.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.extension; - -public class DefaultKafkaConnectConfigSupplier implements KafkaConnectConfigSupplier { - private final KafkaConnectConfig kafkaConnectConfig; - public DefaultKafkaConnectConfigSupplier(KafkaConnectConfig kafkaConnectConfig) { - this.kafkaConnectConfig = kafkaConnectConfig; - } - - @Override - public KafkaConnectConfig getConfig() { - return this.kafkaConnectConfig; - } -} diff --git 
a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfig.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfig.java deleted file mode 100644 index fbed48d949..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfig.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.extension; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.opensearch.dataprepper.plugins.kafka.configuration.AuthConfig; -import org.opensearch.dataprepper.plugins.kafka.configuration.AwsConfig; -import org.opensearch.dataprepper.plugins.kafka.configuration.EncryptionConfig; -import org.opensearch.dataprepper.plugins.kafka.util.KafkaClusterAuthConfig; - -import java.time.Duration; -import java.util.List; -import java.util.Objects; -import java.util.Properties; - -public class KafkaConnectConfig implements KafkaClusterAuthConfig { - private static final long CONNECTOR_TIMEOUT_MS = 360000L; // 360 seconds - private static final long CONNECT_TIMEOUT_MS = 60000L; // 60 seconds - - @JsonProperty("worker_properties") - private WorkerProperties workerProperties = new WorkerProperties(); - - @JsonProperty("connect_start_timeout") - private Duration connectStartTimeout = Duration.ofMillis(CONNECT_TIMEOUT_MS); - - @JsonProperty("connector_start_timeout") - private Duration connectorStartTimeout = Duration.ofMillis(CONNECTOR_TIMEOUT_MS); - - @JsonProperty("bootstrap_servers") - private List bootstrapServers; - - private AuthConfig authConfig; - private EncryptionConfig encryptionConfig; - private AwsConfig awsConfig; - - public Duration getConnectStartTimeout() { - return connectStartTimeout; - } - - public Duration getConnectorStartTimeout() { - return connectorStartTimeout; - } - - public void setBootstrapServers(final List bootstrapServers) { - this.bootstrapServers = bootstrapServers; - if (Objects.nonNull(bootstrapServers)) { - this.workerProperties.setBootstrapServers(String.join(",", bootstrapServers));; - } - } - - public void setAuthProperties(final Properties authProperties) { - this.workerProperties.setAuthProperties(authProperties); - } - - public void setAuthConfig(AuthConfig authConfig) { - this.authConfig = authConfig; - } - - public void setAwsConfig(AwsConfig awsConfig) { - this.awsConfig = awsConfig; - } - - public void setEncryptionConfig(EncryptionConfig encryptionConfig) { - this.encryptionConfig = encryptionConfig; - } - - public WorkerProperties getWorkerProperties() { - return workerProperties; - } - - @Override - public AwsConfig getAwsConfig() { - return awsConfig; - } - - @Override - public AuthConfig getAuthConfig() { - return authConfig; - } - - @Override - public EncryptionConfig getEncryptionConfig() { - return encryptionConfig; - } - - @Override - public List getBootstrapServers() { - if (Objects.nonNull(bootstrapServers)) { - return bootstrapServers; - } - return null; - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigExtension.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigExtension.java deleted file mode 
100644 index 18c6aee682..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigExtension.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.extension; - -import org.opensearch.dataprepper.model.annotations.DataPrepperExtensionPlugin; -import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor; -import org.opensearch.dataprepper.model.plugin.ExtensionPlugin; -import org.opensearch.dataprepper.model.plugin.ExtensionPoints; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@DataPrepperExtensionPlugin(modelType = KafkaConnectConfig.class, rootKeyJsonPath = "/kafka_connect_config") -public class KafkaConnectConfigExtension implements ExtensionPlugin { - private static final Logger LOG = LoggerFactory.getLogger(KafkaConnectConfigExtension.class); - private DefaultKafkaConnectConfigSupplier defaultKafkaConnectConfigSupplier; - - @DataPrepperPluginConstructor - public KafkaConnectConfigExtension(final KafkaConnectConfig kafkaConnectConfig) { - this.defaultKafkaConnectConfigSupplier = new DefaultKafkaConnectConfigSupplier(kafkaConnectConfig); - } - - @Override - public void apply(ExtensionPoints extensionPoints) { - LOG.info("Applying Kafka Connect Config Extension."); - extensionPoints.addExtensionProvider(new KafkaConnectConfigProvider(this.defaultKafkaConnectConfigSupplier)); - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigProvider.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigProvider.java deleted file mode 100644 index 2e2f5e01f6..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigProvider.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.extension; - -import org.opensearch.dataprepper.model.plugin.ExtensionProvider; - -import java.util.Optional; - -public class KafkaConnectConfigProvider implements ExtensionProvider { - private final KafkaConnectConfigSupplier kafkaConnectConfigSupplier; - public KafkaConnectConfigProvider(KafkaConnectConfigSupplier kafkaConnectConfigSupplier) { - this.kafkaConnectConfigSupplier = kafkaConnectConfigSupplier; - } - - @Override - public Optional provideInstance(Context context) { - return Optional.of(this.kafkaConnectConfigSupplier); - } - - @Override - public Class supportedClass() { - return KafkaConnectConfigSupplier.class; - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigSupplier.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigSupplier.java deleted file mode 100644 index c5805378d9..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigSupplier.java +++ /dev/null @@ -1,10 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package 
org.opensearch.dataprepper.plugins.kafkaconnect.extension; - -public interface KafkaConnectConfigSupplier { - KafkaConnectConfig getConfig(); -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/WorkerProperties.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/WorkerProperties.java deleted file mode 100644 index 1de8fff29c..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/WorkerProperties.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.extension; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.kafka.connect.runtime.WorkerConfig; - -import java.time.Duration; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -public class WorkerProperties { - private static final String KEY_CONVERTER = "org.apache.kafka.connect.json.JsonConverter"; - private static final String KEY_CONVERTER_SCHEMAS_ENABLE = "false"; - private static final String VALUE_CONVERTER_SCHEMAS_ENABLE = "false"; - private static final String VALUE_CONVERTER = "org.apache.kafka.connect.json.JsonConverter"; - private static final Integer OFFSET_STORAGE_PARTITIONS = 25; - private static final Long OFFSET_FLUSH_INTERVAL_MS = 60000L; - private static final Long OFFSET_FLUSH_TIMEOUT_MS = 5000L; - private static final Integer STATUS_STORAGE_PARTITIONS = 5; - private static final Long HEARTBEAT_INTERVAL_MS = 3000L; - private static final Long SESSION_TIMEOUT_MS = 30000L; - private static final long CONNECTOR_REBALANCE_DELAY_MS = 300000L; // 300 seconds - private static final String DEFAULT_GROUP_ID = "localGroup"; - private static final String DEFAULT_CLIENT_ID = "localClient"; - private static final String DEFAULT_CONFIG_STORAGE_TOPIC = "config-storage-topic"; - private static final String DEFAULT_OFFSET_STORAGE_TOPIC = "offset-storage-topic"; - private static final String DEFAULT_STATUS_STORAGE_TOPIC = "status-storage-topic"; - private final Integer offsetStorageReplicationFactor = -1; - private final Integer configStorageReplicationFactor = -1; - private final Integer statusStorageReplicationFactor = -1; - private String keyConverter = KEY_CONVERTER; - private String keyConverterSchemasEnable = KEY_CONVERTER_SCHEMAS_ENABLE; - private String valueConverter = VALUE_CONVERTER; - private String valueConverterSchemasEnable = VALUE_CONVERTER_SCHEMAS_ENABLE; - - @JsonProperty("group_id") - private String groupId = DEFAULT_GROUP_ID; - @JsonProperty("config_storage_topic") - private String configStorageTopic = DEFAULT_CONFIG_STORAGE_TOPIC; - @JsonProperty("offset_storage_topic") - private String offsetStorageTopic = DEFAULT_OFFSET_STORAGE_TOPIC; - @JsonProperty("status_storage_topic") - private String statusStorageTopic = DEFAULT_STATUS_STORAGE_TOPIC; - @JsonProperty("client_id") - private String clientId = DEFAULT_CLIENT_ID; - @JsonProperty("offset_storage_partitions") - private Integer offsetStoragePartitions = OFFSET_STORAGE_PARTITIONS; - @JsonProperty("offset_flush_interval") - private Duration offsetFlushInterval = Duration.ofMillis(OFFSET_FLUSH_INTERVAL_MS); - @JsonProperty("offset_flush_timeout") - private Duration offsetFlushTimeout = Duration.ofMillis(OFFSET_FLUSH_TIMEOUT_MS); - 
@JsonProperty("status_storage_partitions") - private Integer statusStoragePartitions = STATUS_STORAGE_PARTITIONS; - @JsonProperty("heartbeat_interval") - private Duration heartBeatInterval = Duration.ofMillis(HEARTBEAT_INTERVAL_MS); - @JsonProperty("session_timeout") - private Duration sessionTimeout = Duration.ofMillis(SESSION_TIMEOUT_MS); - @JsonProperty("connector_rebalance_max_delay") - private Duration connectorRebalanceDelay = Duration.ofMillis(CONNECTOR_REBALANCE_DELAY_MS); - private String keyConverterSchemaRegistryUrl; - private String valueConverterSchemaRegistryUrl; - private String bootstrapServers; - private Properties authProperties; - - public String getKeyConverter() { - return keyConverter; - } - - public String getKeyConverterSchemasEnable() { - return keyConverterSchemasEnable; - } - - public String getKeyConverterSchemaRegistryUrl() { - return keyConverterSchemaRegistryUrl; - } - - public String getValueConverter() { - return valueConverter; - } - - public String getValueConverterSchemasEnable() { - return valueConverterSchemasEnable; - } - - public String getValueConverterSchemaRegistryUrl() { - return valueConverterSchemaRegistryUrl; - } - - public Integer getOffsetStoragePartitions() { - return offsetStoragePartitions; - } - - public Long getOffsetFlushInterval() { - return offsetFlushInterval.toMillis(); - } - - public Long getOffsetFlushTimeout() { - return offsetFlushTimeout.toMillis(); - } - - public Long getRebalanceMaxDelay() { - return connectorRebalanceDelay.toMillis(); - } - - public Integer getStatusStoragePartitions() { - return statusStoragePartitions; - } - - public Long getHeartBeatInterval() { - return heartBeatInterval.toMillis(); - } - - public Long getSessionTimeout() { - return sessionTimeout.toMillis(); - } - - public String getBootstrapServers() { - return bootstrapServers; - } - - public void setBootstrapServers(final String bootstrapServers) { - this.bootstrapServers = bootstrapServers; - } - - public String getGroupId() { - return groupId; - } - - public String getClientId() { - return clientId; - } - - public String getConfigStorageTopic() { - return configStorageTopic; - } - - public Integer getConfigStorageReplicationFactor() { - return configStorageReplicationFactor; - } - - public String getOffsetStorageTopic() { - return offsetStorageTopic; - } - - public Integer getOffsetStorageReplicationFactor() { - return offsetStorageReplicationFactor; - } - - public String getStatusStorageTopic() { - return statusStorageTopic; - } - - public Integer getStatusStorageReplicationFactor() { - return statusStorageReplicationFactor; - } - - public void setAuthProperties(Properties authProperties) { - this.authProperties = authProperties; - } - - public Map buildKafkaConnectPropertyMap() { - final String producerPrefix = "producer."; - Map workerProps = new HashMap<>(); - if (authProperties != null) { - authProperties.forEach((k, v) -> { - if (k == WorkerConfig.BOOTSTRAP_SERVERS_CONFIG) { - this.setBootstrapServers(v.toString()); - return; - } - if (v instanceof Class) { - workerProps.put(k.toString(), ((Class) v).getName()); - workerProps.put(producerPrefix + k, ((Class) v).getName()); - return; - } - workerProps.put(k.toString(), v.toString()); - workerProps.put(producerPrefix + k, v.toString()); - }); - } - workerProps.put("bootstrap.servers", this.getBootstrapServers()); - workerProps.put("group.id", this.getGroupId()); - workerProps.put("client.id", this.getClientId()); - workerProps.put("offset.storage.topic", this.getOffsetStorageTopic()); - 
workerProps.put("offset.storage.replication.factor", this.getOffsetStorageReplicationFactor().toString()); - workerProps.put("config.storage.topic", this.getConfigStorageTopic()); - workerProps.put("config.storage.replication.factor", this.getConfigStorageReplicationFactor().toString()); - workerProps.put("status.storage.topic", this.getStatusStorageTopic()); - workerProps.put("status.storage.replication.factor", this.getStatusStorageReplicationFactor().toString()); - workerProps.put("key.converter", this.getKeyConverter()); - workerProps.put("key.converter.schemas.enable", this.getKeyConverterSchemasEnable()); - if (this.getKeyConverterSchemaRegistryUrl() != null) { - workerProps.put("key.converter.schema.registry.url", this.getKeyConverterSchemaRegistryUrl()); - } - workerProps.put("value.converter", this.getValueConverter()); - workerProps.put("value.converter.schemas.enable", this.getValueConverterSchemasEnable()); - if (this.getValueConverterSchemaRegistryUrl() != null) { - workerProps.put("value.converter.schema.registry.url", this.getValueConverterSchemaRegistryUrl()); - } - workerProps.put("offset.storage.partitions", this.getOffsetStoragePartitions().toString()); - workerProps.put("offset.flush.interval.ms", this.getOffsetFlushInterval().toString()); - workerProps.put("offset.flush.timeout.ms", this.getOffsetFlushTimeout().toString()); - workerProps.put("status.storage.partitions", this.getStatusStoragePartitions().toString()); - workerProps.put("heartbeat.interval.ms", this.getHeartBeatInterval().toString()); - workerProps.put("session.timeout.ms", this.getSessionTimeout().toString()); - workerProps.put("scheduled.rebalance.max.delay.ms", this.getRebalanceMaxDelay().toString()); - return workerProps; - } -} - diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/meter/KafkaConnectMetrics.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/meter/KafkaConnectMetrics.java deleted file mode 100644 index db7250c259..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/meter/KafkaConnectMetrics.java +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.meter; - -import io.micrometer.core.instrument.Tag; -import io.micrometer.core.instrument.Tags; -import org.opensearch.dataprepper.metrics.PluginMetrics; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.management.MBeanServer; -import javax.management.MBeanServerFactory; -import javax.management.MalformedObjectNameException; -import javax.management.ObjectName; -import java.lang.management.ManagementFactory; -import java.util.List; -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.function.BiFunction; - -import static java.util.Collections.emptyList; - -public class KafkaConnectMetrics { - private static final Logger LOG = LoggerFactory.getLogger(KafkaConnectMetrics.class); - private static final String JMX_DOMAIN = "kafka.connect"; - private static final String CONNECT_WORKER_METRICS_NAME = "connect-worker-metrics"; - private static final List CONNECT_WORKER_METRICS_LIST = List.of( - "task-count", - "connector-count", - "connector-startup-attempts-total", - "connector-startup-success-total", - "connector-startup-failure-total", - "task-startup-attempts-total", - 
"task-startup-success-total", - "task-startup-failure-total" - ); - private static final String SOURCE_TASK_METRICS_NAME = "source-task-metrics"; - private static final List SOURCE_TASK_METRICS_LIST = List.of( - "source-record-write-total", - "source-record-write-rate", - "source-record-poll-total", - "source-record-poll-rate", - "source-record-active-count-max", - "source-record-active-count-avg", - "source-record-active-count" - ); - private static final String CLIENT_ID_KEY = "client-id"; - private static final String CLIENT_ID = "client.id"; - private static final String NODE_ID_KEY = "node-id"; - private static final String NODE_ID = "node.id"; - private static final String CONNECTOR = "connector"; - private static final String TASK = "task"; - - private final PluginMetrics pluginMetrics; - - private final MBeanServer mBeanServer; - - private final Iterable tags; - - - public KafkaConnectMetrics(final PluginMetrics pluginMetrics) { - this(pluginMetrics, emptyList()); - } - - public KafkaConnectMetrics(final PluginMetrics pluginMetrics, - final Iterable tags) { - this(pluginMetrics, getMBeanServer(), tags); - } - - public KafkaConnectMetrics(final PluginMetrics pluginMetrics, - final MBeanServer mBeanServer, - final Iterable tags) { - this.pluginMetrics = pluginMetrics; - this.mBeanServer = mBeanServer; - this.tags = tags; - } - - private static MBeanServer getMBeanServer() { - List mBeanServers = MBeanServerFactory.findMBeanServer(null); - if (!mBeanServers.isEmpty()) { - return mBeanServers.get(0); - } - return ManagementFactory.getPlatformMBeanServer(); - } - - private static String sanitize(String value) { - return value.replaceAll("-", "."); - } - - public void bindConnectMetrics() { - registerMetricsEventually(CONNECT_WORKER_METRICS_NAME, (o, tags) -> { - CONNECT_WORKER_METRICS_LIST.forEach( - (metricName) -> registerFunctionGaugeForObject(o, metricName, tags) - ); - return null; - }); - } - - public void bindConnectorMetrics() { - registerMetricsEventually(SOURCE_TASK_METRICS_NAME, (o, tags) -> { - SOURCE_TASK_METRICS_LIST.forEach( - (metricName) -> registerFunctionGaugeForObject(o, metricName, tags) - ); - return null; - }); - } - - private void registerMetricsEventually(String type, - BiFunction perObject) { - try { - Set objs = mBeanServer.queryNames(new ObjectName(JMX_DOMAIN + ":type=" + type + ",*"), null); - if (!objs.isEmpty()) { - for (ObjectName o : objs) { - perObject.apply(o, Tags.concat(tags, nameTag(o))); - } - } - } catch (MalformedObjectNameException e) { - throw new RuntimeException("Error registering Kafka Connect JMX based metrics", e); - } - } - - private Iterable nameTag(ObjectName name) { - Tags tags = Tags.empty(); - - String clientId = name.getKeyProperty(CLIENT_ID_KEY); - if (clientId != null) { - tags = Tags.concat(tags, CLIENT_ID, clientId); - } - - String nodeId = name.getKeyProperty(NODE_ID_KEY); - if (nodeId != null) { - tags = Tags.concat(tags, NODE_ID, nodeId); - } - - String connectorName = name.getKeyProperty(CONNECTOR); - if (connectorName != null) { - tags = Tags.concat(tags, CONNECTOR, connectorName); - } - - String taskName = name.getKeyProperty(TASK); - if (taskName != null) { - tags = Tags.concat(tags, TASK, taskName); - } - - return tags; - } - - private void registerFunctionGaugeForObject(ObjectName o, String jmxMetricName, Tags allTags) { - pluginMetrics.gaugeWithTags( - sanitize(jmxMetricName), - allTags, - mBeanServer, - s -> safeDouble(() -> s.getAttribute(o, jmxMetricName)) - ); - } - - private double safeDouble(Callable callable) { 
- try { - if (callable.call() == null) return Double.NaN; - return Double.parseDouble(callable.call().toString()); - } catch (Exception e) { - return Double.NaN; - } - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/KafkaConnectSource.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/KafkaConnectSource.java deleted file mode 100644 index 03370a359a..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/KafkaConnectSource.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.source; - -import org.apache.kafka.connect.runtime.WorkerConfig; -import org.opensearch.dataprepper.metrics.PluginMetrics; -import org.opensearch.dataprepper.model.buffer.Buffer; -import org.opensearch.dataprepper.model.configuration.PipelineDescription; -import org.opensearch.dataprepper.model.record.Record; -import org.opensearch.dataprepper.model.source.Source; -import org.opensearch.dataprepper.plugins.kafka.extension.KafkaClusterConfigSupplier; -import org.opensearch.dataprepper.plugins.kafka.util.KafkaSecurityConfigurer; -import org.opensearch.dataprepper.plugins.kafkaconnect.configuration.ConnectorConfig; -import org.opensearch.dataprepper.plugins.kafkaconnect.extension.KafkaConnectConfig; -import org.opensearch.dataprepper.plugins.kafkaconnect.extension.KafkaConnectConfigSupplier; -import org.opensearch.dataprepper.plugins.kafkaconnect.extension.WorkerProperties; -import org.opensearch.dataprepper.plugins.kafkaconnect.util.Connector; -import org.opensearch.dataprepper.plugins.kafkaconnect.util.KafkaConnect; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Properties; - -/** - * The abstraction of the kafka connect source. - * The kafka connect and connectors are configured and runs async here. 
- */ -@SuppressWarnings("deprecation") -public abstract class KafkaConnectSource implements Source> { - private static final Logger LOG = LoggerFactory.getLogger(KafkaConnectSource.class); - public final ConnectorConfig connectorConfig; - private final String pipelineName; - private KafkaConnectConfig kafkaConnectConfig; - private KafkaConnect kafkaConnect; - - public KafkaConnectSource(final ConnectorConfig connectorConfig, - final PluginMetrics pluginMetrics, - final PipelineDescription pipelineDescription, - final KafkaClusterConfigSupplier kafkaClusterConfigSupplier, - final KafkaConnectConfigSupplier kafkaConnectConfigSupplier) { - this.connectorConfig = connectorConfig; - this.pipelineName = pipelineDescription.getPipelineName(); - if (shouldStartKafkaConnect()) { - if (kafkaClusterConfigSupplier == null || kafkaConnectConfigSupplier == null) { - throw new IllegalArgumentException("Extensions: KafkaClusterConfig and KafkaConnectConfig cannot be null"); - } - this.kafkaConnectConfig = kafkaConnectConfigSupplier.getConfig(); - this.updateConfig(kafkaClusterConfigSupplier); - this.kafkaConnect = KafkaConnect.getPipelineInstance( - pipelineName, - pluginMetrics, - kafkaConnectConfig.getConnectStartTimeout(), - kafkaConnectConfig.getConnectorStartTimeout()); - } - } - - @Override - public void start(Buffer> buffer) { - if (shouldStartKafkaConnect()) { - LOG.info("Starting Kafka Connect Source for pipeline: {}", pipelineName); - // Please make sure buildWokerProperties is always first to execute. - final WorkerProperties workerProperties = this.kafkaConnectConfig.getWorkerProperties(); - Map workerProps = workerProperties.buildKafkaConnectPropertyMap(); - if (workerProps.get(WorkerConfig.BOOTSTRAP_SERVERS_CONFIG) == null || workerProps.get(WorkerConfig.BOOTSTRAP_SERVERS_CONFIG).isEmpty()) { - throw new IllegalArgumentException("Bootstrap Servers cannot be null or empty"); - } - final List connectors = this.connectorConfig.buildConnectors(); - kafkaConnect.addConnectors(connectors); - kafkaConnect.initialize(workerProps); - kafkaConnect.start(); - } - } - - @Override - public void stop() { - if (shouldStartKafkaConnect()) { - LOG.info("Stopping Kafka Connect Source for pipeline: {}", pipelineName); - kafkaConnect.stop(); - } - } - - public boolean shouldStartKafkaConnect() { - return true; - } - - private void updateConfig(final KafkaClusterConfigSupplier kafkaClusterConfigSupplier) { - if (kafkaConnectConfig.getBootstrapServers() == null) { - this.kafkaConnectConfig.setBootstrapServers(kafkaClusterConfigSupplier.getBootStrapServers()); - } - if (kafkaConnectConfig.getAuthConfig() == null) { - kafkaConnectConfig.setAuthConfig(kafkaClusterConfigSupplier.getAuthConfig()); - } - if (kafkaConnectConfig.getAwsConfig() == null) { - kafkaConnectConfig.setAwsConfig(kafkaClusterConfigSupplier.getAwsConfig()); - } - if (kafkaConnectConfig.getEncryptionConfig() == null) { - kafkaConnectConfig.setEncryptionConfig(kafkaClusterConfigSupplier.getEncryptionConfig()); - } - Properties authProperties = new Properties(); - KafkaSecurityConfigurer.setAuthProperties(authProperties, kafkaConnectConfig, LOG); - this.kafkaConnectConfig.setAuthProperties(authProperties); - // Update Connector Config - if (Objects.nonNull(kafkaConnectConfig.getBootstrapServers())) { - this.connectorConfig.setBootstrapServers(String.join(",", kafkaConnectConfig.getBootstrapServers())); - } - this.connectorConfig.setAuthProperties(authProperties); - } -} diff --git 
a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/MongoDBSource.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/MongoDBSource.java deleted file mode 100644 index 2099bd1e2c..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/MongoDBSource.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.source; - -import org.opensearch.dataprepper.aws.api.AwsCredentialsSupplier; -import org.opensearch.dataprepper.metrics.PluginMetrics; -import org.opensearch.dataprepper.model.acknowledgements.AcknowledgementSetManager; -import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin; -import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor; -import org.opensearch.dataprepper.model.buffer.Buffer; -import org.opensearch.dataprepper.model.codec.ByteDecoder; -import org.opensearch.dataprepper.model.codec.JsonDecoder; -import org.opensearch.dataprepper.model.configuration.PipelineDescription; -import org.opensearch.dataprepper.model.record.Record; -import org.opensearch.dataprepper.model.source.Source; -import org.opensearch.dataprepper.model.source.coordinator.SourceCoordinator; -import org.opensearch.dataprepper.model.source.coordinator.UsesSourceCoordination; -import org.opensearch.dataprepper.plugins.kafka.extension.KafkaClusterConfigSupplier; -import org.opensearch.dataprepper.plugins.kafkaconnect.configuration.MongoDBConfig; -import org.opensearch.dataprepper.plugins.kafkaconnect.extension.KafkaConnectConfigSupplier; -import org.opensearch.dataprepper.plugins.kafkaconnect.source.mongoDB.MongoDBService; -import org.opensearch.dataprepper.plugins.kafkaconnect.source.mongoDB.MongoDBSnapshotProgressState; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; -import java.util.Objects; - -/** - * The starting point of the mysql source which ingest CDC data using Kafka Connect and Debezium Connector. 
- */ -@SuppressWarnings("deprecation") -@DataPrepperPlugin(name = "mongodb", pluginType = Source.class, pluginConfigurationType = MongoDBConfig.class) -public class MongoDBSource extends KafkaConnectSource implements UsesSourceCoordination { - private static final Logger LOG = LoggerFactory.getLogger(MongoDBSource.class); - private static final String COLLECTION_SPLITTER = "\\."; - - private final AwsCredentialsSupplier awsCredentialsSupplier; - - private final PluginMetrics pluginMetrics; - - private final AcknowledgementSetManager acknowledgementSetManager; - - private MongoDBService mongoDBService; - - private SourceCoordinator sourceCoordinator; - - private ByteDecoder byteDecoder; - - @DataPrepperPluginConstructor - public MongoDBSource(final MongoDBConfig mongoDBConfig, - final PluginMetrics pluginMetrics, - final PipelineDescription pipelineDescription, - final AcknowledgementSetManager acknowledgementSetManager, - final AwsCredentialsSupplier awsCredentialsSupplier, - final KafkaClusterConfigSupplier kafkaClusterConfigSupplier, - final KafkaConnectConfigSupplier kafkaConnectConfigSupplier) { - super(mongoDBConfig, pluginMetrics, pipelineDescription, kafkaClusterConfigSupplier, kafkaConnectConfigSupplier); - this.pluginMetrics = pluginMetrics; - this.acknowledgementSetManager = acknowledgementSetManager; - this.awsCredentialsSupplier = awsCredentialsSupplier; - this.byteDecoder = new JsonDecoder(); - this.validateCollections(); - } - - @Override - public void start(Buffer> buffer) { - super.start(buffer); - if (shouldStartInitialLoad()) { - LOG.info("Starting initial load"); - this.mongoDBService = MongoDBService.create((MongoDBConfig) this.connectorConfig, sourceCoordinator, buffer, acknowledgementSetManager, pluginMetrics); - this.mongoDBService.start(); - } - } - - @Override - public void stop(){ - super.stop(); - if (shouldStartInitialLoad() && Objects.nonNull(mongoDBService) && Objects.nonNull(sourceCoordinator)) { - LOG.info("Stopping initial load"); - mongoDBService.stop(); - sourceCoordinator.giveUpPartitions(); - } - } - - @Override - public void setSourceCoordinator(final SourceCoordinator sourceCoordinator) { - this.sourceCoordinator = (SourceCoordinator) sourceCoordinator; - } - - @Override - public Class getPartitionProgressStateClass() { - return MongoDBSnapshotProgressState.class; - } - - @Override - public ByteDecoder getDecoder() { - return byteDecoder; - } - - @Override - public boolean shouldStartKafkaConnect() { - final MongoDBConfig mongoDBConfig = (MongoDBConfig) this.connectorConfig; - return mongoDBConfig.getIngestionMode() == MongoDBConfig.IngestionMode.EXPORT_STREAM - || mongoDBConfig.getIngestionMode() == MongoDBConfig.IngestionMode.STREAM; - } - - private boolean shouldStartInitialLoad() { - final MongoDBConfig mongoDBConfig = (MongoDBConfig) this.connectorConfig; - return mongoDBConfig.getIngestionMode() == MongoDBConfig.IngestionMode.EXPORT_STREAM - || mongoDBConfig.getIngestionMode() == MongoDBConfig.IngestionMode.EXPORT; - } - - private void validateCollections() { - MongoDBConfig config = (MongoDBConfig) this.connectorConfig; - List collectionConfigs = config.getCollections(); - collectionConfigs.forEach(collectionConfig -> { - List collection = List.of(collectionConfig.getCollectionName().split(COLLECTION_SPLITTER)); - if (collection.size() < 2) { - throw new IllegalArgumentException("Invalid Collection Name. 
Must be in db.collection format"); - } - }); - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/MySQLSource.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/MySQLSource.java deleted file mode 100644 index 5dbcd16f4e..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/MySQLSource.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.source; - -import org.opensearch.dataprepper.metrics.PluginMetrics; -import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin; -import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor; -import org.opensearch.dataprepper.model.configuration.PipelineDescription; -import org.opensearch.dataprepper.model.source.Source; -import org.opensearch.dataprepper.plugins.kafka.extension.KafkaClusterConfigSupplier; -import org.opensearch.dataprepper.plugins.kafkaconnect.configuration.MySQLConfig; -import org.opensearch.dataprepper.plugins.kafkaconnect.extension.KafkaConnectConfigSupplier; - -/** - * The starting point of the mysql source which ingest CDC data using Kafka Connect and Debezium Connector. - */ -@SuppressWarnings("deprecation") -@DataPrepperPlugin(name = "mysql", pluginType = Source.class, pluginConfigurationType = MySQLConfig.class) -public class MySQLSource extends KafkaConnectSource { - - @DataPrepperPluginConstructor - public MySQLSource(final MySQLConfig mySQLConfig, - final PluginMetrics pluginMetrics, - final PipelineDescription pipelineDescription, - final KafkaClusterConfigSupplier kafkaClusterConfigSupplier, - final KafkaConnectConfigSupplier kafkaConnectConfigSupplier) { - super(mySQLConfig, pluginMetrics, pipelineDescription, kafkaClusterConfigSupplier, kafkaConnectConfigSupplier); - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/PostgreSQLSource.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/PostgreSQLSource.java deleted file mode 100644 index 7e7c24d08c..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/PostgreSQLSource.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.source; - -import org.opensearch.dataprepper.metrics.PluginMetrics; -import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin; -import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor; -import org.opensearch.dataprepper.model.configuration.PipelineDescription; -import org.opensearch.dataprepper.model.source.Source; -import org.opensearch.dataprepper.plugins.kafka.extension.KafkaClusterConfigSupplier; -import org.opensearch.dataprepper.plugins.kafkaconnect.configuration.PostgreSQLConfig; -import org.opensearch.dataprepper.plugins.kafkaconnect.extension.KafkaConnectConfigSupplier; - -/** - * The starting point of the mysql source which ingest CDC data using Kafka Connect and Debezium Connector. 
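The deleted MongoDB source (and the partition supplier and snapshot worker further down) accepted collection names only in `db.collection` form, splitting on a literal dot. A self-contained sketch of that validation, with an assumed class name but the same splitter regex and error wording as the removed code:

```java
import java.util.List;

// Standalone sketch of the "db.collection" validation the deleted sources applied
// to every configured collection name (simplified; not the plugin's exact code).
public class CollectionNameValidatorSketch {
    private static final String COLLECTION_SPLITTER = "\\.";

    static List<String> validate(final String collectionName) {
        final List<String> parts = List.of(collectionName.split(COLLECTION_SPLITTER));
        if (parts.size() < 2) {
            throw new IllegalArgumentException(
                    "Invalid Collection Name '" + collectionName + "'. Must be in db.collection format");
        }
        return parts; // parts.get(0) = database, parts.get(1) = collection
    }

    public static void main(String[] args) {
        System.out.println(validate("test.customers"));   // [test, customers]
        try {
            validate("customers");                        // missing database prefix
        } catch (IllegalArgumentException e) {
            System.out.println("Rejected: " + e.getMessage());
        }
    }
}
```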
- */ -@SuppressWarnings("deprecation") -@DataPrepperPlugin(name = "postgresql", pluginType = Source.class, pluginConfigurationType = PostgreSQLConfig.class) -public class PostgreSQLSource extends KafkaConnectSource { - - @DataPrepperPluginConstructor - public PostgreSQLSource(final PostgreSQLConfig postgreSQLConfig, - final PluginMetrics pluginMetrics, - final PipelineDescription pipelineDescription, - final KafkaClusterConfigSupplier kafkaClusterConfigSupplier, - final KafkaConnectConfigSupplier kafkaConnectConfigSupplier) { - super(postgreSQLConfig, pluginMetrics, pipelineDescription, kafkaClusterConfigSupplier, kafkaConnectConfigSupplier); - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBHelper.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBHelper.java deleted file mode 100644 index 96af2c7663..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBHelper.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.source.mongoDB; - -import com.mongodb.client.MongoClient; -import com.mongodb.client.MongoClients; -import org.bson.Document; -import org.bson.conversions.Bson; -import org.bson.types.BSONTimestamp; -import org.bson.types.Binary; -import org.bson.types.Code; -import org.bson.types.Decimal128; -import org.bson.types.ObjectId; -import org.bson.types.Symbol; -import org.opensearch.dataprepper.plugins.kafkaconnect.configuration.MongoDBConfig; - -import static com.mongodb.client.model.Filters.and; -import static com.mongodb.client.model.Filters.gte; -import static com.mongodb.client.model.Filters.lte; - -public class MongoDBHelper { - private static final String MONGO_CONNECTION_STRING_TEMPLATE = "mongodb://%s:%s@%s:%s/?replicaSet=rs0&directConnection=true&readpreference=%s&ssl=%s&tlsAllowInvalidHostnames=%s"; - private static final String BINARY_PARTITION_FORMAT = "%s-%s"; - private static final String BINARY_PARTITION_SPLITTER = "-"; - private static final String TIMESTAMP_PARTITION_FORMAT = "%s-%s"; - private static final String TIMESTAMP_PARTITION_SPLITTER = "-"; - - public static MongoClient getMongoClient(final MongoDBConfig mongoDBConfig) { - String username = mongoDBConfig.getCredentialsConfig().getUsername(); - String password = mongoDBConfig.getCredentialsConfig().getPassword(); - String hostname = mongoDBConfig.getHostname(); - String port = mongoDBConfig.getPort(); - String ssl = mongoDBConfig.getSSLEnabled().toString(); - String invalidHostAllowed = mongoDBConfig.getSSLInvalidHostAllowed().toString(); - String readPreference = mongoDBConfig.getExportConfig().getReadPreference(); - String connectionString = String.format(MONGO_CONNECTION_STRING_TEMPLATE, username, password, hostname, port, readPreference, ssl, invalidHostAllowed); - - return MongoClients.create(connectionString); - } - - public static String getPartitionStringFromMongoDBId(Object id, String className) { - switch (className) { - case "org.bson.Document": - return ((Document) id).toJson(); - case "org.bson.types.Binary": - final byte type = ((Binary) id).getType(); - final byte[] data = ((Binary) id).getData(); - String typeString = String.valueOf((int) type); - String dataString = new String(data); - return 
String.format(BINARY_PARTITION_FORMAT, typeString, dataString); - case "org.bson.types.BSONTimestamp": - final int inc = ((BSONTimestamp) id).getInc(); - final int time = ((BSONTimestamp) id).getTime(); - return String.format(TIMESTAMP_PARTITION_FORMAT, inc, time); - case "org.bson.types.code": - return ((Code) id).getCode(); - default: - return id.toString(); - } - } - - public static Bson buildAndQuery(String gte, String lte, String className) { - switch (className) { - case "java.lang.Integer": - return and( - gte("_id", Integer.parseInt(gte)), - lte("_id", Integer.parseInt(lte)) - ); - case "java.lang.Double": - return and( - gte("_id", Double.parseDouble(gte)), - lte("_id", Double.parseDouble(lte)) - ); - case "java.lang.String": - return and( - gte("_id", gte), - lte("_id", lte) - ); - case "java.lang.Long": - return and( - gte("_id", Long.parseLong(gte)), - lte("_id", Long.parseLong(lte)) - ); - case "org.bson.types.ObjectId": - return and( - gte("_id", new ObjectId(gte)), - lte("_id", new ObjectId(lte)) - ); - case "org.bson.types.Decimal128": - return and( - gte("_id", Decimal128.parse(gte)), - lte("_id", Decimal128.parse(lte)) - ); - case "org.bson.types.Binary": - String[] gteString = gte.split(BINARY_PARTITION_SPLITTER, 2); - String[] lteString = lte.split(BINARY_PARTITION_SPLITTER, 2); - return and( - gte("_id", new Binary(Byte.parseByte(gteString[0]), gteString[1].getBytes())), - lte("_id", new Binary(Byte.parseByte(lteString[0]), lteString[1].getBytes())) - ); - case "org.bson.types.BSONTimestamp": - String[] gteTimestampString = gte.split(TIMESTAMP_PARTITION_SPLITTER, 2); - String[] lteTimestampString = lte.split(TIMESTAMP_PARTITION_SPLITTER, 2); - return and( - gte("_id", new BSONTimestamp(Integer.parseInt(gteTimestampString[0]), Integer.parseInt(gteTimestampString[1]))), - lte("_id", new BSONTimestamp(Integer.parseInt(lteTimestampString[0]), Integer.parseInt(lteTimestampString[1]))) - ); - case "org.bson.types.code": - return and( - gte("_id", new Code(gte)), - lte("_id", new Code(lte)) - ); - case "org.bson.types.Symbol": - return and( - gte("_id", new Symbol(gte)), - lte("_id", new Symbol(lte)) - ); - case "org.bson.Document": - return and( - gte("_id", Document.parse(gte)), - lte("_id", Document.parse(lte)) - ); - default: - throw new RuntimeException("Unexpected _id class supported: " + className); - } - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBPartitionCreationSupplier.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBPartitionCreationSupplier.java deleted file mode 100644 index e585bb8348..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBPartitionCreationSupplier.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.source.mongoDB; - -import com.mongodb.client.FindIterable; -import com.mongodb.client.MongoClient; -import com.mongodb.client.MongoCollection; -import com.mongodb.client.MongoCursor; -import com.mongodb.client.MongoDatabase; -import com.mongodb.client.model.Filters; -import org.bson.Document; -import org.opensearch.dataprepper.model.source.coordinator.PartitionIdentifier; -import 
org.opensearch.dataprepper.plugins.kafkaconnect.configuration.MongoDBConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.time.Instant; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.function.Function; -import java.util.stream.Collectors; - -public class MongoDBPartitionCreationSupplier implements Function, List> { - public static final String GLOBAL_STATE_PARTITIONED_COLLECTION_KEY = "partitionedCollections"; - private static final Logger LOG = LoggerFactory.getLogger(MongoDBPartitionCreationSupplier.class); - private static final String MONGODB_PARTITION_KEY_FORMAT = "%s|%s|%s|%s"; // partition format: ||| - private static final String COLLECTION_SPLITTER = "\\."; - - private final MongoDBConfig mongoDBConfig; - - public MongoDBPartitionCreationSupplier(final MongoDBConfig mongoDBConfig) { - this.mongoDBConfig = mongoDBConfig; - } - - @Override - public List apply(final Map globalStateMap) { - Map partitionedCollections = (Map) globalStateMap.getOrDefault(GLOBAL_STATE_PARTITIONED_COLLECTION_KEY, new HashMap<>()); - List collectionsToInitPartitions = this.getCollectionsToInitPartitions(mongoDBConfig, partitionedCollections); - - if (collectionsToInitPartitions.isEmpty()) { - return Collections.emptyList(); - } - - final List allPartitionIdentifiers = collectionsToInitPartitions - .parallelStream() - .flatMap(collectionName -> { - List partitions = this.buildPartitions(collectionName); - partitionedCollections.put(collectionName, Instant.now().toEpochMilli()); - return partitions.stream(); - }) - .collect(Collectors.toList()); - - globalStateMap.put(GLOBAL_STATE_PARTITIONED_COLLECTION_KEY, partitionedCollections); - return allPartitionIdentifiers; - } - - private List getCollectionsToInitPartitions(final MongoDBConfig mongoDBConfig, - final Map partitionedCollections) { - return mongoDBConfig.getCollections() - .stream() - .map(MongoDBConfig.CollectionConfig::getCollectionName) - .filter(collectionName -> !partitionedCollections.containsKey(collectionName)) - .collect(Collectors.toList()); - } - - private List buildPartitions(final String collectionName) { - List collectionPartitions = new ArrayList<>(); - List collection = List.of(collectionName.split(COLLECTION_SPLITTER)); - if (collection.size() < 2) { - throw new IllegalArgumentException("Invalid Collection Name. 
Must be in db.collection format"); - } - try (MongoClient mongoClient = MongoDBHelper.getMongoClient(mongoDBConfig)) { - MongoDatabase db = mongoClient.getDatabase(collection.get(0)); - MongoCollection col = db.getCollection(collection.get(1)); - int chunkSize = this.mongoDBConfig.getExportConfig().getItemsPerPartition(); - FindIterable startIterable = col.find() - .projection(new Document("_id", 1)) - .sort(new Document("_id", 1)) - .limit(1); - while (true) { - try (MongoCursor startCursor = startIterable.iterator()) { - if (!startCursor.hasNext()) { - break; - } - Document startDoc = startCursor.next(); - Object gteValue = startDoc.get("_id"); - String className = gteValue.getClass().getName(); - - // Get end doc - Document endDoc = startIterable.skip(chunkSize - 1).limit(1).first(); - if (endDoc == null) { - // this means we have reached the end of the doc - endDoc = col.find() - .projection(new Document("_id", 1)) - .sort(new Document("_id", -1)) - .limit(1) - .first(); - } - if (endDoc == null) { - break; - } - - Object lteValue = endDoc.get("_id"); - String gteValueString = MongoDBHelper.getPartitionStringFromMongoDBId(gteValue, className); - String lteValueString = MongoDBHelper.getPartitionStringFromMongoDBId(lteValue, className); - LOG.info("Chunk of " + collectionName + ": {gte: " + gteValueString + ", lte: " + lteValueString + "}"); - collectionPartitions.add( - PartitionIdentifier - .builder() - .withPartitionKey(String.format(MONGODB_PARTITION_KEY_FORMAT, collectionName, gteValueString, lteValueString, className)) - .build()); - - startIterable = col.find(Filters.gt("_id", lteValue)) - .projection(new Document("_id", 1)) - .sort(new Document("_id", 1)) - .limit(1); - } catch (Exception e) { - LOG.error("Failed to read start cursor when build partitions", e); - throw new RuntimeException(e); - } - } - } - return collectionPartitions; - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBService.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBService.java deleted file mode 100644 index 07dc439d53..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBService.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.source.mongoDB; - -import org.opensearch.dataprepper.metrics.PluginMetrics; -import org.opensearch.dataprepper.model.acknowledgements.AcknowledgementSetManager; -import org.opensearch.dataprepper.model.buffer.Buffer; -import org.opensearch.dataprepper.model.record.Record; -import org.opensearch.dataprepper.model.source.coordinator.SourceCoordinator; -import org.opensearch.dataprepper.plugins.kafkaconnect.configuration.MongoDBConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.time.Duration; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ScheduledFuture; -import java.util.concurrent.TimeUnit; - -public class MongoDBService { - private static final Logger LOG = LoggerFactory.getLogger(MongoDBService.class); - private static final Duration EXECUTOR_SERVICE_SHUTDOWN_TIMEOUT = Duration.ofSeconds(30); - private final PluginMetrics pluginMetrics; - private final MongoDBConfig 
mongoDBConfig; - private final Buffer> buffer; - private final MongoDBPartitionCreationSupplier mongoDBPartitionCreationSupplier; - private final ScheduledExecutorService scheduledExecutorService; - private final SourceCoordinator sourceCoordinator; - private final AcknowledgementSetManager acknowledgementSetManager; - private MongoDBSnapshotWorker snapshotWorker; - private ScheduledFuture snapshotWorkerFuture; - - - private MongoDBService( - final MongoDBConfig mongoDBConfig, - final SourceCoordinator sourceCoordinator, - final Buffer> buffer, - final ScheduledExecutorService scheduledExecutorService, - final AcknowledgementSetManager acknowledgementSetManager, - final PluginMetrics pluginMetrics) { - this.pluginMetrics = pluginMetrics; - this.mongoDBConfig = mongoDBConfig; - this.buffer = buffer; - this.scheduledExecutorService = scheduledExecutorService; - this.acknowledgementSetManager = acknowledgementSetManager; - this.sourceCoordinator = sourceCoordinator; - this.sourceCoordinator.initialize(); - this.mongoDBPartitionCreationSupplier = new MongoDBPartitionCreationSupplier(mongoDBConfig); - } - - public static MongoDBService create( - final MongoDBConfig mongoDBConfig, - final SourceCoordinator sourceCoordinator, - final Buffer> buffer, - final AcknowledgementSetManager acknowledgementSetManager, - final PluginMetrics pluginMetrics) { - return new MongoDBService( - mongoDBConfig, - sourceCoordinator, - buffer, - Executors.newSingleThreadScheduledExecutor(), - acknowledgementSetManager, - pluginMetrics); - } - - public void start() { - snapshotWorker = new MongoDBSnapshotWorker( - sourceCoordinator, - buffer, - mongoDBPartitionCreationSupplier, - pluginMetrics, - acknowledgementSetManager, - mongoDBConfig); - snapshotWorkerFuture = scheduledExecutorService.schedule(() -> snapshotWorker.run(), 0L, TimeUnit.MILLISECONDS); - } - - public void stop() { - scheduledExecutorService.shutdown(); - try { - snapshotWorkerFuture.cancel(true); - if (scheduledExecutorService.awaitTermination(EXECUTOR_SERVICE_SHUTDOWN_TIMEOUT.getSeconds(), TimeUnit.SECONDS)) { - LOG.info("Successfully waited for the snapshot worker to terminate"); - } else { - LOG.warn("snapshot worker did not terminate in time, forcing termination"); - scheduledExecutorService.shutdownNow(); - } - } catch (InterruptedException e) { - LOG.error("Interrupted while waiting for the snapshot worker to terminate", e); - scheduledExecutorService.shutdownNow(); - } - - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBSnapshotProgressState.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBSnapshotProgressState.java deleted file mode 100644 index 95ea8ba67e..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBSnapshotProgressState.java +++ /dev/null @@ -1,36 +0,0 @@ -package org.opensearch.dataprepper.plugins.kafkaconnect.source.mongoDB; - -import com.fasterxml.jackson.annotation.JsonProperty; - -public class MongoDBSnapshotProgressState { - @JsonProperty("totalRecords") - private long total; - @JsonProperty("successRecords") - private long success; - @JsonProperty("failedRecords") - private long failed; - - public long getTotal() { - return total; - } - - public long getSuccess() { - return success; - } - - public long getFailed() { - return failed; - } - - public void 
setTotal(long total) { - this.total = total; - } - - public void setSuccess(long successRecords) { - this.success = successRecords; - } - - public void setFailure(long failedRecords) { - this.failed = failedRecords; - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBSnapshotWorker.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBSnapshotWorker.java deleted file mode 100644 index 32819857eb..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBSnapshotWorker.java +++ /dev/null @@ -1,209 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.source.mongoDB; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.mongodb.client.MongoClient; -import com.mongodb.client.MongoCollection; -import com.mongodb.client.MongoCursor; -import com.mongodb.client.MongoDatabase; -import io.micrometer.core.instrument.Counter; -import org.bson.Document; -import org.bson.conversions.Bson; -import org.bson.json.JsonMode; -import org.bson.json.JsonWriterSettings; -import org.opensearch.dataprepper.metrics.PluginMetrics; -import org.opensearch.dataprepper.model.acknowledgements.AcknowledgementSet; -import org.opensearch.dataprepper.model.acknowledgements.AcknowledgementSetManager; -import org.opensearch.dataprepper.model.buffer.Buffer; -import org.opensearch.dataprepper.model.event.Event; -import org.opensearch.dataprepper.model.event.JacksonEvent; -import org.opensearch.dataprepper.model.opensearch.OpenSearchBulkActions; -import org.opensearch.dataprepper.model.record.Record; -import org.opensearch.dataprepper.model.source.coordinator.SourceCoordinator; -import org.opensearch.dataprepper.model.source.coordinator.SourcePartition; -import org.opensearch.dataprepper.plugins.kafkaconnect.configuration.MongoDBConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.time.Duration; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -public class MongoDBSnapshotWorker implements Runnable { - private static final Logger LOG = LoggerFactory.getLogger(MongoDBSnapshotWorker.class); - private static final Duration BACKOFF_ON_EXCEPTION = Duration.ofSeconds(60); - private static final Duration BACKOFF_ON_EMPTY_PARTITION = Duration.ofSeconds(60); - private static final Duration ACKNOWLEDGEMENT_SET_TIMEOUT = Duration.ofHours(2); - private static final String SUCCESS_ITEM_COUNTER_NAME = "exportRecordsSuccessTotal"; - private static final String FAILURE_ITEM_COUNTER_NAME = "exportRecordsFailedTotal"; - private static final String SUCCESS_PARTITION_COUNTER_NAME = "exportPartitionSuccessTotal"; - private static final String FAILURE_PARTITION_COUNTER_NAME = "exportPartitionFailureTotal"; - private static final String EVENT_SOURCE_COLLECTION_ATTRIBUTE = "__collection"; - private static final String EVENT_SOURCE_DB_ATTRIBUTE = "__source_db"; - private static final String EVENT_SOURCE_OPERATION = "__op"; - private static final String EVENT_SOURCE_TS_MS = "__source_ts_ms"; - private static final String EVENT_TYPE = "EXPORT"; - private static final String PARTITION_KEY_SPLITTER = "\\|"; - private 
static final String COLLECTION_SPLITTER = "\\."; - private final SourceCoordinator sourceCoordinator; - private static int DEFAULT_BUFFER_WRITE_TIMEOUT_MS = 5000; - private final Buffer> buffer; - private final MongoDBPartitionCreationSupplier mongoDBPartitionCreationSupplier; - private final AcknowledgementSetManager acknowledgementSetManager; - private final MongoDBConfig mongoDBConfig; - private final Counter successItemsCounter; - private final Counter failureItemsCounter; - private final Counter successPartitionCounter; - private final Counter failureParitionCounter; - private final ObjectMapper objectMapper = new ObjectMapper(); - private final TypeReference> mapTypeReference = new TypeReference>() { - }; - - - public MongoDBSnapshotWorker(final SourceCoordinator sourceCoordinator, - final Buffer> buffer, - final MongoDBPartitionCreationSupplier mongoDBPartitionCreationSupplier, - final PluginMetrics pluginMetrics, - final AcknowledgementSetManager acknowledgementSetManager, - final MongoDBConfig mongoDBConfig) { - this.sourceCoordinator = sourceCoordinator; - this.buffer = buffer; - this.mongoDBPartitionCreationSupplier = mongoDBPartitionCreationSupplier; - this.acknowledgementSetManager = acknowledgementSetManager; - this.mongoDBConfig = mongoDBConfig; - this.successItemsCounter = pluginMetrics.counter(SUCCESS_ITEM_COUNTER_NAME); - this.failureItemsCounter = pluginMetrics.counter(FAILURE_ITEM_COUNTER_NAME); - this.successPartitionCounter = pluginMetrics.counter(SUCCESS_PARTITION_COUNTER_NAME); - this.failureParitionCounter = pluginMetrics.counter(FAILURE_PARTITION_COUNTER_NAME); - } - - @Override - public void run() { - while (!Thread.currentThread().isInterrupted()) { - try { - final Optional> snapshotPartition = sourceCoordinator.getNextPartition(mongoDBPartitionCreationSupplier); - if (snapshotPartition.isEmpty()) { - try { - LOG.info("get empty partition"); - Thread.sleep(BACKOFF_ON_EMPTY_PARTITION.toMillis()); - continue; - } catch (final InterruptedException e) { - LOG.info("The worker was interrupted while sleeping after acquiring no indices to process, stopping processing"); - return; - } - } - LOG.info("get partition success {}", snapshotPartition.get().getPartitionKey()); - try { - final Optional acknowledgementSet = createAcknowledgementSet(snapshotPartition.get()); - this.startProcessPartition(snapshotPartition.get()); - if (acknowledgementSet.isEmpty()) { - sourceCoordinator.completePartition(snapshotPartition.get().getPartitionKey(), false); - } else { - sourceCoordinator.updatePartitionForAcknowledgmentWait(snapshotPartition.get().getPartitionKey(), ACKNOWLEDGEMENT_SET_TIMEOUT); - acknowledgementSet.get().complete(); - } - successPartitionCounter.increment(); - } catch (final Exception e) { - LOG.error("Received an exception while processing the partition.", e); - sourceCoordinator.giveUpPartitions(); - failureParitionCounter.increment(); - } - } catch (final Exception e) { - LOG.error("Received an exception while trying to snapshot documentDB, backing off and retrying", e); - try { - Thread.sleep(BACKOFF_ON_EXCEPTION.toMillis()); - } catch (final InterruptedException ex) { - LOG.info("The DocumentDBSnapshotWorker was interrupted before backing off and retrying, stopping processing"); - return; - } - } - } - } - - private void startProcessPartition(SourcePartition partition) { - List partitionKeys = List.of(partition.getPartitionKey().split(PARTITION_KEY_SPLITTER)); - if (partitionKeys.size() < 4) { - throw new RuntimeException("Invalid Partition Key. 
Must as db.collection|gte|lte format."); - } - List collection = List.of(partitionKeys.get(0).split(COLLECTION_SPLITTER)); - final String gte = partitionKeys.get(1); - final String lte = partitionKeys.get(2); - final String className = partitionKeys.get(3); - if (collection.size() < 2) { - throw new RuntimeException("Invalid Collection Name. Must as db.collection format"); - } - try (MongoClient mongoClient = MongoDBHelper.getMongoClient(mongoDBConfig)) { - MongoDatabase db = mongoClient.getDatabase(collection.get(0)); - MongoCollection col = db.getCollection(collection.get(1)); - Bson query = MongoDBHelper.buildAndQuery(gte, lte, className); - long totalRecords = 0L; - long successRecords = 0L; - long failedRecords = 0L; - try (MongoCursor cursor = col.find(query).iterator()) { - while (cursor.hasNext()) { - totalRecords += 1; - try { - JsonWriterSettings writerSettings = JsonWriterSettings.builder() - .outputMode(JsonMode.RELAXED) - .objectIdConverter((value, writer) -> writer.writeString(value.toHexString())) - .build(); - String record = cursor.next().toJson(writerSettings); - Map data = convertToMap(record); - data.putIfAbsent(EVENT_SOURCE_DB_ATTRIBUTE, collection.get(0)); - data.putIfAbsent(EVENT_SOURCE_COLLECTION_ATTRIBUTE, collection.get(1)); - data.putIfAbsent(EVENT_SOURCE_OPERATION, OpenSearchBulkActions.CREATE.toString()); - data.putIfAbsent(EVENT_SOURCE_TS_MS, 0); - if (buffer.isByteBuffer()) { - buffer.writeBytes(objectMapper.writeValueAsBytes(data), null, DEFAULT_BUFFER_WRITE_TIMEOUT_MS); - } else { - buffer.write(getEventFromData(data), DEFAULT_BUFFER_WRITE_TIMEOUT_MS); - } - successItemsCounter.increment(); - successRecords += 1; - } catch (Exception e) { - LOG.error("failed to add record to buffer with error {}", e.getMessage()); - failureItemsCounter.increment(); - failedRecords += 1; - } - } - final MongoDBSnapshotProgressState progressState = new MongoDBSnapshotProgressState(); - progressState.setTotal(totalRecords); - progressState.setSuccess(successRecords); - progressState.setFailure(failedRecords); - sourceCoordinator.saveProgressStateForPartition(partition.getPartitionKey(), progressState); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - } - - private Optional createAcknowledgementSet(SourcePartition partition) { - if (mongoDBConfig.getExportConfig().getAcknowledgements()) { - return Optional.of(this.acknowledgementSetManager.create((result) -> { - if (result) { - this.sourceCoordinator.completePartition(partition.getPartitionKey(), true); - } - }, ACKNOWLEDGEMENT_SET_TIMEOUT)); - } - return Optional.empty(); - } - - private Map convertToMap(String jsonData) throws JsonProcessingException { - return objectMapper.readValue(jsonData, mapTypeReference); - } - - private Record getEventFromData(Map data) { - Event event = JacksonEvent.builder() - .withEventType(EVENT_TYPE) - .withData(data) - .build(); - return new Record<>(event); - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/Connector.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/Connector.java deleted file mode 100644 index a413a89bbd..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/Connector.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package 
org.opensearch.dataprepper.plugins.kafkaconnect.util; - -import java.util.Map; - -public class Connector { - private final String name; - private final Map config; - private final Boolean allowReplace; - - public Connector(final String name, final Map config, final Boolean allowReplace) { - this.name = name; - this.config = config; - this.allowReplace = allowReplace; - } - - public String getName() { - return this.name; - } - - public Map getConfig() { - config.putIfAbsent("name", name); - return config; - } - - public Boolean getAllowReplace() { - return allowReplace; - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/KafkaConnect.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/KafkaConnect.java deleted file mode 100644 index 3a1e82bace..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/KafkaConnect.java +++ /dev/null @@ -1,375 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.util; - -import org.apache.kafka.common.utils.Time; -import org.apache.kafka.connect.connector.policy.ConnectorClientConfigOverridePolicy; -import org.apache.kafka.connect.errors.AlreadyExistsException; -import org.apache.kafka.connect.errors.NotFoundException; -import org.apache.kafka.connect.json.JsonConverter; -import org.apache.kafka.connect.json.JsonConverterConfig; -import org.apache.kafka.connect.runtime.Connect; -import org.apache.kafka.connect.runtime.Worker; -import org.apache.kafka.connect.runtime.WorkerConfig; -import org.apache.kafka.connect.runtime.WorkerConfigTransformer; -import org.apache.kafka.connect.runtime.distributed.DistributedConfig; -import org.apache.kafka.connect.runtime.distributed.DistributedHerder; -import org.apache.kafka.connect.runtime.distributed.NotLeaderException; -import org.apache.kafka.connect.runtime.isolation.Plugins; -import org.apache.kafka.connect.runtime.rest.ConnectRestServer; -import org.apache.kafka.connect.runtime.rest.RestClient; -import org.apache.kafka.connect.runtime.rest.RestServer; -import org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo; -import org.apache.kafka.connect.storage.ConfigBackingStore; -import org.apache.kafka.connect.storage.Converter; -import org.apache.kafka.connect.storage.KafkaConfigBackingStore; -import org.apache.kafka.connect.storage.KafkaOffsetBackingStore; -import org.apache.kafka.connect.storage.KafkaStatusBackingStore; -import org.apache.kafka.connect.storage.StatusBackingStore; -import org.apache.kafka.connect.util.ConnectUtils; -import org.apache.kafka.connect.util.SharedTopicAdmin; -import org.opensearch.dataprepper.metrics.PluginMetrics; -import org.opensearch.dataprepper.plugins.kafkaconnect.meter.KafkaConnectMetrics; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.net.URI; -import java.time.Clock; -import java.time.Duration; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import static org.apache.kafka.clients.CommonClientConfigs.CLIENT_ID_CONFIG; - -/** - * The KafkaConnect infra. 
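The removed `Connector` holder above simply paired a connector name with its Kafka Connect property map and guaranteed the mandatory `name` key via `putIfAbsent`. A self-contained sketch of the same idea; the Debezium property values and connector class below are illustrative assumptions, not taken from this change:

```java
import java.util.HashMap;
import java.util.Map;

// Self-contained sketch of the removed Connector holder and how it was used:
// the config map carries Kafka Connect / Debezium properties, and getConfig()
// guarantees the mandatory "name" key is present. Values are illustrative only.
public class ConnectorSketch {
    private final String name;
    private final Map<String, String> config;
    private final boolean allowReplace;

    ConnectorSketch(String name, Map<String, String> config, boolean allowReplace) {
        this.name = name;
        this.config = config;
        this.allowReplace = allowReplace;
    }

    Map<String, String> getConfig() {
        config.putIfAbsent("name", name); // Kafka Connect requires a connector name
        return config;
    }

    boolean getAllowReplace() {
        return allowReplace; // whether an existing connector config may be overwritten
    }

    public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        props.put("connector.class", "io.debezium.connector.mysql.MySqlConnector"); // assumed value
        props.put("database.hostname", "localhost");
        props.put("database.port", "3306");
        props.put("topic.prefix", "prefix1");
        props.put("table.include.list", "inventory.customers");

        ConnectorSketch connector = new ConnectorSketch("mysql-connector", props, false);
        System.out.println(connector.getConfig().get("name")); // mysql-connector
    }
}
```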
- * Unique with single instance for each pipeline. - */ -public class KafkaConnect { - private static final Logger LOG = LoggerFactory.getLogger(KafkaConnect.class); - private static volatile Map instanceMap = new HashMap<>(); - private static final long RETRY_INTERVAL_MS = 3000L; // 3 seconds - private static final int LATCH_WAIT_TIME = 1; // 1 minute - private static final String RUNNING = "RUNNING"; - private final Map connectorMap; - private final KafkaConnectMetrics kafkaConnectMetrics; - private final Time time = Time.SYSTEM; - private final Clock clock = Clock.systemUTC(); - private DistributedHerder herder; - private RestServer rest; - private Connect connect; - private final long connectTimeoutMs; // 60 seconds - private final long connectorTimeoutMs; // 30 seconds - - private KafkaConnect(final PluginMetrics pluginMetrics, - final Duration connectTimeout, - final Duration connectorTimeout) { - this.connectorMap = new HashMap<>(); - this.kafkaConnectMetrics = new KafkaConnectMetrics(pluginMetrics); - this.connectTimeoutMs = connectTimeout.toMillis(); - this.connectorTimeoutMs = connectorTimeout.toMillis(); - } - - /** - * For Testing - */ - public KafkaConnect(final DistributedHerder herder, - final RestServer rest, - final Connect connect, - final KafkaConnectMetrics kafkaConnectMetrics) { - this.connectorMap = new HashMap<>(); - this.herder = herder; - this.rest = rest; - this.connect = connect; - this.kafkaConnectMetrics = kafkaConnectMetrics; - this.connectTimeoutMs = 60000L; - this.connectorTimeoutMs = 30000L; - } - - public static KafkaConnect getPipelineInstance(final String pipelineName, - final PluginMetrics pluginMetrics, - final Duration connectTimeout, - final Duration connectorTimeout) { - KafkaConnect instance = instanceMap.get(pipelineName); - if (instance == null) { - synchronized (KafkaConnect.class) { - instance = new KafkaConnect(pluginMetrics, connectTimeout, connectorTimeout); - instanceMap.put(pipelineName, instance); - } - } - return instance; - } - - public synchronized void initialize(Map workerProps) { - DistributedConfig config = new DistributedConfig(workerProps); - RestClient restClient = new RestClient(config); - this.rest = new ConnectRestServer(config.rebalanceTimeout(), restClient, workerProps); - this.herder = initHerder(workerProps, config, restClient); - this.connect = new Connect(herder, (ConnectRestServer) rest); - } - - /** - * Add connectors to the Kafka Connect. - * This must be done before the start() is called. - * - * @param connectors connectors to be added. - */ - public void addConnectors(List connectors) { - connectors.forEach(connector -> { - this.connectorMap.put(connector.getName(), connector); - }); - } - - /** - * Start the kafka connect. - * Will add all connectors, and cleanup unused connectors at once. - */ - public synchronized void start() { - if (this.connect == null) { - throw new RuntimeException("Please initialize Kafka Connect first."); - } - if (this.connect.isRunning()) { - LOG.info("Kafka Connect is running, will not start again"); - return; - } - LOG.info("Starting Kafka Connect"); - try { - this.rest.initializeServer(); - this.connect.start(); - waitForConnectRunning(); - this.kafkaConnectMetrics.bindConnectMetrics(); - this.initConnectors(); - } catch (Exception e) { - LOG.error("Failed to start Connect", e); - this.connect.stop(); - throw new RuntimeException(e); - } - } - - /** - * Stop the Kafka Connect. 
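`getPipelineInstance` above kept one `KafkaConnect` per pipeline in a static map, creating the instance under a class-level lock (without re-checking the map inside the lock). A simplified standalone sketch of the same per-pipeline registry, written with `computeIfAbsent` so the create-or-return step is atomic; the class names here are assumed for illustration:

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Standalone sketch of a "one instance per pipeline" lookup, written with
// computeIfAbsent so concurrent callers cannot create two instances for the
// same pipeline name.
public class PerPipelineRegistrySketch {
    static final class PipelineRuntime {
        final String pipelineName;
        PipelineRuntime(String pipelineName) { this.pipelineName = pipelineName; }
    }

    private static final Map<String, PipelineRuntime> INSTANCES = new ConcurrentHashMap<>();

    static PipelineRuntime getPipelineInstance(String pipelineName) {
        // Atomically create-or-return the runtime tied to this pipeline name.
        return INSTANCES.computeIfAbsent(pipelineName, PipelineRuntime::new);
    }

    public static void main(String[] args) {
        PipelineRuntime a = getPipelineInstance("log-pipeline");
        PipelineRuntime b = getPipelineInstance("log-pipeline");
        System.out.println(a.pipelineName + " same instance: " + (a == b)); // true
    }
}
```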
- */ - public void stop() { - if (this.connect == null) { - LOG.info("Kafka Connect is running, will not start again"); - return; - } - LOG.info("Stopping Kafka Connect"); - this.connect.stop(); - } - - private DistributedHerder initHerder(Map workerProps, DistributedConfig config, RestClient restClient) { - LOG.info("Scanning for plugin classes. This might take a moment ..."); - Plugins plugins = new Plugins(workerProps); - plugins.compareAndSwapWithDelegatingLoader(); - String kafkaClusterId = config.kafkaClusterId(); - LOG.info("Kafka cluster ID: {}", kafkaClusterId); - - URI advertisedUrl = rest.advertisedUrl(); - String workerId = advertisedUrl.getHost() + ":" + advertisedUrl.getPort(); - - String clientIdBase = ConnectUtils.clientIdBase(config); - - // Create the admin client to be shared by all backing stores. - Map adminProps = new HashMap<>(config.originals()); - ConnectUtils.addMetricsContextProperties(adminProps, config, kafkaClusterId); - adminProps.put(CLIENT_ID_CONFIG, clientIdBase + "shared-admin"); - SharedTopicAdmin sharedAdmin = new SharedTopicAdmin(adminProps); - - KafkaOffsetBackingStore offsetBackingStore = new KafkaOffsetBackingStore(sharedAdmin, () -> clientIdBase, - plugins.newInternalConverter(true, JsonConverter.class.getName(), - Collections.singletonMap(JsonConverterConfig.SCHEMAS_ENABLE_CONFIG, "false"))); - offsetBackingStore.configure(config); - - ConnectorClientConfigOverridePolicy connectorClientConfigOverridePolicy = plugins.newPlugin( - config.getString(WorkerConfig.CONNECTOR_CLIENT_POLICY_CLASS_CONFIG), - config, ConnectorClientConfigOverridePolicy.class); - - Worker worker = new Worker(workerId, time, plugins, config, offsetBackingStore, connectorClientConfigOverridePolicy); - WorkerConfigTransformer configTransformer = worker.configTransformer(); - - Converter internalValueConverter = worker.getInternalValueConverter(); - StatusBackingStore statusBackingStore = new KafkaStatusBackingStore(time, internalValueConverter, sharedAdmin, clientIdBase); - statusBackingStore.configure(config); - - ConfigBackingStore configBackingStore = new KafkaConfigBackingStore( - internalValueConverter, - config, - configTransformer, - sharedAdmin, - clientIdBase); - - // Pass the shared admin to the distributed herder as an additional AutoCloseable object that should be closed when the - // herder is stopped. This is easier than having to track and own the lifecycle ourselves. - return new DistributedHerder(config, time, worker, - kafkaClusterId, statusBackingStore, configBackingStore, - advertisedUrl.toString(), restClient, connectorClientConfigOverridePolicy, - Collections.emptyList(), sharedAdmin); - } - - /** - * - * @throws InterruptedException - */ - private void waitForConnectRunning() throws InterruptedException { - long startTime = clock.millis(); - boolean isRunning = false; - while (clock.millis() - startTime < connectTimeoutMs) { - LOG.info("Waiting Kafka Connect running"); - isRunning = this.connect.isRunning(); - if (isRunning) break; - TimeUnit.MILLISECONDS.sleep(RETRY_INTERVAL_MS); - } - if (!isRunning) { - throw new RuntimeException("Timed out waiting for Kafka Connect running"); - } - LOG.info("Kafka Connect is running"); - } - - /** - * Initialize connectors. - * The Kafka Connectors are managed in orders: - * 1. Delete Connectors not in pipeline configurations - * 2. Register Connectors - * 3. Wait for all connectors in running state. - * 4. 
Bind connectors' metrics - */ - private void initConnectors() throws InterruptedException { - this.deleteConnectors(); - this.registerConnectors(); - this.waitForConnectorsRunning(); - this.kafkaConnectMetrics.bindConnectorMetrics(); - } - - /** - * Register Connector to Kafka Connect. - * Designed as private method to prevent register the connector after connect is started. - */ - private void registerConnectors() throws InterruptedException { - CountDownLatch connectorLatch = new CountDownLatch(connectorMap.size()); - List exceptionMessages = new ArrayList<>(); - connectorMap.forEach((connectorName, connector) -> { - herder.connectorConfig(connectorName, (e, config) -> { - boolean shouldUpdate; - if (config == null) { - shouldUpdate = true; - } else { - shouldUpdate = connector.getAllowReplace() || (!config.equals(connector.getConfig())); - } - herder.putConnectorConfig(connectorName, connector.getConfig(), shouldUpdate, (error, result) -> { - if (error != null) { - if (error instanceof NotLeaderException || error instanceof AlreadyExistsException) { - LOG.info(error.getMessage()); - } else { - LOG.error("Failed to put connector config: {}", connectorName); - exceptionMessages.add(error.getMessage()); - } - } else { - // Handle the successful registration - LOG.info("Success put connector config: {}", connectorName); - } - connectorLatch.countDown(); - }); - }); - }); - // Block and wait for all tasks to complete - if (!connectorLatch.await(LATCH_WAIT_TIME, TimeUnit.MINUTES)) { - throw new RuntimeException("Timed out waiting for initConnectors"); - } else { - if (!exceptionMessages.isEmpty()) { - throw new RuntimeException(String.join(", ", exceptionMessages)); - } - LOG.info("InitConnectors completed"); - } - } - - /** - * Delete Connectors from Kafka Connect. - * Designed as private method to prevent delete the connector after connect is started. 
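Both `registerConnectors` above and `deleteConnectors` below coordinate asynchronous herder callbacks with a `CountDownLatch`, collect error messages, and fail if the latch does not reach zero within the wait time. A self-contained sketch of that pattern, with the herder call replaced by a stand-in asynchronous task:

```java
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

// Sketch of the coordination pattern used by the deleted register/delete methods:
// every asynchronous callback counts down a shared latch, error messages are
// collected, and the caller fails if the latch does not reach zero in time.
public class LatchCoordinationSketch {
    public static void main(String[] args) throws InterruptedException {
        List<String> connectors = List.of("mongodb-connector", "mysql-connector");
        CountDownLatch latch = new CountDownLatch(connectors.size());
        List<String> errors = Collections.synchronizedList(new ArrayList<>());

        for (String name : connectors) {
            // Stand-in for the asynchronous herder.putConnectorConfig(...) callback.
            CompletableFuture.runAsync(() -> {
                try {
                    System.out.println("registered " + name);
                } catch (Exception e) {
                    errors.add(e.getMessage());
                } finally {
                    latch.countDown();
                }
            });
        }

        if (!latch.await(1, TimeUnit.MINUTES)) {
            throw new RuntimeException("Timed out waiting for connector registration");
        }
        if (!errors.isEmpty()) {
            throw new RuntimeException(String.join(", ", errors));
        }
        System.out.println("All connectors registered");
    }
}
```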
- */ - private void deleteConnectors() throws InterruptedException { - Collection connectorsToDelete = this.herder.connectors() - .stream() - .filter(connectorName -> !connectorMap.containsKey(connectorName)) - .collect(Collectors.toList()); - List exceptionMessages = new ArrayList<>(); - CountDownLatch deleteLatch = new CountDownLatch(connectorsToDelete.size()); - connectorsToDelete.forEach(connectorName -> { - herder.deleteConnectorConfig(connectorName, (error, result) -> { - if (error != null) { - if (error instanceof NotLeaderException || error instanceof NotFoundException) { - LOG.info(error.getMessage()); - } else { - LOG.error("Failed to delete connector config: {}", connectorName); - exceptionMessages.add(error.getMessage()); - } - } else { - // Handle the successful registration - LOG.info("Success delete connector config: {}", connectorName); - } - deleteLatch.countDown(); - }); - }); - // Block and wait for all tasks to complete - if (!deleteLatch.await(LATCH_WAIT_TIME, TimeUnit.MINUTES)) { - throw new RuntimeException("Timed out waiting for deleteConnectors"); - } else { - if (!exceptionMessages.isEmpty()) { - throw new RuntimeException(String.join(", ", exceptionMessages)); - } - LOG.info("deleteConnectors completed"); - } - } - - private void waitForConnectorsRunning() throws InterruptedException { - LOG.info("Waiting for connectors to be running"); - Set connectorNames = this.connectorMap.keySet(); - List exceptionMessages = new ArrayList<>(); - CountDownLatch countDownLatch = new CountDownLatch(connectorNames.size()); - connectorNames.parallelStream().forEach(connectorName -> { - long startTime = clock.millis(); - boolean isRunning = false; - while (clock.millis() - startTime < connectorTimeoutMs) { - try { - ConnectorStateInfo info = herder.connectorStatus(connectorName); - if (RUNNING.equals(info.connector().state())) { - // Connector is running, decrement the latch count - isRunning = true; - break; - } - } catch (Exception e) { - LOG.info(e.getMessage()); - } - try { - TimeUnit.MILLISECONDS.sleep(RETRY_INTERVAL_MS); - } catch (InterruptedException e) { - break; - } - } - countDownLatch.countDown(); - if (!isRunning) { - exceptionMessages.add(String.format("Connector %s is not running in desired period of time", connectorName)); - } - }); - // Block and wait for all tasks to complete - if (!countDownLatch.await(LATCH_WAIT_TIME, TimeUnit.MINUTES)) { - throw new RuntimeException("Timed out waiting for running state check"); - } else { - if (!exceptionMessages.isEmpty()) { - throw new RuntimeException(String.join(", ", exceptionMessages)); - } - LOG.info("All connectors are running"); - } - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/SecretManagerHelper.java b/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/SecretManagerHelper.java deleted file mode 100644 index a4a31c52bb..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/main/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/SecretManagerHelper.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.util; - -import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; -import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; -import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration; 
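`waitForConnectRunning` and `waitForConnectorsRunning` above both poll a status until it reports `RUNNING` or a deadline passes, sleeping a fixed retry interval between attempts. A standalone sketch of that loop, with the herder status call replaced by a plain `Supplier`:

```java
import java.time.Clock;
import java.time.Duration;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;

// Sketch of the poll-with-deadline loop the deleted class used to wait for
// Kafka Connect (and each connector) to reach the RUNNING state. The status
// supplier here is a stand-in for herder.connectorStatus(...).
public class PollUntilRunningSketch {
    private static final long RETRY_INTERVAL_MS = 3000L;

    static boolean waitForRunning(Supplier<String> statusSupplier, Duration timeout, Clock clock)
            throws InterruptedException {
        long deadline = clock.millis() + timeout.toMillis();
        while (clock.millis() < deadline) {
            if ("RUNNING".equals(statusSupplier.get())) {
                return true;
            }
            TimeUnit.MILLISECONDS.sleep(RETRY_INTERVAL_MS);
        }
        return false;
    }

    public static void main(String[] args) throws InterruptedException {
        // Simulated status source that becomes RUNNING after roughly 5 seconds.
        long start = System.currentTimeMillis();
        Supplier<String> status = () ->
                System.currentTimeMillis() - start > 5000 ? "RUNNING" : "UNASSIGNED";

        boolean running = waitForRunning(status, Duration.ofSeconds(30), Clock.systemUTC());
        System.out.println(running ? "connector is running" : "timed out");
    }
}
```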
-import software.amazon.awssdk.core.retry.RetryPolicy; -import software.amazon.awssdk.regions.Region; -import software.amazon.awssdk.services.secretsmanager.SecretsManagerClient; -import software.amazon.awssdk.services.secretsmanager.model.GetSecretValueRequest; -import software.amazon.awssdk.services.secretsmanager.model.GetSecretValueResponse; -import software.amazon.awssdk.services.sts.StsClient; -import software.amazon.awssdk.services.sts.auth.StsAssumeRoleCredentialsProvider; -import software.amazon.awssdk.services.sts.model.AssumeRoleRequest; - -import java.util.UUID; - -public class SecretManagerHelper { - private static final String SESSION_PREFIX = "data-prepper-secretmanager-session"; - public static String getSecretValue(final String stsRoleArn, final String region, final String secretId) { - AwsCredentialsProvider credentialsProvider = DefaultCredentialsProvider.create(); - ClientOverrideConfiguration clientOverrideConfiguration = ClientOverrideConfiguration - .builder() - .retryPolicy(RetryPolicy.defaultRetryPolicy()) - .build(); - - if (stsRoleArn != null && !stsRoleArn.isEmpty()) { - String sessionName = SESSION_PREFIX + UUID.randomUUID(); - StsClient stsClient = StsClient.builder() - .overrideConfiguration(clientOverrideConfiguration) - .region(Region.of(region)) - .credentialsProvider(credentialsProvider) - .build(); - AssumeRoleRequest assumeRoleRequest = AssumeRoleRequest - .builder() - .roleArn(stsRoleArn) - .roleSessionName(sessionName) - .build(); - credentialsProvider = StsAssumeRoleCredentialsProvider - .builder() - .stsClient(stsClient) - .refreshRequest(assumeRoleRequest) - .build(); - } - SecretsManagerClient secretsManagerClient = SecretsManagerClient.builder() - .overrideConfiguration(clientOverrideConfiguration) - .credentialsProvider(credentialsProvider) - .region(Region.of(region)) - .build(); - final GetSecretValueRequest request = GetSecretValueRequest.builder().secretId(secretId).build(); - final GetSecretValueResponse response = secretsManagerClient.getSecretValue(request); - return response.secretString(); - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/CredentialConfigTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/CredentialConfigTest.java deleted file mode 100644 index e36af1e63a..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/CredentialConfigTest.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.configuration; - -import org.junit.jupiter.api.Test; -import org.mockito.MockedStatic; -import org.opensearch.dataprepper.plugins.kafkaconnect.util.SecretManagerHelper; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.is; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.Mockito.mockStatic; - -public class CredentialConfigTest { - private final String testUserName = "testUser"; - private final String testPassword = "testPassword"; - private final String testStsRole = "testRole"; - private final String testRegion = "testRegion"; - private final String testSecretId = "testSecritId"; - - @Test - void test_credential_config_plaintext() { - CredentialsConfig credentialsConfig = new 
CredentialsConfig( - new CredentialsConfig.PlainText(testUserName, testPassword), null); - assertThat(credentialsConfig.getUsername(), is(testUserName)); - assertThat(credentialsConfig.getPassword(), is(testPassword)); - } - - @Test - void test_credential_config_plaintext_invalid() { - assertThrows(IllegalArgumentException.class, () -> new CredentialsConfig( - new CredentialsConfig.PlainText(null, null), null)); - assertThrows(IllegalArgumentException.class, () -> new CredentialsConfig( - new CredentialsConfig.PlainText(testUserName, null), null)); - assertThrows(IllegalArgumentException.class, () -> new CredentialsConfig( - new CredentialsConfig.PlainText(null, testPassword), null)); - } - - @Test - void test_credential_config_secret_manager() { - final String expectedSecret = "{\"username\":\"expectedUsername\",\"password\":\"expectedPassword\"}"; - try (MockedStatic mockedStatic = mockStatic(SecretManagerHelper.class)) { - mockedStatic.when(() -> SecretManagerHelper.getSecretValue(testStsRole, testRegion, testSecretId)).thenReturn(expectedSecret); - CredentialsConfig credentialsConfig = new CredentialsConfig( - null, new CredentialsConfig.SecretManager(testStsRole, testRegion, testSecretId)); - assertThat(credentialsConfig.getUsername(), is("expectedUsername")); - assertThat(credentialsConfig.getPassword(), is("expectedPassword")); - } - } - - @Test - void test_credential_config_failure_on_secret_manager() { - try (MockedStatic mockedStatic = mockStatic(SecretManagerHelper.class)) { - mockedStatic.when(() -> SecretManagerHelper.getSecretValue(testStsRole, testRegion, testSecretId)).thenThrow(new RuntimeException()); - assertThrows(RuntimeException.class, () -> new CredentialsConfig( - null, new CredentialsConfig.SecretManager(testStsRole, testRegion, testSecretId))); - final String invalidSecret = "{}"; - mockedStatic.when(() -> SecretManagerHelper.getSecretValue(testStsRole, testRegion, testSecretId)).thenReturn(invalidSecret); - assertThrows(RuntimeException.class, () -> new CredentialsConfig( - null, new CredentialsConfig.SecretManager(testStsRole, testRegion, testSecretId))); - } - } - - @Test - void test_credential_config_secret_manager_invalid_input() { - assertThrows(IllegalArgumentException.class, () -> new CredentialsConfig( - null, new CredentialsConfig.SecretManager(null, null, null))); - assertThrows(IllegalArgumentException.class, () -> new CredentialsConfig( - null, new CredentialsConfig.SecretManager(null, null, testSecretId))); - assertThrows(IllegalArgumentException.class, () -> new CredentialsConfig( - null, new CredentialsConfig.SecretManager(null, testRegion, null))); - } - - @Test - void test_invalid_credential_config() { - // Must be set - assertThrows(IllegalArgumentException.class, () -> new CredentialsConfig(null, null)); - // Cannot both set - assertThrows(IllegalArgumentException.class, () -> new CredentialsConfig( - new CredentialsConfig.PlainText(testUserName, testPassword), - new CredentialsConfig.SecretManager(testStsRole, testRegion, testSecretId) - )); - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/MongoDBConfigTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/MongoDBConfigTest.java deleted file mode 100644 index ef3bf843b8..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/MongoDBConfigTest.java +++ 
/dev/null @@ -1,81 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.configuration; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; -import org.junit.jupiter.api.Test; -import org.opensearch.dataprepper.plugins.kafkaconnect.util.Connector; -import org.yaml.snakeyaml.Yaml; - -import java.io.FileReader; -import java.io.IOException; -import java.io.Reader; -import java.io.StringReader; -import java.util.Map; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; - -public class MongoDBConfigTest { - - @Test - public void test_get_mongodb_connectors() throws IOException { - MongoDBConfig testConfig = buildTestConfig("sample-mongodb-pipeline.yaml"); - assertThat(testConfig, notNullValue()); - assertThat(testConfig.buildConnectors(), notNullValue()); - assertThat(testConfig.buildConnectors().size(), is(1)); - // verify Connector - Connector mongodbConnector = testConfig.buildConnectors().get(0); - assertThat(mongodbConnector, instanceOf(Connector.class)); - final Map actualConfig = mongodbConnector.getConfig(); - assertThat(actualConfig.get("connector.class"), is(MongoDBConfig.CONNECTOR_CLASS)); - assertThat(actualConfig.get("mongodb.connection.string"), is("mongodb://localhost:27017/?replicaSet=rs0&directConnection=true")); - assertThat(actualConfig.get("mongodb.user"), is("debezium")); - assertThat(actualConfig.get("mongodb.password"), is("dbz")); - assertThat(actualConfig.get("snapshot.mode"), is("never")); - assertThat(actualConfig.get("topic.prefix"), is("prefix1")); - assertThat(actualConfig.get("collection.include.list"), is("test.customers")); - assertThat(actualConfig.get("mongodb.ssl.enabled"), is("false")); - } - - @Test - public void test_get_mongodb_config_props() throws IOException { - MongoDBConfig testConfig = buildTestConfig("sample-mongodb-pipeline.yaml"); - assertThat(testConfig, notNullValue()); - assertThat(testConfig.getIngestionMode(), is(MongoDBConfig.IngestionMode.EXPORT_STREAM)); - assertThat(testConfig.getCredentialsConfig().getUsername(), is("debezium")); - assertThat(testConfig.getHostname(), is("localhost")); - assertThat(testConfig.getPort(), is("27017")); - assertThat(testConfig.getSSLEnabled(), is(false)); - assertThat(testConfig.getSSLInvalidHostAllowed(), is(false)); - assertThat(testConfig.getCollections().size(), is(1)); - assertThat(testConfig.getExportConfig().getAcknowledgements(), is(false)); - assertThat(testConfig.getExportConfig().getItemsPerPartition(), is(4000)); - assertThat(testConfig.getExportConfig().getReadPreference(), is("secondaryPreferred")); - } - - private MongoDBConfig buildTestConfig(final String resourceFileName) throws IOException { - //Added to load Yaml file - Start - Yaml yaml = new Yaml(); - FileReader fileReader = new FileReader(getClass().getClassLoader().getResource(resourceFileName).getFile()); - Object data = yaml.load(fileReader); - if (data instanceof Map) { - Map propertyMap = (Map) data; - Map logPipelineMap = (Map) propertyMap.get("log-pipeline"); - Map sourceMap = (Map) logPipelineMap.get("source"); - Map kafkaConnectConfigMap = (Map) sourceMap.get("mongodb"); - ObjectMapper mapper = new ObjectMapper(); - mapper.registerModule(new JavaTimeModule()); - String json = 
mapper.writeValueAsString(kafkaConnectConfigMap); - Reader reader = new StringReader(json); - return mapper.readValue(reader, MongoDBConfig.class); - } - return null; - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/MySQLConfigTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/MySQLConfigTest.java deleted file mode 100644 index fa4d3526da..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/MySQLConfigTest.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.configuration; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; -import org.junit.jupiter.api.Test; -import org.opensearch.dataprepper.plugins.kafkaconnect.util.Connector; -import org.yaml.snakeyaml.Yaml; - -import java.io.FileReader; -import java.io.IOException; -import java.io.Reader; -import java.io.StringReader; -import java.util.Map; -import java.util.Properties; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; - -public class MySQLConfigTest { - @Test - public void test_get_mysql_connectors() throws IOException { - final String bootstrapServers = "localhost:9092"; - final Properties authProperties = new Properties(); - authProperties.put("bootstrap.servers", bootstrapServers); - authProperties.put("testClass", this.getClass()); - authProperties.put("testKey", "testValue"); - MySQLConfig testConfig = buildTestConfig("sample-mysql-pipeline.yaml"); - assertThat(testConfig, notNullValue()); - assertThat(testConfig.buildConnectors(), notNullValue()); - assertThat(testConfig.buildConnectors().size(), is(1)); - // verify Connector - testConfig.setAuthProperties(authProperties); - Connector mysqlConnector = testConfig.buildConnectors().get(0); - assertThat(mysqlConnector, instanceOf(Connector.class)); - final Map actualConfig = mysqlConnector.getConfig(); - assertThat(actualConfig.get("connector.class"), is(MySQLConfig.CONNECTOR_CLASS)); - assertThat(actualConfig.get("database.hostname"), is("localhost")); - assertThat(actualConfig.get("database.port"), is("3306")); - assertThat(actualConfig.get("database.user"), is("debezium")); - assertThat(actualConfig.get("database.password"), is("dbz")); - assertThat(actualConfig.get("snapshot.mode"), is("initial")); - assertThat(actualConfig.get("topic.prefix"), is("prefix1")); - assertThat(actualConfig.get("table.include.list"), is("inventory.customers")); - assertThat(actualConfig.get("schema.history.internal.kafka.bootstrap.servers"), is(bootstrapServers)); - assertThat(actualConfig.get("schema.history.internal.producer.testKey"), is(authProperties.getProperty("testKey"))); - assertThat(actualConfig.get("schema.history.internal.consumer.testKey"), is(authProperties.getProperty("testKey"))); - assertThat(actualConfig.get("schema.history.internal.producer.testClass"), is(this.getClass().getName())); - assertThat(actualConfig.get("schema.history.internal.consumer.testClass"), is(this.getClass().getName())); - } - - private MySQLConfig buildTestConfig(final String resourceFileName) throws IOException { - //Added 
to load Yaml file - Start - Yaml yaml = new Yaml(); - FileReader fileReader = new FileReader(getClass().getClassLoader().getResource(resourceFileName).getFile()); - Object data = yaml.load(fileReader); - if (data instanceof Map) { - Map propertyMap = (Map) data; - Map logPipelineMap = (Map) propertyMap.get("log-pipeline"); - Map sourceMap = (Map) logPipelineMap.get("source"); - Map kafkaConnectConfigMap = (Map) sourceMap.get("mysql"); - ObjectMapper mapper = new ObjectMapper(); - mapper.registerModule(new JavaTimeModule()); - String json = mapper.writeValueAsString(kafkaConnectConfigMap); - Reader reader = new StringReader(json); - return mapper.readValue(reader, MySQLConfig.class); - } - return null; - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/PostgreSQLConfigTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/PostgreSQLConfigTest.java deleted file mode 100644 index 036cda30aa..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/configuration/PostgreSQLConfigTest.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.configuration; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; -import org.junit.jupiter.api.Test; -import org.opensearch.dataprepper.plugins.kafkaconnect.util.Connector; -import org.yaml.snakeyaml.Yaml; - -import java.io.FileReader; -import java.io.IOException; -import java.io.Reader; -import java.io.StringReader; -import java.util.Map; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; - -public class PostgreSQLConfigTest { - @Test - public void test_get_postgresql_connectors() throws IOException { - PostgreSQLConfig testConfig = buildTestConfig("sample-postgres-pipeline.yaml"); - assertThat(testConfig, notNullValue()); - assertThat(testConfig.buildConnectors(), notNullValue()); - assertThat(testConfig.buildConnectors().size(), is(1)); - // verify Connector - Connector postgresqlConnector = testConfig.buildConnectors().get(0); - assertThat(postgresqlConnector, instanceOf(Connector.class)); - assertThat(postgresqlConnector.getName(), is("psql.public.customers")); - final Map actualConfig = postgresqlConnector.getConfig(); - assertThat(actualConfig.get("connector.class"), is(PostgreSQLConfig.CONNECTOR_CLASS)); - assertThat(actualConfig.get("plugin.name"), is("pgoutput")); - assertThat(actualConfig.get("database.hostname"), is("localhost")); - assertThat(actualConfig.get("database.port"), is("5432")); - assertThat(actualConfig.get("database.user"), is("debezium")); - assertThat(actualConfig.get("database.password"), is("dbz")); - assertThat(actualConfig.get("snapshot.mode"), is("initial")); - assertThat(actualConfig.get("topic.prefix"), is("psql")); - assertThat(actualConfig.get("database.dbname"), is("postgres")); - assertThat(actualConfig.get("table.include.list"), is("public.customers")); - } - - private PostgreSQLConfig buildTestConfig(final String resourceFileName) throws IOException { - //Added to load Yaml file - Start - Yaml yaml = new Yaml(); - FileReader fileReader = new 
FileReader(getClass().getClassLoader().getResource(resourceFileName).getFile()); - Object data = yaml.load(fileReader); - if (data instanceof Map) { - Map propertyMap = (Map) data; - Map logPipelineMap = (Map) propertyMap.get("log-pipeline"); - Map sourceMap = (Map) logPipelineMap.get("source"); - Map kafkaConnectConfigMap = (Map) sourceMap.get("postgresql"); - ObjectMapper mapper = new ObjectMapper(); - mapper.registerModule(new JavaTimeModule()); - String json = mapper.writeValueAsString(kafkaConnectConfigMap); - Reader reader = new StringReader(json); - return mapper.readValue(reader, PostgreSQLConfig.class); - } - return null; - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/DefaultKafkaConnectConfigSupplierTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/DefaultKafkaConnectConfigSupplierTest.java deleted file mode 100644 index d4bfb32504..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/DefaultKafkaConnectConfigSupplierTest.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.extension; - -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.MatcherAssert.assertThat; - -@ExtendWith(MockitoExtension.class) -public class DefaultKafkaConnectConfigSupplierTest { - @Mock - private KafkaConnectConfig kafkaConnectConfig; - - private DefaultKafkaConnectConfigSupplier createObjectUnderTest() { - return new DefaultKafkaConnectConfigSupplier(kafkaConnectConfig); - } - - @Test - void test_get_config() { - assertThat(createObjectUnderTest().getConfig(), equalTo(kafkaConnectConfig)); - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigExtensionTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigExtensionTest.java deleted file mode 100644 index e2a705978b..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigExtensionTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.extension; - -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.ArgumentCaptor; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.dataprepper.model.plugin.ExtensionPoints; -import org.opensearch.dataprepper.model.plugin.ExtensionProvider; - -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.mockito.Mockito.verify; - -@ExtendWith(MockitoExtension.class) -public class KafkaConnectConfigExtensionTest { - @Mock - private ExtensionPoints extensionPoints; - - @Mock - private KafkaConnectConfig kafkaConnectConfig; - - private KafkaConnectConfigExtension createObjectUnderTest() { - return 
new KafkaConnectConfigExtension(kafkaConnectConfig); - } - - @Test - void apply_should_addExtensionProvider() { - createObjectUnderTest().apply(extensionPoints); - final ArgumentCaptor extensionProviderArgumentCaptor = - ArgumentCaptor.forClass(ExtensionProvider.class); - - verify(extensionPoints).addExtensionProvider(extensionProviderArgumentCaptor.capture()); - - final ExtensionProvider actualExtensionProvider = extensionProviderArgumentCaptor.getValue(); - - assertThat(actualExtensionProvider, instanceOf(KafkaConnectConfigProvider.class)); - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigProviderTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigProviderTest.java deleted file mode 100644 index 3162f0d193..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigProviderTest.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.extension; - -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.dataprepper.model.plugin.ExtensionProvider; - -import java.util.Optional; - -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.CoreMatchers.sameInstance; -import static org.hamcrest.MatcherAssert.assertThat; - -@ExtendWith(MockitoExtension.class) -public class KafkaConnectConfigProviderTest { - @Mock - private KafkaConnectConfigSupplier kafkaConnectConfigSupplier; - - @Mock - private ExtensionProvider.Context context; - - private KafkaConnectConfigProvider createObjectUnderTest() { - return new KafkaConnectConfigProvider(kafkaConnectConfigSupplier); - } - - @Test - void supportedClass_returns_kafkaConnectConfigSupplier() { - assertThat(createObjectUnderTest().supportedClass(), equalTo(KafkaConnectConfigSupplier.class)); - } - - @Test - void provideInstance_returns_the_kafkaConnectConfigSupplier_from_the_constructor() { - final KafkaConnectConfigProvider objectUnderTest = createObjectUnderTest(); - - final Optional optionalKafkaConnectConfigSupplier = objectUnderTest.provideInstance(context); - assertThat(optionalKafkaConnectConfigSupplier, notNullValue()); - assertThat(optionalKafkaConnectConfigSupplier.isPresent(), equalTo(true)); - assertThat(optionalKafkaConnectConfigSupplier.get(), equalTo(kafkaConnectConfigSupplier)); - - final Optional anotherOptionalKafkaConnectConfigSupplier = objectUnderTest.provideInstance(context); - assertThat(anotherOptionalKafkaConnectConfigSupplier, notNullValue()); - assertThat(anotherOptionalKafkaConnectConfigSupplier.isPresent(), equalTo(true)); - assertThat(anotherOptionalKafkaConnectConfigSupplier.get(), sameInstance(optionalKafkaConnectConfigSupplier.get())); - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigTest.java deleted file mode 100644 index 672cb43903..0000000000 --- 
a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/extension/KafkaConnectConfigTest.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.extension; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; -import org.hamcrest.CoreMatchers; -import org.junit.jupiter.api.Test; -import org.opensearch.dataprepper.model.types.ByteCount; -import org.opensearch.dataprepper.parser.ByteCountDeserializer; -import org.opensearch.dataprepper.parser.DataPrepperDurationDeserializer; -import org.opensearch.dataprepper.parser.model.DataPrepperConfiguration; -import org.opensearch.dataprepper.plugins.kafka.configuration.AuthConfig; -import org.opensearch.dataprepper.plugins.kafka.configuration.AwsConfig; -import org.opensearch.dataprepper.plugins.kafka.configuration.EncryptionConfig; - -import java.io.File; -import java.io.IOException; -import java.io.Reader; -import java.io.StringReader; -import java.time.Duration; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -public class KafkaConnectConfigTest { - - private static SimpleModule simpleModule = new SimpleModule() - .addDeserializer(Duration.class, new DataPrepperDurationDeserializer()) - .addDeserializer(ByteCount.class, new ByteCountDeserializer()); - private static ObjectMapper OBJECT_MAPPER = new ObjectMapper(new YAMLFactory()).registerModule(simpleModule); - - private KafkaConnectConfig makeConfig(String filePath) throws IOException { - final File configurationFile = new File(filePath); - final DataPrepperConfiguration dataPrepperConfiguration = OBJECT_MAPPER.readValue(configurationFile, DataPrepperConfiguration.class); - assertThat(dataPrepperConfiguration, CoreMatchers.notNullValue()); - assertThat(dataPrepperConfiguration.getPipelineExtensions(), CoreMatchers.notNullValue()); - final Map kafkaConnectConfigMap = (Map) dataPrepperConfiguration.getPipelineExtensions().getExtensionMap().get("kafka_connect_config"); - String json = OBJECT_MAPPER.writeValueAsString(kafkaConnectConfigMap); - Reader reader = new StringReader(json); - return OBJECT_MAPPER.readValue(reader, KafkaConnectConfig.class); - } - - @Test - public void test_config_setter_getter() throws IOException { - KafkaConnectConfig testConfig = makeConfig("src/test/resources/sample-data-prepper-config-with-kafka-connect-config-extension.yaml"); - AuthConfig authConfig = new AuthConfig(); - AwsConfig awsConfig = new AwsConfig(); - EncryptionConfig encryptionConfig = new EncryptionConfig(); - List bootstrapServer = List.of("testhost:123"); - testConfig.setAuthConfig(authConfig); - testConfig.setAwsConfig(awsConfig); - testConfig.setEncryptionConfig(encryptionConfig); - testConfig.setBootstrapServers(bootstrapServer); - assertThat(testConfig.getAuthConfig(), is(authConfig)); - assertThat(testConfig.getAwsConfig(), is(awsConfig)); - assertThat(testConfig.getEncryptionConfig(), is(encryptionConfig)); - assertThat(testConfig.getBootstrapServers(), is(bootstrapServer)); - assertThat(testConfig.getConnectorStartTimeout().getSeconds(), is(3L)); - 
assertThat(testConfig.getConnectStartTimeout().getSeconds(), is(3L)); - } - - @Test - public void test_config_get_worker_properties() throws IOException { - final String bootstrapServers = "localhost:9092"; - final Properties authProperties = new Properties(); - authProperties.put("bootstrap.servers", bootstrapServers); - authProperties.put("testClass", KafkaConnectConfigTest.class); - authProperties.put("testKey", "testValue"); - KafkaConnectConfig testConfig = makeConfig("src/test/resources/sample-data-prepper-config-with-kafka-connect-config-extension.yaml"); - testConfig.setAuthProperties(authProperties); - // verify WorkerProperties - assertThat(testConfig.getWorkerProperties(), notNullValue()); - Map workerProperties = testConfig.getWorkerProperties().buildKafkaConnectPropertyMap(); - assertThat(workerProperties.get("bootstrap.servers"), is(bootstrapServers)); - assertThat(workerProperties.get("group.id"), is("test-group")); - assertThat(workerProperties.get("client.id"), is("test-client")); - assertThat(workerProperties.get("offset.storage.topic"), is("test-offsets")); - assertThat(workerProperties.get("config.storage.topic"), is("test-configs")); - assertThat(workerProperties.get("status.storage.topic"), is("test-status")); - assertThat(workerProperties.get("key.converter"), is("org.apache.kafka.connect.json.JsonConverter")); - assertThat(workerProperties.get("value.converter"), is("org.apache.kafka.connect.json.JsonConverter")); - assertThat(workerProperties.get("offset.storage.partitions"), is("2")); - assertThat(workerProperties.get("offset.flush.interval.ms"), is("6000")); - assertThat(workerProperties.get("offset.flush.timeout.ms"), is("500")); - assertThat(workerProperties.get("status.storage.partitions"), is("1")); - assertThat(workerProperties.get("heartbeat.interval.ms"), is("300")); - assertThat(workerProperties.get("session.timeout.ms"), is("3000")); - assertThat(workerProperties.get("scheduled.rebalance.max.delay.ms"), is("60000")); - assertThat(workerProperties.get("testClass"), is(this.getClass().getName())); - assertThat(workerProperties.get("producer.testClass"), is(this.getClass().getName())); - assertThat(workerProperties.get("testKey"), is(authProperties.getProperty("testKey"))); - assertThat(workerProperties.get("producer.testKey"), is(authProperties.getProperty("testKey"))); - } - - @Test - public void test_config_default_worker_properties() throws IOException { - KafkaConnectConfig testConfig = makeConfig("src/test/resources/sample-data-prepper-config-with-default-kafka-connect-config-extension.yaml"); - assertThat(testConfig, notNullValue()); - assertThat(testConfig.getConnectStartTimeout().getSeconds(), is(60L)); - assertThat(testConfig.getConnectorStartTimeout().getSeconds(), is(360L)); - assertThat(testConfig.getBootstrapServers(), nullValue()); - WorkerProperties testWorkerProperties = testConfig.getWorkerProperties(); - assertThat(testWorkerProperties, notNullValue()); - Map workerProperties = testWorkerProperties.buildKafkaConnectPropertyMap(); - assertThat(workerProperties.get("bootstrap.servers"), nullValue()); - assertThat(workerProperties.get("offset.storage.partitions"), is("25")); - assertThat(workerProperties.get("offset.flush.interval.ms"), is("60000")); - assertThat(workerProperties.get("offset.flush.timeout.ms"), is("5000")); - assertThat(workerProperties.get("status.storage.partitions"), is("5")); - assertThat(workerProperties.get("heartbeat.interval.ms"), is("3000")); - assertThat(workerProperties.get("session.timeout.ms"), is("30000")); - 
assertThat(workerProperties.get("scheduled.rebalance.max.delay.ms"), is("300000")); - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/meter/KafkaConnectMetricsTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/meter/KafkaConnectMetricsTest.java deleted file mode 100644 index 55ef90db5d..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/meter/KafkaConnectMetricsTest.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.meter; - -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.Mock; -import org.opensearch.dataprepper.metrics.PluginMetrics; - -import javax.management.MBeanServer; -import javax.management.MalformedObjectNameException; -import javax.management.ObjectName; -import java.util.Set; - -import static java.util.Collections.emptyList; -import static java.util.Collections.emptySet; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.notNullValue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.atLeastOnce; -import static org.mockito.Mockito.lenient; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -public class KafkaConnectMetricsTest { - @Mock - private PluginMetrics pluginMetrics; - - @Mock - private MBeanServer mBeanServer; - - private Iterable tags = emptyList(); - - @BeforeEach - void setUp() throws Exception { - pluginMetrics = mock(PluginMetrics.class); - mBeanServer = mock(MBeanServer.class); - lenient().when(mBeanServer.getAttribute(any(), any())).thenReturn(1); - } - - @Test - void testConstructor() { - assertThat(new KafkaConnectMetrics(pluginMetrics), notNullValue()); - when(mBeanServer.queryNames(any(), any())).thenReturn(emptySet()); - assertThat(new KafkaConnectMetrics(pluginMetrics, tags), notNullValue()); - } - - @Test - void testBindConnectMetrics() throws MalformedObjectNameException { - final KafkaConnectMetrics kafkaConnectMetrics = new KafkaConnectMetrics(pluginMetrics, mBeanServer, tags); - when(mBeanServer.queryNames(any(), any())).thenReturn(Set.of(new ObjectName("test:*"))); - kafkaConnectMetrics.bindConnectMetrics(); - verify(mBeanServer).queryNames(any(), any()); - verify(pluginMetrics, atLeastOnce()).gaugeWithTags(any(), any(), any(), any()); - } - - @Test - void testBindConnectorMetrics() throws MalformedObjectNameException { - final KafkaConnectMetrics kafkaConnectMetrics = new KafkaConnectMetrics(pluginMetrics, mBeanServer, tags); - when(mBeanServer.queryNames(any(), any())).thenReturn(Set.of(new ObjectName("test:type=test,connector=test,client-id=test1,node-id=test1,task=task1"))); - kafkaConnectMetrics.bindConnectorMetrics(); - verify(mBeanServer).queryNames(any(), any()); - verify(pluginMetrics, atLeastOnce()).gaugeWithTags(any(), any(), any(), any()); - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/KafkaConnectSourceTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/KafkaConnectSourceTest.java deleted file mode 100644 index 2e52176fab..0000000000 --- 
a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/KafkaConnectSourceTest.java +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.source; - -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.MockedStatic; -import org.mockito.junit.jupiter.MockitoExtension; -import org.mockito.stubbing.Answer; -import org.opensearch.dataprepper.metrics.PluginMetrics; -import org.opensearch.dataprepper.model.buffer.Buffer; -import org.opensearch.dataprepper.model.configuration.PipelineDescription; -import org.opensearch.dataprepper.model.record.Record; -import org.opensearch.dataprepper.plugins.kafka.configuration.AuthConfig; -import org.opensearch.dataprepper.plugins.kafka.configuration.AwsConfig; -import org.opensearch.dataprepper.plugins.kafka.configuration.EncryptionConfig; -import org.opensearch.dataprepper.plugins.kafka.extension.KafkaClusterConfigSupplier; -import org.opensearch.dataprepper.plugins.kafka.util.KafkaSecurityConfigurer; -import org.opensearch.dataprepper.plugins.kafkaconnect.configuration.MySQLConfig; -import org.opensearch.dataprepper.plugins.kafkaconnect.extension.KafkaConnectConfig; -import org.opensearch.dataprepper.plugins.kafkaconnect.extension.KafkaConnectConfigSupplier; -import org.opensearch.dataprepper.plugins.kafkaconnect.extension.WorkerProperties; -import org.opensearch.dataprepper.plugins.kafkaconnect.util.KafkaConnect; - -import java.util.Collections; -import java.util.List; - -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.lenient; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.mockStatic; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -@ExtendWith(MockitoExtension.class) -public class KafkaConnectSourceTest { - private final String TEST_PIPELINE_NAME = "test_pipeline"; - private KafkaConnectSource kafkaConnectSource; - - @Mock - private MySQLConfig mySQLConfig; - - @Mock - private KafkaConnectConfig kafkaConnectConfig; - - @Mock - private PluginMetrics pluginMetrics; - - @Mock - private PipelineDescription pipelineDescription; - - @Mock - private Buffer> buffer; - - @Mock - private KafkaConnect kafkaConnect; - - @Mock - private KafkaClusterConfigSupplier kafkaClusterConfigSupplier; - - @Mock - private KafkaConnectConfigSupplier kafkaConnectConfigSupplier; - - private String bootstrapServers = "localhost:9092"; - - public KafkaConnectSource createSourceUnderTest() { - return new MySQLSource(mySQLConfig, pluginMetrics, pipelineDescription, kafkaClusterConfigSupplier, kafkaConnectConfigSupplier); - } - - @BeforeEach - void setUp() { - WorkerProperties workerProperties = new WorkerProperties(); - workerProperties.setBootstrapServers(bootstrapServers); - kafkaConnectConfigSupplier = mock(KafkaConnectConfigSupplier.class); - lenient().when(kafkaConnectConfigSupplier.getConfig()).thenReturn(kafkaConnectConfig); - lenient().when(kafkaConnectConfig.getWorkerProperties()).thenReturn(workerProperties); - lenient().when(mySQLConfig.buildConnectors()).thenReturn(Collections.emptyList()); - - pipelineDescription = 
mock(PipelineDescription.class); - lenient().when(pipelineDescription.getPipelineName()).thenReturn(TEST_PIPELINE_NAME); - pluginMetrics = mock(PluginMetrics.class); - } - - @Test - void testStartKafkaConnectSource() throws InterruptedException { - try (MockedStatic mockedStatic = mockStatic(KafkaConnect.class); - MockedStatic mockedSecurityConfigurer = mockStatic(KafkaSecurityConfigurer.class)) { - mockedSecurityConfigurer.when(() -> KafkaSecurityConfigurer.setAuthProperties(any(), any(), any())).thenAnswer((Answer) invocation -> null); - kafkaConnect = mock(KafkaConnect.class); - doNothing().when(kafkaConnect).addConnectors(any()); - doNothing().when(kafkaConnect).start(); - doNothing().when(kafkaConnect).stop(); - // Set up the mock behavior for the static method getInstance() - mockedStatic.when(() -> KafkaConnect.getPipelineInstance(any(), any(), any(), any())).thenReturn(kafkaConnect); - kafkaConnectSource = createSourceUnderTest(); - kafkaConnectSource.start(buffer); - verify(kafkaConnect).addConnectors(any()); - verify(kafkaConnect).start(); - Thread.sleep(10); - kafkaConnectSource.stop(); - verify(kafkaConnect).stop(); - } - } - - @Test - void testStartKafkaConnectSourceError() { - WorkerProperties workerProperties = new WorkerProperties(); - workerProperties.setBootstrapServers(null); - lenient().when(kafkaConnectConfig.getWorkerProperties()).thenReturn(workerProperties); - try (MockedStatic mockedStatic = mockStatic(KafkaConnect.class); - MockedStatic mockedSecurityConfigurer = mockStatic(KafkaSecurityConfigurer.class)) { - mockedSecurityConfigurer.when(() -> KafkaSecurityConfigurer.setAuthProperties(any(), any(), any())).thenAnswer((Answer) invocation -> null); - kafkaConnect = mock(KafkaConnect.class); - // Set up the mock behavior for the static method getInstance() - mockedStatic.when(() -> KafkaConnect.getPipelineInstance(any(), any(), any(), any())).thenReturn(kafkaConnect); - kafkaConnectSource = createSourceUnderTest(); - assertThrows(IllegalArgumentException.class, () -> kafkaConnectSource.start(buffer)); - } - } - - @Test - void test_updateConfig_using_kafkaClusterConfigExtension() { - final List bootstrapServers = List.of("localhost:9092"); - final AuthConfig authConfig = mock(AuthConfig.class); - final AwsConfig awsConfig = mock(AwsConfig.class); - final EncryptionConfig encryptionConfig = mock(EncryptionConfig.class); - doNothing().when(kafkaConnectConfig).setBootstrapServers(any()); - doNothing().when(kafkaConnectConfig).setAuthConfig(any()); - doNothing().when(kafkaConnectConfig).setAwsConfig(any()); - doNothing().when(kafkaConnectConfig).setEncryptionConfig(any()); - when(kafkaConnectConfig.getAuthConfig()).thenReturn(null); - when(kafkaConnectConfig.getAwsConfig()).thenReturn(null); - when(kafkaConnectConfig.getEncryptionConfig()).thenReturn(null); - when(kafkaConnectConfig.getBootstrapServers()).thenReturn(null); - when(kafkaClusterConfigSupplier.getBootStrapServers()).thenReturn(bootstrapServers); - when(kafkaClusterConfigSupplier.getAuthConfig()).thenReturn(authConfig); - when(kafkaClusterConfigSupplier.getAwsConfig()).thenReturn(awsConfig); - when(kafkaClusterConfigSupplier.getEncryptionConfig()).thenReturn(encryptionConfig); - try (MockedStatic mockedStatic = mockStatic(KafkaSecurityConfigurer.class)) { - mockedStatic.when(() -> KafkaSecurityConfigurer.setAuthProperties(any(), any(), any())).thenAnswer((Answer) invocation -> null); - kafkaConnectSource = createSourceUnderTest(); - verify(kafkaConnectConfig).setBootstrapServers(bootstrapServers); - 
verify(kafkaConnectConfig).setAuthConfig(authConfig); - verify(kafkaConnectConfig).setAwsConfig(awsConfig); - verify(kafkaConnectConfig).setEncryptionConfig(encryptionConfig); - } - } - - @Test - void test_updateConfig_not_using_kafkaClusterConfigExtension() { - final List bootstrapServers = List.of("localhost:9092"); - final AuthConfig authConfig = mock(AuthConfig.class); - final AwsConfig awsConfig = mock(AwsConfig.class); - final EncryptionConfig encryptionConfig = mock(EncryptionConfig.class); - lenient().doNothing().when(kafkaConnectConfig).setBootstrapServers(any()); - lenient().doNothing().when(kafkaConnectConfig).setAuthConfig(any()); - lenient().doNothing().when(kafkaConnectConfig).setAwsConfig(any()); - lenient().doNothing().when(kafkaConnectConfig).setEncryptionConfig(any()); - lenient().when(kafkaConnectConfig.getAuthConfig()).thenReturn(authConfig); - lenient().when(kafkaConnectConfig.getAwsConfig()).thenReturn(awsConfig); - lenient().when(kafkaConnectConfig.getEncryptionConfig()).thenReturn(encryptionConfig); - lenient().when(kafkaConnectConfig.getBootstrapServers()).thenReturn(bootstrapServers); - try (MockedStatic mockedStatic = mockStatic(KafkaSecurityConfigurer.class)) { - mockedStatic.when(() -> KafkaSecurityConfigurer.setAuthProperties(any(), any(), any())).thenAnswer((Answer) invocation -> null); - kafkaConnectSource = createSourceUnderTest(); - verify(kafkaConnectConfig, never()).setBootstrapServers(any()); - verify(kafkaConnectConfig, never()).setAuthConfig(any()); - verify(kafkaConnectConfig, never()).setAwsConfig(any()); - verify(kafkaConnectConfig, never()).setEncryptionConfig(any()); - } - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/MongoDBSourceTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/MongoDBSourceTest.java deleted file mode 100644 index fde37870e2..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/MongoDBSourceTest.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.source; - -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.MockedStatic; -import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.dataprepper.aws.api.AwsCredentialsSupplier; -import org.opensearch.dataprepper.metrics.PluginMetrics; -import org.opensearch.dataprepper.model.acknowledgements.AcknowledgementSetManager; -import org.opensearch.dataprepper.model.buffer.Buffer; -import org.opensearch.dataprepper.model.codec.ByteDecoder; -import org.opensearch.dataprepper.model.configuration.PipelineDescription; -import org.opensearch.dataprepper.model.record.Record; -import org.opensearch.dataprepper.model.source.coordinator.SourceCoordinator; -import org.opensearch.dataprepper.plugins.kafkaconnect.configuration.MongoDBConfig; -import org.opensearch.dataprepper.plugins.kafkaconnect.source.mongoDB.MongoDBService; -import org.opensearch.dataprepper.plugins.kafkaconnect.source.mongoDB.MongoDBSnapshotProgressState; - -import java.util.List; - -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.MatcherAssert.assertThat; 
-import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.mockStatic; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -@ExtendWith(MockitoExtension.class) -public class MongoDBSourceTest { - @Mock - private MongoDBConfig mongoDBConfig; - - @Mock - private PluginMetrics pluginMetrics; - - @Mock - private PipelineDescription pipelineDescription; - - @Mock - private AwsCredentialsSupplier awsCredentialsSupplier; - - @Mock - private AcknowledgementSetManager acknowledgementSetManager; - - @Mock - private SourceCoordinator sourceCoordinator; - - @Mock - private MongoDBService mongoDBService; - - @Mock - private Buffer> buffer; - - @BeforeEach - void setup() { - mongoDBConfig = mock(MongoDBConfig.class); - sourceCoordinator = mock(SourceCoordinator.class); - } - - @Test - void testConstructorValidations() { - when(mongoDBConfig.getIngestionMode()).thenReturn(MongoDBConfig.IngestionMode.EXPORT_STREAM); - assertThrows(IllegalArgumentException.class, () -> new MongoDBSource( - mongoDBConfig, - pluginMetrics, - pipelineDescription, - acknowledgementSetManager, - awsCredentialsSupplier, - null, - null)); - } - - @Test - void testConstructorValidations_invalidCollectionName() { - MongoDBConfig.CollectionConfig collectionConfig = mock(MongoDBConfig.CollectionConfig.class); - when(collectionConfig.getCollectionName()).thenReturn("invalidName"); - when(mongoDBConfig.getIngestionMode()).thenReturn(MongoDBConfig.IngestionMode.EXPORT); - when(mongoDBConfig.getCollections()).thenReturn(List.of(collectionConfig)); - assertThrows(IllegalArgumentException.class, () -> new MongoDBSource( - mongoDBConfig, - pluginMetrics, - pipelineDescription, - acknowledgementSetManager, - awsCredentialsSupplier, - null, - null)); - } - - @Test - void testExportConstructor() { - when(mongoDBConfig.getIngestionMode()).thenReturn(MongoDBConfig.IngestionMode.EXPORT); - doNothing().when(sourceCoordinator).giveUpPartitions(); - MongoDBSource mongoDBSource = new MongoDBSource( - mongoDBConfig, - pluginMetrics, - pipelineDescription, - acknowledgementSetManager, - awsCredentialsSupplier, - null, - null); - mongoDBSource.setSourceCoordinator(sourceCoordinator); - assertThat(mongoDBSource.getPartitionProgressStateClass(), equalTo(MongoDBSnapshotProgressState.class)); - assertThat(mongoDBSource.getDecoder(), instanceOf(ByteDecoder.class)); - try (MockedStatic mockedStatic = mockStatic((MongoDBService.class))) { - mongoDBService = mock(MongoDBService.class); - doNothing().when(mongoDBService).start(); - doNothing().when(mongoDBService).stop(); - mockedStatic.when(() -> MongoDBService.create(any(), any(), any(), any(), any())).thenReturn(mongoDBService); - mongoDBSource.start(buffer); - verify(mongoDBService).start(); - mongoDBSource.stop(); - verify(mongoDBService).stop(); - verify(sourceCoordinator).giveUpPartitions(); - } - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/MySQLSourceTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/MySQLSourceTest.java deleted file mode 100644 index 3c0fbb0046..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/MySQLSourceTest.java +++ /dev/null @@ -1,33 +0,0 @@ -/* 
- * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.source; - -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.dataprepper.metrics.PluginMetrics; -import org.opensearch.dataprepper.model.configuration.PipelineDescription; -import org.opensearch.dataprepper.plugins.kafkaconnect.configuration.MySQLConfig; - -import static org.junit.jupiter.api.Assertions.assertThrows; - -@ExtendWith(MockitoExtension.class) -public class MySQLSourceTest { - @Mock - private MySQLConfig mySQLConfig; - - @Mock - private PluginMetrics pluginMetrics; - - @Mock - private PipelineDescription pipelineDescription; - - @Test - void testConstructorValidations() { - assertThrows(IllegalArgumentException.class, () -> new MySQLSource(mySQLConfig, pluginMetrics, pipelineDescription, null, null)); - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/PostgreSQLSourceTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/PostgreSQLSourceTest.java deleted file mode 100644 index 2cdf8973cf..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/PostgreSQLSourceTest.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.source; - -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.dataprepper.metrics.PluginMetrics; -import org.opensearch.dataprepper.model.configuration.PipelineDescription; -import org.opensearch.dataprepper.plugins.kafkaconnect.configuration.PostgreSQLConfig; - -import static org.junit.jupiter.api.Assertions.assertThrows; - -@ExtendWith(MockitoExtension.class) -public class PostgreSQLSourceTest { - @Mock - private PostgreSQLConfig postgreSQLConfig; - - @Mock - private PluginMetrics pluginMetrics; - - @Mock - private PipelineDescription pipelineDescription; - - @Test - void testConstructorValidations() { - assertThrows(IllegalArgumentException.class, () -> new PostgreSQLSource(postgreSQLConfig, pluginMetrics, pipelineDescription, null, null)); - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBPartitionCreationSupplierTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBPartitionCreationSupplierTest.java deleted file mode 100644 index c5133acf6a..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBPartitionCreationSupplierTest.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.source.mongoDB; - -import com.mongodb.client.FindIterable; -import com.mongodb.client.MongoClient; -import com.mongodb.client.MongoClients; -import com.mongodb.client.MongoCollection; -import com.mongodb.client.MongoCursor; -import 
com.mongodb.client.MongoDatabase; -import org.bson.Document; -import org.bson.conversions.Bson; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.MockedStatic; -import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.dataprepper.model.source.coordinator.PartitionIdentifier; -import org.opensearch.dataprepper.plugins.kafkaconnect.configuration.CredentialsConfig; -import org.opensearch.dataprepper.plugins.kafkaconnect.configuration.MongoDBConfig; - -import java.time.Instant; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.is; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.lenient; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.mockStatic; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -@ExtendWith(MockitoExtension.class) -public class MongoDBPartitionCreationSupplierTest { - private static String TEST_COLLECTION_NAME = "test.collection"; - @Mock - private MongoDBConfig mongoDBConfig; - - @Mock - private MongoDBConfig.CollectionConfig collectionConfig; - - private MongoDBPartitionCreationSupplier testSupplier; - - @BeforeEach - public void setup() { - mongoDBConfig = mock(MongoDBConfig.class); - collectionConfig = mock(MongoDBConfig.CollectionConfig.class); - lenient().when(collectionConfig.getCollectionName()).thenReturn(TEST_COLLECTION_NAME); - lenient().when(mongoDBConfig.getCollections()).thenReturn(Collections.singletonList(collectionConfig)); - lenient().when(mongoDBConfig.getCredentialsConfig()).thenReturn(new CredentialsConfig(new CredentialsConfig.PlainText("user", "user"), null)); - lenient().when(mongoDBConfig.getExportConfig()).thenReturn(new MongoDBConfig.ExportConfig()); - testSupplier = new MongoDBPartitionCreationSupplier(mongoDBConfig); - } - - @Test - public void test_returnEmptyPartitionListIfAlreadyPartitioned() { - final Map globalStateMap = new HashMap<>(); - final Map partitionedCollections = new HashMap<>(); - partitionedCollections.put(TEST_COLLECTION_NAME, Instant.now().toEpochMilli()); - globalStateMap.put(MongoDBPartitionCreationSupplier.GLOBAL_STATE_PARTITIONED_COLLECTION_KEY, partitionedCollections); - List partitions = testSupplier.apply(globalStateMap); - assert (partitions.isEmpty()); - } - - @Test - public void test_returnPartitionsForCollection() { - try (MockedStatic mockedMongoClientsStatic = mockStatic(MongoClients.class)) { - // Given a collection with 5000 items which should be split to two partitions: 0-3999 and 4000-4999 - MongoClient mongoClient = mock(MongoClient.class); - MongoDatabase mongoDatabase = mock(MongoDatabase.class); - MongoCollection col = mock(MongoCollection.class); - FindIterable findIterable = mock(FindIterable.class); - MongoCursor cursor = mock(MongoCursor.class); - mockedMongoClientsStatic.when(() -> MongoClients.create(anyString())).thenReturn(mongoClient); - when(mongoClient.getDatabase(anyString())).thenReturn(mongoDatabase); - 
when(mongoDatabase.getCollection(anyString())).thenReturn(col); - when(col.find()).thenReturn(findIterable); - when(col.find(any(Bson.class))).thenReturn(findIterable); - when(findIterable.projection(any())).thenReturn(findIterable); - when(findIterable.sort(any())).thenReturn(findIterable); - when(findIterable.skip(anyInt())).thenReturn(findIterable); - when(findIterable.limit(anyInt())).thenReturn(findIterable); - when(findIterable.iterator()).thenReturn(cursor); - when(cursor.hasNext()).thenReturn(true, true, false); - // mock startDoc and endDoc returns, 0-3999, and 4000-4999 - when(cursor.next()) - .thenReturn(new Document("_id", "0")) - .thenReturn(new Document("_id", "4000")); - when(findIterable.first()) - .thenReturn(new Document("_id", "3999")) - .thenReturn(null) - .thenReturn(new Document("_id", "4999")); - // When Apply Partition create logics - final Map globalStateMap = new HashMap<>(); - List partitions = testSupplier.apply(globalStateMap); - // Then dependencies are called - verify(mongoClient).getDatabase(eq("test")); - verify(mongoClient, times(1)).close(); - verify(mongoDatabase).getCollection(eq("collection")); - // And partitions are created - assertThat(partitions.size(), is(2)); - assertThat(partitions.get(0).getPartitionKey(), is("test.collection|0|3999|java.lang.String")); - assertThat(partitions.get(1).getPartitionKey(), is("test.collection|4000|4999|java.lang.String")); - } - } - - @Test - public void test_returnPartitionsForCollection_error() { - when(collectionConfig.getCollectionName()).thenReturn("invalidDBName"); - final Map globalStateMap = new HashMap<>(); - assertThrows(IllegalArgumentException.class, () -> testSupplier.apply(globalStateMap)); - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBServiceTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBServiceTest.java deleted file mode 100644 index a460af7d4c..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBServiceTest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.source.mongoDB; - -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.MockedConstruction; -import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.dataprepper.metrics.PluginMetrics; -import org.opensearch.dataprepper.model.acknowledgements.AcknowledgementSetManager; -import org.opensearch.dataprepper.model.buffer.Buffer; -import org.opensearch.dataprepper.model.record.Record; -import org.opensearch.dataprepper.model.source.coordinator.SourceCoordinator; -import org.opensearch.dataprepper.plugins.kafkaconnect.configuration.MongoDBConfig; - -import java.util.Optional; - -import static org.mockito.Mockito.mockConstruction; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -@ExtendWith(MockitoExtension.class) -public class MongoDBServiceTest { - @Mock - private MongoDBConfig mongoDBConfig; - - @Mock - private Buffer> buffer; - - @Mock - private AcknowledgementSetManager acknowledgementSetManager; - - @Mock - private SourceCoordinator sourceCoordinator; - - @Mock - private MongoDBPartitionCreationSupplier 
mongoDBPartitionCreationSupplier; - - @Mock - private PluginMetrics pluginMetrics; - - @Test - public void testConstructor() { - createObjectUnderTest(); - verify(sourceCoordinator).initialize(); - } - - @Test - public void testStartAndStop() throws InterruptedException { - when(sourceCoordinator.getNextPartition(mongoDBPartitionCreationSupplier)).thenReturn(Optional.empty()); - MongoDBService testObject = createObjectUnderTest(); - testObject.start(); - Thread.sleep(100); - testObject.stop(); - } - - private MongoDBService createObjectUnderTest() { - try (final MockedConstruction mockedConstruction = mockConstruction(MongoDBPartitionCreationSupplier.class, (mock, context) -> { - mongoDBPartitionCreationSupplier = mock; - })) { - return MongoDBService.create(mongoDBConfig, sourceCoordinator, buffer, acknowledgementSetManager, pluginMetrics); - } - } -} diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBSnapshotWorkerTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBSnapshotWorkerTest.java deleted file mode 100644 index ef4179d526..0000000000 --- a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/source/mongoDB/MongoDBSnapshotWorkerTest.java +++ /dev/null @@ -1,245 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.plugins.kafkaconnect.source.mongoDB; - -import com.mongodb.client.FindIterable; -import com.mongodb.client.MongoClient; -import com.mongodb.client.MongoClients; -import com.mongodb.client.MongoCollection; -import com.mongodb.client.MongoCursor; -import com.mongodb.client.MongoDatabase; -import io.micrometer.core.instrument.Counter; -import org.bson.Document; -import org.bson.conversions.Bson; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; -import org.mockito.ArgumentCaptor; -import org.mockito.Mock; -import org.mockito.MockedStatic; -import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.dataprepper.metrics.PluginMetrics; -import org.opensearch.dataprepper.model.acknowledgements.AcknowledgementSet; -import org.opensearch.dataprepper.model.acknowledgements.AcknowledgementSetManager; -import org.opensearch.dataprepper.model.buffer.Buffer; -import org.opensearch.dataprepper.model.event.Event; -import org.opensearch.dataprepper.model.record.Record; -import org.opensearch.dataprepper.model.source.coordinator.SourceCoordinator; -import org.opensearch.dataprepper.model.source.coordinator.SourcePartition; -import org.opensearch.dataprepper.plugins.kafkaconnect.configuration.CredentialsConfig; -import org.opensearch.dataprepper.plugins.kafkaconnect.configuration.MongoDBConfig; - -import java.util.List; -import java.util.Optional; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.function.Consumer; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.mockito.ArgumentMatchers.any; -import static 
org.mockito.ArgumentMatchers.anyBoolean;
-import static org.mockito.ArgumentMatchers.anyInt;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.Mockito.doAnswer;
-import static org.mockito.Mockito.doNothing;
-import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.lenient;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.mockStatic;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-@ExtendWith(MockitoExtension.class)
-public class MongoDBSnapshotWorkerTest {
-    @Mock
-    private SourceCoordinator sourceCoordinator;
-    @Mock
-    private Buffer> buffer;
-    @Mock
-    private MongoDBPartitionCreationSupplier mongoDBPartitionCreationSupplier;
-    @Mock
-    private PluginMetrics pluginMetrics;
-    @Mock
-    private AcknowledgementSetManager acknowledgementSetManager;
-    @Mock
-    private MongoDBConfig mongoDBConfig;
-    @Mock
-    private SourcePartition sourcePartition;
-    @Mock
-    private Counter counter;
-    private MongoDBSnapshotWorker testWorker;
-    private ExecutorService executorService;
-
-
-    @BeforeEach
-    public void setup() throws TimeoutException {
-        lenient().when(mongoDBConfig.getExportConfig()).thenReturn(new MongoDBConfig.ExportConfig());
-        lenient().when(mongoDBConfig.getCredentialsConfig()).thenReturn(new CredentialsConfig(new CredentialsConfig.PlainText("user", "user"), null));
-        lenient().when(buffer.isByteBuffer()).thenReturn(false);
-        lenient().doNothing().when(buffer).write(any(), anyInt());
-        lenient().doNothing().when(sourceCoordinator).saveProgressStateForPartition(anyString(), any());
-        lenient().when(pluginMetrics.counter(anyString())).thenReturn(counter);
-        executorService = Executors.newSingleThreadExecutor();
-        testWorker = new MongoDBSnapshotWorker(sourceCoordinator, buffer, mongoDBPartitionCreationSupplier, pluginMetrics, acknowledgementSetManager, mongoDBConfig);
-    }
-
-    @Test
-    public void test_shouldSleepIfNoPartitionRetrieved() throws InterruptedException {
-        when(sourceCoordinator.getNextPartition(mongoDBPartitionCreationSupplier)).thenReturn(Optional.empty());
-        final Future future = executorService.submit(() -> testWorker.run());
-        Thread.sleep(100);
-        executorService.shutdown();
-        future.cancel(true);
-        assertThat(future.isCancelled(), equalTo(true));
-        assertThat(executorService.awaitTermination(100, TimeUnit.MILLISECONDS), equalTo(true));
-    }
-
-    @ParameterizedTest
-    @CsvSource({
-            "test.collection|0|1|java.lang.Integer",
-            "test.collection|0|1|java.lang.Double",
-            "test.collection|0|1|java.lang.String",
-            "test.collection|0|1|java.lang.Long",
-            "test.collection|000000000000000000000000|000000000000000000000001|org.bson.types.ObjectId"
-    })
-    public void test_shouldProcessPartitionSuccess(final String partitionKey) throws InterruptedException, TimeoutException {
-        this.mockDependencyAndProcessPartition(partitionKey, true);
-
-        final ArgumentCaptor> ingestDataCapture = ArgumentCaptor.forClass(Record.class);
-        verify(buffer, times(2)).write(ingestDataCapture.capture(), anyInt());
-        List> capturedData = ingestDataCapture.getAllValues();
-        String data1 = ((Event) capturedData.get(0).getData()).jsonBuilder().includeTags(null).toJsonString();
-        String data2 = ((Event) capturedData.get(1).getData()).jsonBuilder().includeTags(null).toJsonString();
-        assertThat(data1, is("{\"_id\":0,\"__source_db\":\"test\",\"__collection\":\"collection\",\"__op\":\"create\",\"__source_ts_ms\":0}"));
-        assertThat(data2, is("{\"_id\":1,\"__source_db\":\"test\",\"__collection\":\"collection\",\"__op\":\"create\",\"__source_ts_ms\":0}"));
-    }
-
-    @Test
-    public void test_shouldProcessPartitionSuccess_byteBuffer() throws Exception {
-        when(buffer.isByteBuffer()).thenReturn(true);
-        doNothing().when(buffer).writeBytes(any(byte[].class), any(), anyInt());
-        this.mockDependencyAndProcessPartition("test.collection|0|1|java.lang.Integer", true);
-
-        final ArgumentCaptor ingestDataCapture = ArgumentCaptor.forClass(byte[].class);
-        verify(buffer, times(2)).writeBytes(ingestDataCapture.capture(), any(), anyInt());
-        List capturedData = ingestDataCapture.getAllValues();
-        String data1 = new String(capturedData.get(0));
-        String data2 = new String(capturedData.get(1));
-        assertThat(data1, is("{\"_id\":0,\"__source_db\":\"test\",\"__collection\":\"collection\",\"__op\":\"create\",\"__source_ts_ms\":0}"));
-        assertThat(data2, is("{\"_id\":1,\"__source_db\":\"test\",\"__collection\":\"collection\",\"__op\":\"create\",\"__source_ts_ms\":0}"));
-    }
-
-    @Test
-    public void test_shouldProcessPartitionSuccess_ackEnabled() throws InterruptedException, TimeoutException {
-        MongoDBConfig.ExportConfig exportConfig = mock(MongoDBConfig.ExportConfig.class);
-        when(exportConfig.getAcknowledgements()).thenReturn(true);
-        when(mongoDBConfig.getExportConfig()).thenReturn(exportConfig);
-        final AcknowledgementSet acknowledgementSet = mock(AcknowledgementSet.class);
-        doAnswer(invocation -> {
-            Consumer consumer = invocation.getArgument(0);
-            consumer.accept(true);
-            return acknowledgementSet;
-        }).when(acknowledgementSetManager).create(any(Consumer.class), any());
-        doNothing().when(sourceCoordinator).updatePartitionForAcknowledgmentWait(anyString(), any());
-
-        this.mockDependencyAndProcessPartition("test.collection|0|1|java.lang.Integer", true);
-
-        final ArgumentCaptor> ingestDataCapture = ArgumentCaptor.forClass(Record.class);
-        verify(buffer, times(2)).write(ingestDataCapture.capture(), anyInt());
-        List> capturedData = ingestDataCapture.getAllValues();
-        String data1 = ((Event) capturedData.get(0).getData()).jsonBuilder().includeTags(null).toJsonString();
-        String data2 = ((Event) capturedData.get(1).getData()).jsonBuilder().includeTags(null).toJsonString();
-        assertThat(data1, is("{\"_id\":0,\"__source_db\":\"test\",\"__collection\":\"collection\",\"__op\":\"create\",\"__source_ts_ms\":0}"));
-        assertThat(data2, is("{\"_id\":1,\"__source_db\":\"test\",\"__collection\":\"collection\",\"__op\":\"create\",\"__source_ts_ms\":0}"));
-    }
-
-    @Test
-    public void test_shouldGiveUpPartitionIfExceptionOccurred() throws InterruptedException {
-        doNothing().when(sourceCoordinator).giveUpPartitions();
-        this.mockDependencyAndProcessPartition("invalidPartition", false);
-        verify(sourceCoordinator, times(1)).giveUpPartitions();
-    }
-
-    @Test
-    public void test_shouldCountFailureIfBufferFailed() throws Exception {
-        doThrow(new RuntimeException("")).when(buffer).write(any(), anyInt());
-        this.mockDependencyAndProcessPartition("test.collection|0|1|java.lang.Integer", false);
-        final ArgumentCaptor progressStateCapture = ArgumentCaptor.forClass(MongoDBSnapshotProgressState.class);
-        verify(sourceCoordinator, times(1)).saveProgressStateForPartition(anyString(), progressStateCapture.capture());
-        List progressStates = progressStateCapture.getAllValues();
-        assertThat(progressStates.get(0).getTotal(), is(2L));
-        assertThat(progressStates.get(0).getSuccess(), is(0L));
-        assertThat(progressStates.get(0).getFailed(), is(2L));
-    }
-
-    @Test
-    public void test_shouldThreadSleepIfExceptionOccurred() throws InterruptedException {
-        doThrow(new RuntimeException("")).when(sourceCoordinator).getNextPartition(mongoDBPartitionCreationSupplier);
-        final Future future = executorService.submit(() -> testWorker.run());
-        Thread.sleep(100);
-        executorService.shutdown();
-        future.cancel(true);
-        assertThat(future.isCancelled(), equalTo(true));
-        assertThat(executorService.awaitTermination(100, TimeUnit.MILLISECONDS), equalTo(true));
-    }
-
-    private void mockDependencyAndProcessPartition(String partitionKey, boolean shouldProcessSucceed) throws InterruptedException {
-        lenient().when(sourcePartition.getPartitionKey()).thenReturn(partitionKey);
-        lenient().doNothing().when(sourceCoordinator).completePartition(anyString(), anyBoolean());
-        lenient().when(sourceCoordinator.getNextPartition(mongoDBPartitionCreationSupplier))
-                .thenReturn(Optional.of(sourcePartition))
-                .thenReturn(Optional.empty());
-
-        MongoClient mongoClient = mock(MongoClient.class);
-        MongoDatabase mongoDatabase = mock(MongoDatabase.class);
-        MongoCollection col = mock(MongoCollection.class);
-        FindIterable findIterable = mock(FindIterable.class);
-        MongoCursor cursor = mock(MongoCursor.class);
-        lenient().when(mongoClient.getDatabase(anyString())).thenReturn(mongoDatabase);
-        lenient().when(mongoDatabase.getCollection(anyString())).thenReturn(col);
-        lenient().when(col.find(any(Bson.class))).thenReturn(findIterable);
-        lenient().when(findIterable.iterator()).thenReturn(cursor);
-        lenient().when(cursor.hasNext()).thenReturn(true, true, false);
-        lenient().when(cursor.next())
-                .thenReturn(new Document("_id", 0))
-                .thenReturn(new Document("_id", 1));
-
-        final Future future = executorService.submit(() -> {
-            try (MockedStatic mockedMongoClientsStatic = mockStatic(MongoClients.class)) {
-                mockedMongoClientsStatic.when(() -> MongoClients.create(anyString())).thenReturn(mongoClient);
-                testWorker.run();
-            }
-        });
-        Thread.sleep(1000);
-        executorService.shutdown();
-        future.cancel(true);
-        assertThat(future.isCancelled(), equalTo(true));
-        assertThat(executorService.awaitTermination(1000, TimeUnit.MILLISECONDS), equalTo(true));
-        if (shouldProcessSucceed) {
-            // Verify Results
-            verify(cursor, times(2)).next();
-
-            final ArgumentCaptor progressStateCapture = ArgumentCaptor.forClass(MongoDBSnapshotProgressState.class);
-            verify(sourceCoordinator, times(1)).saveProgressStateForPartition(eq(partitionKey), progressStateCapture.capture());
-            List progressStates = progressStateCapture.getAllValues();
-            assertThat(progressStates.get(0).getTotal(), is(2L));
-            assertThat(progressStates.get(0).getSuccess(), is(2L));
-            assertThat(progressStates.get(0).getFailed(), is(0L));
-
-            verify(mongoClient, times(1)).close();
-        }
-    }
-}
-
diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/ConnectorTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/ConnectorTest.java
deleted file mode 100644
index f8d377ae37..0000000000
--- a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/ConnectorTest.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright OpenSearch Contributors
- * SPDX-License-Identifier: Apache-2.0
- */
-
-package org.opensearch.dataprepper.plugins.kafkaconnect.util;
-
-import org.junit.jupiter.api.Test;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.is;
-
-public class ConnectorTest {
-    @Test
-    void testGettersOfConnector() {
-        final String name = "connectorName";
-        final Boolean allowReplace = false;
-        final Map config = new HashMap<>();
-        final Connector connector = new Connector(name, config, allowReplace);
-        assertThat(connector.getName(), is(name));
-        assertThat(connector.getConfig(), is(config));
-        assertThat(connector.getConfig().get("name"), is(name));
-        assertThat(connector.getAllowReplace(), is(allowReplace));
-    }
-}
diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/KafkaConnectTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/KafkaConnectTest.java
deleted file mode 100644
index eb13027378..0000000000
--- a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/KafkaConnectTest.java
+++ /dev/null
@@ -1,342 +0,0 @@
-/*
- * Copyright OpenSearch Contributors
- * SPDX-License-Identifier: Apache-2.0
- */
-
-package org.opensearch.dataprepper.plugins.kafkaconnect.util;
-
-import org.apache.kafka.connect.connector.policy.ConnectorClientConfigOverridePolicy;
-import org.apache.kafka.connect.errors.AlreadyExistsException;
-import org.apache.kafka.connect.errors.NotFoundException;
-import org.apache.kafka.connect.runtime.Connect;
-import org.apache.kafka.connect.runtime.Herder;
-import org.apache.kafka.connect.runtime.Worker;
-import org.apache.kafka.connect.runtime.WorkerConfigTransformer;
-import org.apache.kafka.connect.runtime.distributed.DistributedConfig;
-import org.apache.kafka.connect.runtime.distributed.DistributedHerder;
-import org.apache.kafka.connect.runtime.distributed.NotLeaderException;
-import org.apache.kafka.connect.runtime.isolation.Plugins;
-import org.apache.kafka.connect.runtime.rest.ConnectRestServer;
-import org.apache.kafka.connect.runtime.rest.RestClient;
-import org.apache.kafka.connect.runtime.rest.RestServer;
-import org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo;
-import org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo;
-import org.apache.kafka.connect.runtime.rest.entities.ConnectorType;
-import org.apache.kafka.connect.storage.Converter;
-import org.apache.kafka.connect.storage.KafkaOffsetBackingStore;
-import org.apache.kafka.connect.util.Callback;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mock;
-import org.mockito.MockedConstruction;
-import org.mockito.MockedStatic;
-import org.opensearch.dataprepper.metrics.PluginMetrics;
-import org.opensearch.dataprepper.plugins.kafkaconnect.extension.WorkerProperties;
-import org.opensearch.dataprepper.plugins.kafkaconnect.meter.KafkaConnectMetrics;
-
-import java.net.URI;
-import java.time.Clock;
-import java.time.Duration;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.not;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.mockito.Mockito.any;
-import static org.mockito.Mockito.doAnswer;
-import static org.mockito.Mockito.doNothing;
-import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.eq;
-import static org.mockito.Mockito.lenient;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.mockConstruction;
-import static org.mockito.Mockito.mockStatic;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-public class KafkaConnectTest {
-    private static final String TEST_PIPELINE_NAME = "test";
-    private static final WorkerProperties DEFAULT_WORDER_PROPERTY = new WorkerProperties();
-    private static final long TEST_CONNECTOR_TIMEOUT_MS = 360000L; // 360 seconds
-    private static final long TEST_CONNECT_TIMEOUT_MS = 60000L; // 60 seconds
-    private static final Duration TEST_CONNECTOR_TIMEOUT = Duration.ofMillis(TEST_CONNECTOR_TIMEOUT_MS);
-    private static final Duration TEST_CONNECT_TIMEOUT = Duration.ofMillis(TEST_CONNECT_TIMEOUT_MS);
-    @Mock
-    private KafkaConnectMetrics kafkaConnectMetrics;
-
-    @Mock
-    private PluginMetrics pluginMetrics;
-
-    @Mock
-    private DistributedHerder distributedHerder;
-
-    @Mock
-    private RestServer rest;
-
-    @Mock
-    private Connect connect;
-
-
-    @BeforeEach
-    void setUp() throws Exception {
-        kafkaConnectMetrics = mock(KafkaConnectMetrics.class);
-        distributedHerder = mock(DistributedHerder.class);
-        rest = mock(RestServer.class);
-        connect = mock(Connect.class);
-        DEFAULT_WORDER_PROPERTY.setBootstrapServers("localhost:9002");
-
-        lenient().when(connect.isRunning()).thenReturn(false).thenReturn(true);
-        lenient().when(distributedHerder.connectors()).thenReturn(new ArrayList<>());
-        ConnectorStateInfo runningState = new ConnectorStateInfo("newConnector", new ConnectorStateInfo.ConnectorState("RUNNING", "worker", "msg"), new ArrayList<>(), ConnectorType.SOURCE);
-        lenient().when(distributedHerder.connectorStatus(any())).thenReturn(runningState);
-        lenient().doAnswer(invocation -> {
-            Callback> callback = invocation.getArgument(1);
-            // Simulate a successful completion
-            callback.onCompletion(null, null);
-            return null;
-        }).when(distributedHerder).connectorConfig(any(), any(Callback.class));
-        lenient().doAnswer(invocation -> {
-            Callback> callback = invocation.getArgument(3);
-            // Simulate a successful completion
-            callback.onCompletion(null, null);
-            return null;
-        }).when(distributedHerder).putConnectorConfig(any(String.class), any(Map.class), any(Boolean.class), any(Callback.class));
-        lenient().doAnswer(invocation -> {
-            Callback> callback = invocation.getArgument(1);
-            // Simulate a successful completion
-            callback.onCompletion(null, null);
-            return null;
-        }).when(distributedHerder).deleteConnectorConfig(any(), any(Callback.class));
-    }
-
-    @Test
-    void testInitializeKafkaConnectWithSingletonForSamePipeline() {
-        final KafkaConnect kafkaConnect = KafkaConnect.getPipelineInstance(TEST_PIPELINE_NAME, pluginMetrics, TEST_CONNECT_TIMEOUT, TEST_CONNECTOR_TIMEOUT);
-        final KafkaConnect sameConnect = KafkaConnect.getPipelineInstance(TEST_PIPELINE_NAME, pluginMetrics, TEST_CONNECT_TIMEOUT, TEST_CONNECTOR_TIMEOUT);
-        assertThat(sameConnect, is(kafkaConnect));
-        final String anotherPipeline = "anotherPipeline";
-        final KafkaConnect anotherKafkaConnect = KafkaConnect.getPipelineInstance(anotherPipeline, pluginMetrics, TEST_CONNECT_TIMEOUT, TEST_CONNECTOR_TIMEOUT);
-        assertThat(anotherKafkaConnect, not(kafkaConnect));
-    }
-
-    @Test
-    void testInitializeKafkaConnect() {
-        Map workerProps = DEFAULT_WORDER_PROPERTY.buildKafkaConnectPropertyMap();
-        try (MockedConstruction mockedConfig = mockConstruction(DistributedConfig.class, (mock, context) -> {
-            when(mock.kafkaClusterId()).thenReturn("test-cluster-id");
-            when(mock.getString(any())).thenReturn("test-string");
-        });
-             MockedConstruction mockedRestClient = mockConstruction(RestClient.class);
-             MockedConstruction mockedHerder = mockConstruction(DistributedHerder.class);
-             MockedConstruction mockedRestServer = mockConstruction(ConnectRestServer.class, (mock, context) -> {
-                 when(mock.advertisedUrl()).thenReturn(URI.create("localhost:9002"));
-             });
-             MockedConstruction mockedPlugin = mockConstruction(Plugins.class, (mock, context) -> {
-                 ClassLoader classLoader = mock(ClassLoader.class);
-                 ConnectorClientConfigOverridePolicy connectorPolicy = mock(ConnectorClientConfigOverridePolicy.class);
-                 when(mock.compareAndSwapWithDelegatingLoader()).thenReturn(classLoader);
-                 when(mock.newPlugin(any(), any(), any())).thenReturn(connectorPolicy);
-             });
-             MockedConstruction mockedWorker = mockConstruction(Worker.class, (mock, context) -> {
-                 WorkerConfigTransformer configTransformer = mock(WorkerConfigTransformer.class);
-                 Converter converter = mock(Converter.class);
-                 when(mock.configTransformer()).thenReturn(configTransformer);
-                 when(mock.getInternalValueConverter()).thenReturn(converter);
-             });
-             MockedConstruction mockedOffsetStore = mockConstruction(KafkaOffsetBackingStore.class, (mock, context) -> {
-                 doNothing().when(mock).configure(any());
-             })
-        ) {
-            final KafkaConnect kafkaConnect = KafkaConnect.getPipelineInstance(TEST_PIPELINE_NAME, pluginMetrics, TEST_CONNECT_TIMEOUT, TEST_CONNECTOR_TIMEOUT);
-            kafkaConnect.initialize(workerProps);
-        }
-    }
-
-    @Test
-    void testStartKafkaConnectSuccess() {
-        final KafkaConnect kafkaConnect = new KafkaConnect(distributedHerder, rest, connect, kafkaConnectMetrics);
-        doNothing().when(rest).initializeServer();
-        doNothing().when(connect).start();
-        kafkaConnect.start();
-        verify(rest).initializeServer();
-        verify(connect).start();
-    }
-
-    @Test
-    void testStartKafkaConnectFail() {
-        final KafkaConnect kafkaConnect = new KafkaConnect(distributedHerder, rest, connect, kafkaConnectMetrics);
-        doNothing().when(rest).initializeServer();
-        doThrow(new RuntimeException()).when(connect).start();
-        doNothing().when(connect).stop();
-        assertThrows(RuntimeException.class, kafkaConnect::start);
-        verify(connect, times(1)).stop();
-
-        // throw exception immediately if connect is null
-        final KafkaConnect kafkaConnect2 = new KafkaConnect(distributedHerder, rest, null, kafkaConnectMetrics);
-        assertThrows(RuntimeException.class, kafkaConnect2::start);
-    }
-
-    @Test
-    void testStartKafkaConnectFailTimeout() {
-        doNothing().when(rest).initializeServer();
-        doNothing().when(connect).start();
-        doNothing().when(connect).stop();
-        when(connect.isRunning()).thenReturn(false);
-        try (MockedStatic mockedStatic = mockStatic(Clock.class)) {
-            final Clock clock = mock(Clock.class);
-            mockedStatic.when(() -> Clock.systemUTC()).thenReturn(clock);
-            when(clock.millis()).thenReturn(0L).thenReturn(TEST_CONNECT_TIMEOUT_MS + 1);
-            final KafkaConnect kafkaConnect = new KafkaConnect(distributedHerder, rest, connect, kafkaConnectMetrics);
-            assertThrows(RuntimeException.class, kafkaConnect::start);
-            verify(rest).initializeServer();
-            verify(connect).start();
-            verify(connect).stop();
-            verify(clock, times(2)).millis();
-        }
-    }
-
-    @Test
-    void testStartKafkaConnectWithConnectRunningAlready() {
-        final KafkaConnect kafkaConnect = new KafkaConnect(distributedHerder, rest, connect, kafkaConnectMetrics);
-        when(connect.isRunning()).thenReturn(true);
-        kafkaConnect.start();
-        verify(rest, never()).initializeServer();
-        verify(connect, never()).start();
-    }
-
-    @Test
-    void testStopKafkaConnect() {
-        final KafkaConnect kafkaConnect = new KafkaConnect(distributedHerder, rest, connect, kafkaConnectMetrics);
-        kafkaConnect.stop();
-        verify(connect).stop();
-        // should ignore stop if connect is null
-        final KafkaConnect kafkaConnect2 = new KafkaConnect(distributedHerder, rest, null, kafkaConnectMetrics);
-        kafkaConnect2.stop();
-    }
-
-    @Test
-    void testInitConnectorsWhenStartKafkaConnectSuccess() {
-        final String oldConnectorName = "oldConnector";
-        final Connector newConnector = mock(Connector.class);
-        final String newConnectorName = "newConnector";
-        final Map newConnectorConfig = new HashMap<>();
-        when(newConnector.getName()).thenReturn(newConnectorName);
-        when(newConnector.getConfig()).thenReturn(newConnectorConfig);
-        when(distributedHerder.connectors()).thenReturn(List.of(oldConnectorName));
-
-        final KafkaConnect kafkaConnect = new KafkaConnect(distributedHerder, rest, connect, kafkaConnectMetrics);
-        kafkaConnect.addConnectors(List.of(newConnector));
-        kafkaConnect.start();
-        verify(distributedHerder).putConnectorConfig(eq(newConnectorName), eq(newConnectorConfig), eq(true), any(Callback.class));
-        verify(distributedHerder).deleteConnectorConfig(eq(oldConnectorName), any(Callback.class));
-    }
-
-    @Test
-    void testInitConnectorsWithoutConnectorConfigChange() {
-        final Connector newConnector = mock(Connector.class);
-        final String newConnectorName = "newConnector";
-        final Map newConnectorConfig = new HashMap<>();
-        when(newConnector.getName()).thenReturn(newConnectorName);
-        when(newConnector.getConfig()).thenReturn(newConnectorConfig);
-        when(newConnector.getAllowReplace()).thenReturn(false);
-        doAnswer(invocation -> {
-            Callback> callback = invocation.getArgument(1);
-            // Simulate a successful completion
-            callback.onCompletion(null, newConnectorConfig);
-            return null;
-        }).when(distributedHerder).connectorConfig(any(), any(Callback.class));
-
-        final KafkaConnect kafkaConnect = new KafkaConnect(distributedHerder, rest, connect, kafkaConnectMetrics);
-        kafkaConnect.addConnectors(List.of(newConnector));
-        kafkaConnect.start();
-        verify(distributedHerder).putConnectorConfig(eq(newConnectorName), eq(newConnectorConfig), eq(false), any(Callback.class));
-    }
-
-    @Test
-    void testInitConnectorsErrorsWhenDeleteConnector() {
-        final String oldConnectorName = "oldConnector";
-        when(distributedHerder.connectors()).thenReturn(List.of(oldConnectorName));
-        final KafkaConnect kafkaConnect = new KafkaConnect(distributedHerder, rest, connect, kafkaConnectMetrics);
-        doAnswer(invocation -> {
-            Callback> callback = invocation.getArgument(1);
-            // Simulate a successful completion
-            callback.onCompletion(new RuntimeException(), null);
-            return null;
-        }).when(distributedHerder).deleteConnectorConfig(eq(oldConnectorName), any(Callback.class));
-        assertThrows(RuntimeException.class, kafkaConnect::start);
-        // NotLeaderException or NotFoundException should be ignored.
-        doAnswer(invocation -> {
-            Callback> callback = invocation.getArgument(1);
-            callback.onCompletion(new NotLeaderException("Only Leader can delete.", "leaderUrl"), null);
-            return null;
-        }).when(distributedHerder).deleteConnectorConfig(eq(oldConnectorName), any(Callback.class));
-        kafkaConnect.start();
-        doAnswer(invocation -> {
-            Callback> callback = invocation.getArgument(1);
-            // Simulate a successful completion
-            callback.onCompletion(new NotFoundException("Not Found"), null);
-            return null;
-        }).when(distributedHerder).deleteConnectorConfig(eq(oldConnectorName), any(Callback.class));
-        kafkaConnect.start();
-    }
-
-    @Test
-    void testInitConnectorsErrorsWhenPutConnector() {
-        final Connector newConnector = mock(Connector.class);
-        final String newConnectorName = "newConnector";
-        final Map newConnectorConfig = new HashMap<>();
-        when(newConnector.getName()).thenReturn(newConnectorName);
-        when(newConnector.getConfig()).thenReturn(newConnectorConfig);
-        final KafkaConnect kafkaConnect = new KafkaConnect(distributedHerder, rest, connect, kafkaConnectMetrics);
-        kafkaConnect.addConnectors(List.of(newConnector));
-        // RuntimeException should be thrown
-        doAnswer(invocation -> {
-            Callback> callback = invocation.getArgument(3);
-            callback.onCompletion(new RuntimeException(), null);
-            return null;
-        }).when(distributedHerder).putConnectorConfig(eq(newConnectorName), eq(newConnectorConfig), eq(true), any(Callback.class));
-        assertThrows(RuntimeException.class, kafkaConnect::start);
-        // NotLeaderException or NotFoundException should be ignored.
-        doAnswer(invocation -> {
-            Callback> callback = invocation.getArgument(3);
-            callback.onCompletion(new NotLeaderException("not leader", "leaderUrl"), null);
-            return null;
-        }).when(distributedHerder).putConnectorConfig(eq(newConnectorName), eq(newConnectorConfig), eq(true), any(Callback.class));
-        kafkaConnect.start();
-        doAnswer(invocation -> {
-            Callback> callback = invocation.getArgument(3);
-            callback.onCompletion(new AlreadyExistsException("Already added"), null);
-            return null;
-        }).when(distributedHerder).putConnectorConfig(eq(newConnectorName), eq(newConnectorConfig), eq(true), any(Callback.class));
-        kafkaConnect.start();
-    }
-
-    @Test
-    void testInitConnectorsErrorsWhenConnectorsNotRunning() {
-        // should throw exception if connector failed in Running state for 30 seconds
-        final Connector newConnector = mock(Connector.class);
-        final String newConnectorName = "newConnector";
-        final Map newConnectorConfig = new HashMap<>();
-        when(newConnector.getName()).thenReturn(newConnectorName);
-        when(newConnector.getConfig()).thenReturn(newConnectorConfig);
-        when(distributedHerder.connectorStatus(eq(newConnectorName))).thenReturn(null);
-
-        try (MockedStatic mockedStatic = mockStatic(Clock.class)) {
-            final Clock clock = mock(Clock.class);
-            mockedStatic.when(() -> Clock.systemUTC()).thenReturn(clock);
-            when(clock.millis()).thenReturn(0L).thenReturn(0L).thenReturn(0L).thenReturn(0L).thenReturn(TEST_CONNECTOR_TIMEOUT_MS + 1);
-            final KafkaConnect kafkaConnect = new KafkaConnect(distributedHerder, rest, connect, kafkaConnectMetrics);
-            kafkaConnect.addConnectors(List.of(newConnector));
-            assertThrows(RuntimeException.class, kafkaConnect::start);
-            verify(distributedHerder, times(1)).connectorStatus(any());
-            verify(distributedHerder).putConnectorConfig(eq(newConnectorName), eq(newConnectorConfig), eq(true), any(Callback.class));
-            verify(clock, times(5)).millis();
-        }
-    }
-}
diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/SecretManagerHelperTest.java b/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/SecretManagerHelperTest.java
deleted file mode 100644
index 207d651108..0000000000
--- a/data-prepper-plugins/kafka-connect-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafkaconnect/util/SecretManagerHelperTest.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright OpenSearch Contributors
- * SPDX-License-Identifier: Apache-2.0
- */
-
-package org.opensearch.dataprepper.plugins.kafkaconnect.util;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mock;
-import org.mockito.MockedStatic;
-import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
-import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration;
-import software.amazon.awssdk.services.secretsmanager.SecretsManagerClient;
-import software.amazon.awssdk.services.secretsmanager.SecretsManagerClientBuilder;
-import software.amazon.awssdk.services.secretsmanager.model.GetSecretValueRequest;
-import software.amazon.awssdk.services.secretsmanager.model.GetSecretValueResponse;
-import software.amazon.awssdk.services.sts.StsClient;
-import software.amazon.awssdk.services.sts.StsClientBuilder;
-import software.amazon.awssdk.services.sts.auth.StsAssumeRoleCredentialsProvider;
-
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.is;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.lenient;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.mockStatic;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-public class SecretManagerHelperTest {
-    private final String expectedSecretString = "expectedSecret";
-    private final String testStsRole = "testRole";
-    private final String testRegion = "testRegion";
-    private final String testSecretId = "testSecritId";
-    @Mock
-    private SecretsManagerClientBuilder secretsManagerClientBuilder;
-    @Mock
-    private SecretsManagerClient secretsManagerClient;
-    @Mock
-    private GetSecretValueResponse getSecretValueResponse;
-
-    @BeforeEach
-    void setup() {
-        secretsManagerClientBuilder = mock(SecretsManagerClientBuilder.class);
-        secretsManagerClient = mock(SecretsManagerClient.class);
-        getSecretValueResponse = mock(GetSecretValueResponse.class);
-        lenient().when(secretsManagerClientBuilder.overrideConfiguration(any(ClientOverrideConfiguration.class))).thenReturn(secretsManagerClientBuilder);
-        lenient().when(secretsManagerClientBuilder.credentialsProvider(any(AwsCredentialsProvider.class))).thenReturn(secretsManagerClientBuilder);
-        lenient().when(secretsManagerClientBuilder.region(any())).thenReturn(secretsManagerClientBuilder);
-        lenient().when(secretsManagerClientBuilder.build()).thenReturn(secretsManagerClient);
-        lenient().when(secretsManagerClient.getSecretValue(any(GetSecretValueRequest.class))).thenReturn(getSecretValueResponse);
-        lenient().when(getSecretValueResponse.secretString()).thenReturn(expectedSecretString);
-    }
-
-    @Test
-    void test_get_secret_without_sts() {
-        try (MockedStatic mockedStatic = mockStatic(SecretsManagerClient.class)) {
-            mockedStatic.when(() -> SecretsManagerClient.builder()).thenReturn(secretsManagerClientBuilder);
-            String result = SecretManagerHelper.getSecretValue("", testRegion, testSecretId);
-            assertThat(result, is(expectedSecretString));
-            verify(secretsManagerClientBuilder, times(1)).credentialsProvider(any(AwsCredentialsProvider.class));
-        }
-    }
-
-    @Test
-    void test_get_secret_with_sts() {
-        try (MockedStatic mockedSts = mockStatic(StsClient.class);
-             MockedStatic mockedStatic = mockStatic(SecretsManagerClient.class)) {
-            StsClient stsClient = mock(StsClient.class);
-            StsClientBuilder stsClientBuilder = mock(StsClientBuilder.class);
-            when(stsClientBuilder.overrideConfiguration(any(ClientOverrideConfiguration.class))).thenReturn(stsClientBuilder);
-            when(stsClientBuilder.credentialsProvider(any(AwsCredentialsProvider.class))).thenReturn(stsClientBuilder);
-            when(stsClientBuilder.region(any())).thenReturn(stsClientBuilder);
-            when(stsClientBuilder.build()).thenReturn(stsClient);
-
-            mockedSts.when(() -> StsClient.builder()).thenReturn(stsClientBuilder);
-            mockedStatic.when(() -> SecretsManagerClient.builder()).thenReturn(secretsManagerClientBuilder);
-            String result = SecretManagerHelper.getSecretValue(testStsRole, testRegion, testSecretId);
-            assertThat(result, is(expectedSecretString));
-            verify(secretsManagerClientBuilder, times(1)).credentialsProvider(any(StsAssumeRoleCredentialsProvider.class));
-        }
-    }
-}
diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/resources/sample-data-prepper-config-with-default-kafka-connect-config-extension.yaml b/data-prepper-plugins/kafka-connect-plugins/src/test/resources/sample-data-prepper-config-with-default-kafka-connect-config-extension.yaml
deleted file mode 100644
index 753d4f9f5d..0000000000
--- a/data-prepper-plugins/kafka-connect-plugins/src/test/resources/sample-data-prepper-config-with-default-kafka-connect-config-extension.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-extensions:
-  kafka_connect_config:
-    worker_properties:
-      group_id: test-group
-      client_id: test-client
-      config_storage_topic: test-configs
-      offset_storage_topic: test-offsets
-      status_storage_topic: test-status
\ No newline at end of file
diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/resources/sample-data-prepper-config-with-kafka-connect-config-extension.yaml b/data-prepper-plugins/kafka-connect-plugins/src/test/resources/sample-data-prepper-config-with-kafka-connect-config-extension.yaml
deleted file mode 100644
index e41c7d04fe..0000000000
--- a/data-prepper-plugins/kafka-connect-plugins/src/test/resources/sample-data-prepper-config-with-kafka-connect-config-extension.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-extensions:
-  kafka_connect_config:
-    bootstrap_servers:
-      - test:123
-    connect_start_timeout: 3000ms
-    connector_start_timeout: 3s
-    worker_properties:
-      group_id: test-group
-      client_id: test-client
-      config_storage_topic: test-configs
-      offset_storage_topic: test-offsets
-      status_storage_topic: test-status
-      offset_storage_partitions: 2 #optional and default is 25
-      offset_flush_interval: 6s #optional and default is 60000 (60s)
-      offset_flush_timeout: 500ms #optional and default is 5000 (5s)
-      status_storage_partitions: 1 #optional and default is 5
-      heartbeat_interval: 300ms #optional and default is 3000 (3s)
-      session_timeout: 3s #optional and default is 30000 (30s)
-      connector_rebalance_max_delay: 60s
\ No newline at end of file
diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/resources/sample-mongodb-pipeline.yaml b/data-prepper-plugins/kafka-connect-plugins/src/test/resources/sample-mongodb-pipeline.yaml
deleted file mode 100644
index 0b10fe2891..0000000000
--- a/data-prepper-plugins/kafka-connect-plugins/src/test/resources/sample-mongodb-pipeline.yaml
+++ /dev/null
@@ -1,14 +0,0 @@
-log-pipeline:
-  source:
-    mongodb:
-      hostname: localhost
-      ingestion_mode: export_stream
-      credentials:
-        plaintext:
-          username: debezium
-          password: dbz
-      collections:
-        - topic_prefix: prefix1
-          collection: test.customers
-  sink:
-    - noop:
\ No newline at end of file
diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/resources/sample-mysql-pipeline.yaml b/data-prepper-plugins/kafka-connect-plugins/src/test/resources/sample-mysql-pipeline.yaml
deleted file mode 100644
index bdbaeff015..0000000000
--- a/data-prepper-plugins/kafka-connect-plugins/src/test/resources/sample-mysql-pipeline.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
-log-pipeline:
-  source:
-    mysql:
-      hostname: localhost
-      credentials:
-        plaintext:
-          username: debezium
-          password: dbz
-      tables:
-        - topic_prefix: prefix1
-          table: inventory.customers
-  sink:
-    - noop:
\ No newline at end of file
diff --git a/data-prepper-plugins/kafka-connect-plugins/src/test/resources/sample-postgres-pipeline.yaml b/data-prepper-plugins/kafka-connect-plugins/src/test/resources/sample-postgres-pipeline.yaml
deleted file mode 100644
index c843e3ccff..0000000000
--- a/data-prepper-plugins/kafka-connect-plugins/src/test/resources/sample-postgres-pipeline.yaml
+++ /dev/null
@@ -1,14 +0,0 @@
-log-pipeline:
-  source:
-    postgresql:
-      hostname: localhost
-      credentials:
-        plaintext:
-          username: debezium
-          password: dbz
-      tables:
-        - topic_prefix: psql
-          database: postgres
-          table: public.customers
-  sink:
-    - noop:
\ No newline at end of file
diff --git a/settings.gradle b/settings.gradle
index 2b6e655a37..590648b2e9 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -134,7 +134,6 @@ include 'data-prepper-plugins:failures-common'
 include 'data-prepper-plugins:newline-codecs'
 include 'data-prepper-plugins:avro-codecs'
 include 'data-prepper-plugins:kafka-plugins'
-//include 'data-prepper-plugins:kafka-connect-plugins'
 include 'data-prepper-plugins:user-agent-processor'
 include 'data-prepper-plugins:in-memory-source-coordination-store'
 include 'data-prepper-plugins:aws-plugin-api'